/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>

#include "av1/common/common.h"
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/mvref_common.h"
#include "av1/common/pred_common.h"
#include "av1/common/reconinter.h"
#if CONFIG_EXT_INTRA
#include "av1/common/reconintra.h"
#endif  // CONFIG_EXT_INTRA
#include "av1/common/seg_common.h"
#if CONFIG_WARPED_MOTION
#include "av1/common/warped_motion.h"
#endif  // CONFIG_WARPED_MOTION

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decodemv.h"

#include "aom_dsp/aom_dsp_common.h"

#define ACCT_STR __func__
#if CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
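// Reads a value in [0, n) with a near-uniform (truncated binary) code: the
// first l - 1 bits cover the m shortest codewords, and one extra bit resolves
// the remaining values.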
static INLINE int read_uniform(aom_reader *r, int n) {
  int l = get_unsigned_bits(n);
  int m = (1 << l) - n;
  int v = aom_read_literal(r, l - 1, ACCT_STR);

  assert(l != 0);

  if (v < m)
    return v;
  else
    return (v << 1) - m + aom_read_literal(r, 1, ACCT_STR);
}
#endif  // CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE

#if CONFIG_EC_MULTISYMBOL
static PREDICTION_MODE read_intra_mode(aom_reader *r, aom_cdf_prob *cdf) {
  return (PREDICTION_MODE)
      av1_intra_mode_inv[aom_read_symbol(r, cdf, INTRA_MODES, ACCT_STR)];
}
#else
static PREDICTION_MODE read_intra_mode(aom_reader *r, const aom_prob *p) {
  return (PREDICTION_MODE)aom_read_tree(r, av1_intra_mode_tree, p, ACCT_STR);
}
#endif

#if CONFIG_DELTA_Q
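// Decodes the reduced delta qindex coded with the first block of a
// superblock: a unary-coded magnitude up to DELTA_Q_SMALL, an escape with an
// explicit bit count, and a sign bit when the magnitude is nonzero.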
static int read_delta_qindex(AV1_COMMON *cm, MACROBLOCKD *xd, aom_reader *r,
                             MB_MODE_INFO *const mbmi, int mi_col, int mi_row) {
  FRAME_COUNTS *counts = xd->counts;
  int sign, abs, reduced_delta_qindex = 0;
  BLOCK_SIZE bsize = mbmi->sb_type;
  const int b_col = mi_col & MAX_MIB_MASK;
  const int b_row = mi_row & MAX_MIB_MASK;
  const int read_delta_q_flag = (b_col == 0 && b_row == 0);
  int rem_bits, thr, bit = 1;

  if ((bsize != BLOCK_64X64 || mbmi->skip == 0) && read_delta_q_flag) {
    abs = 0;
    while (abs < DELTA_Q_SMALL && bit) {
      bit = aom_read(r, cm->fc->delta_q_prob[abs], ACCT_STR);
      if (counts) counts->delta_q[abs][bit]++;
      abs += bit;
    }
    if (abs == DELTA_Q_SMALL) {
      rem_bits = aom_read_literal(r, 3, ACCT_STR);
      thr = (1 << rem_bits) + 1;
      abs = aom_read_literal(r, rem_bits, ACCT_STR) + thr;
    }

    if (abs) {
      sign = aom_read_bit(r, ACCT_STR);
    } else {
      sign = 1;
    }

    reduced_delta_qindex = sign ? -abs : abs;
  }
  return reduced_delta_qindex;
}
#endif

static PREDICTION_MODE read_intra_mode_y(AV1_COMMON *cm, MACROBLOCKD *xd,
                                         aom_reader *r, int size_group) {
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
#elif CONFIG_EC_MULTISYMBOL
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif

  const PREDICTION_MODE y_mode =
#if CONFIG_EC_MULTISYMBOL
      read_intra_mode(r, ec_ctx->y_mode_cdf[size_group]);
#else
      read_intra_mode(r, cm->fc->y_mode_prob[size_group]);
#endif
  FRAME_COUNTS *counts = xd->counts;
#if CONFIG_EC_ADAPT
  (void)cm;
#endif
  if (counts) ++counts->y_mode[size_group][y_mode];
  return y_mode;
}

static PREDICTION_MODE read_intra_mode_uv(AV1_COMMON *cm, MACROBLOCKD *xd,
                                          aom_reader *r,
                                          PREDICTION_MODE y_mode) {
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
#elif CONFIG_EC_MULTISYMBOL
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif

  const PREDICTION_MODE uv_mode =
#if CONFIG_EC_MULTISYMBOL
      read_intra_mode(r, ec_ctx->uv_mode_cdf[y_mode]);
#else
      read_intra_mode(r, cm->fc->uv_mode_prob[y_mode]);
#endif
  FRAME_COUNTS *counts = xd->counts;
#if CONFIG_EC_ADAPT
  (void)cm;
#endif
  if (counts) ++counts->uv_mode[y_mode][uv_mode];
  return uv_mode;
}

#if CONFIG_EXT_INTER
static INTERINTRA_MODE read_interintra_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                            aom_reader *r, int size_group) {
  const INTERINTRA_MODE ii_mode = (INTERINTRA_MODE)aom_read_tree(
      r, av1_interintra_mode_tree, cm->fc->interintra_mode_prob[size_group],
      ACCT_STR);
  FRAME_COUNTS *counts = xd->counts;
  if (counts) ++counts->interintra_mode[size_group][ii_mode];
  return ii_mode;
}
#endif  // CONFIG_EXT_INTER

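// With CONFIG_REF_MV the inter mode is a cascade of binary decisions (NEWMV,
// then ZEROMV, then NEARESTMV vs NEARMV), each with a context derived from
// the mode-context bits in |ctx|; otherwise a single tree/symbol read selects
// among INTER_MODES.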
static PREDICTION_MODE read_inter_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                                       MB_MODE_INFO *mbmi,
#endif
                                       aom_reader *r, int16_t ctx) {
#if CONFIG_REF_MV
  FRAME_COUNTS *counts = xd->counts;
  int16_t mode_ctx = ctx & NEWMV_CTX_MASK;
  aom_prob mode_prob = cm->fc->newmv_prob[mode_ctx];

  if (aom_read(r, mode_prob, ACCT_STR) == 0) {
    if (counts) ++counts->newmv_mode[mode_ctx][0];

#if CONFIG_EXT_INTER
    if (has_second_ref(mbmi)) {
#endif  // CONFIG_EXT_INTER
      return NEWMV;
#if CONFIG_EXT_INTER
    } else {
      mode_prob = cm->fc->new2mv_prob;
      if (aom_read(r, mode_prob, ACCT_STR) == 0) {
        if (counts) ++counts->new2mv_mode[0];
        return NEWMV;
      } else {
        if (counts) ++counts->new2mv_mode[1];
        return NEWFROMNEARMV;
      }
    }
#endif  // CONFIG_EXT_INTER
  }
  if (counts) ++counts->newmv_mode[mode_ctx][1];

  if (ctx & (1 << ALL_ZERO_FLAG_OFFSET)) return ZEROMV;

  mode_ctx = (ctx >> ZEROMV_OFFSET) & ZEROMV_CTX_MASK;

  mode_prob = cm->fc->zeromv_prob[mode_ctx];
  if (aom_read(r, mode_prob, ACCT_STR) == 0) {
    if (counts) ++counts->zeromv_mode[mode_ctx][0];
    return ZEROMV;
  }
  if (counts) ++counts->zeromv_mode[mode_ctx][1];

  mode_ctx = (ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;

  if (ctx & (1 << SKIP_NEARESTMV_OFFSET)) mode_ctx = 6;
  if (ctx & (1 << SKIP_NEARMV_OFFSET)) mode_ctx = 7;
  if (ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) mode_ctx = 8;

  mode_prob = cm->fc->refmv_prob[mode_ctx];

  if (aom_read(r, mode_prob, ACCT_STR) == 0) {
    if (counts) ++counts->refmv_mode[mode_ctx][0];

    return NEARESTMV;
  } else {
    if (counts) ++counts->refmv_mode[mode_ctx][1];
    return NEARMV;
  }

  // Invalid prediction mode.
  assert(0);
#else
#if CONFIG_EC_MULTISYMBOL
  const int mode = av1_inter_mode_inv[aom_read_symbol(
      r, cm->fc->inter_mode_cdf[ctx], INTER_MODES, ACCT_STR)];
#else
  const int mode = aom_read_tree(r, av1_inter_mode_tree,
                                 cm->fc->inter_mode_probs[ctx], ACCT_STR);
#endif
  FRAME_COUNTS *counts = xd->counts;
  if (counts) ++counts->inter_mode[ctx][mode];

  return NEARESTMV + mode;
#endif
}

#if CONFIG_REF_MV
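// Decodes the dynamic reference MV (DRL) index: for NEWMV and NEARMV, walk
// the reference MV stack and read one flag per extra candidate until the
// selected entry is found.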
static void read_drl_idx(const AV1_COMMON *cm, MACROBLOCKD *xd,
                         MB_MODE_INFO *mbmi, aom_reader *r) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
  mbmi->ref_mv_idx = 0;

  if (mbmi->mode == NEWMV) {
    int idx;
    for (idx = 0; idx < 2; ++idx) {
      if (xd->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx = av1_drl_ctx(xd->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];
        if (!aom_read(r, drl_prob, ACCT_STR)) {
          mbmi->ref_mv_idx = idx;
          if (xd->counts) ++xd->counts->drl_mode[drl_ctx][0];
          return;
        }
        mbmi->ref_mv_idx = idx + 1;
        if (xd->counts) ++xd->counts->drl_mode[drl_ctx][1];
      }
    }
  }

  if (mbmi->mode == NEARMV) {
    int idx;
    // Offset the NEARESTMV mode.
    // TODO(jingning): Unify the two syntax decoding loops after the NEARESTMV
    // mode is factored in.
    for (idx = 1; idx < 3; ++idx) {
      if (xd->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx = av1_drl_ctx(xd->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];
        if (!aom_read(r, drl_prob, ACCT_STR)) {
          mbmi->ref_mv_idx = idx - 1;
          if (xd->counts) ++xd->counts->drl_mode[drl_ctx][0];
          return;
        }
        mbmi->ref_mv_idx = idx;
        if (xd->counts) ++xd->counts->drl_mode[drl_ctx][1];
      }
    }
  }
}
#endif

#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
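// Decodes the motion mode (OBMC or warped motion) when the block allows more
// than simple translation.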
static MOTION_MODE read_motion_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                    MB_MODE_INFO *mbmi, aom_reader *r) {
  MOTION_MODE last_motion_mode_allowed = motion_mode_allowed(mbmi);
  int motion_mode;
  FRAME_COUNTS *counts = xd->counts;

  if (last_motion_mode_allowed == SIMPLE_TRANSLATION) return SIMPLE_TRANSLATION;
#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
  if (last_motion_mode_allowed == OBMC_CAUSAL) {
    motion_mode = aom_read(r, cm->fc->obmc_prob[mbmi->sb_type], ACCT_STR);
    if (counts) ++counts->obmc[mbmi->sb_type][motion_mode];
    return (MOTION_MODE)(SIMPLE_TRANSLATION + motion_mode);
  } else {
#endif  // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
    motion_mode =
        aom_read_tree(r, av1_motion_mode_tree,
                      cm->fc->motion_mode_prob[mbmi->sb_type], ACCT_STR);
    if (counts) ++counts->motion_mode[mbmi->sb_type][motion_mode];
    return (MOTION_MODE)(SIMPLE_TRANSLATION + motion_mode);
#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
  }
#endif  // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
}
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION

#if CONFIG_EXT_INTER
static PREDICTION_MODE read_inter_compound_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                                aom_reader *r, int16_t ctx) {
  const int mode =
      aom_read_tree(r, av1_inter_compound_mode_tree,
                    cm->fc->inter_compound_mode_probs[ctx], ACCT_STR);
  FRAME_COUNTS *counts = xd->counts;

  if (counts) ++counts->inter_compound_mode[ctx][mode];

  assert(is_inter_compound_mode(NEAREST_NEARESTMV + mode));
  return NEAREST_NEARESTMV + mode;
}
#endif  // CONFIG_EXT_INTER

static int read_segment_id(aom_reader *r, struct segmentation_probs *segp) {
#if CONFIG_EC_MULTISYMBOL
  return aom_read_symbol(r, segp->tree_cdf, MAX_SEGMENTS, ACCT_STR);
#else
  return aom_read_tree(r, av1_segment_tree, segp->tree_probs, ACCT_STR);
#endif
}

#if CONFIG_VAR_TX
static void read_tx_size_vartx(AV1_COMMON *cm, MACROBLOCKD *xd,
                               MB_MODE_INFO *mbmi, FRAME_COUNTS *counts,
                               TX_SIZE tx_size, int depth, int blk_row,
                               int blk_col, aom_reader *r) {
  int is_split = 0;
  const int tx_row = blk_row >> 1;
  const int tx_col = blk_col >> 1;
  const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
  const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);
  int ctx = txfm_partition_context(xd->above_txfm_context + tx_col,
                                   xd->left_txfm_context + tx_row,
                                   mbmi->sb_type, tx_size);
  TX_SIZE(*const inter_tx_size)
  [MAX_MIB_SIZE] =
      (TX_SIZE(*)[MAX_MIB_SIZE]) & mbmi->inter_tx_size[tx_row][tx_col];
  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (depth == MAX_VARTX_DEPTH) {
    int idx, idy;
    inter_tx_size[0][0] = tx_size;
    for (idy = 0; idy < tx_size_high_unit[tx_size] / 2; ++idy)
      for (idx = 0; idx < tx_size_wide_unit[tx_size] / 2; ++idx)
        inter_tx_size[idy][idx] = tx_size;
    mbmi->tx_size = tx_size;
    mbmi->min_tx_size = AOMMIN(mbmi->min_tx_size, get_min_tx_size(tx_size));
    if (counts) ++counts->txfm_partition[ctx][0];
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size, tx_size);
    return;
  }

  is_split = aom_read(r, cm->fc->txfm_partition_prob[ctx], ACCT_STR);

  if (is_split) {
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsl = tx_size_wide_unit[sub_txs];
    int i;

    if (counts) ++counts->txfm_partition[ctx][1];

    if (tx_size == TX_8X8) {
      int idx, idy;
      inter_tx_size[0][0] = sub_txs;
      for (idy = 0; idy < tx_size_high_unit[tx_size] / 2; ++idy)
        for (idx = 0; idx < tx_size_wide_unit[tx_size] / 2; ++idx)
          inter_tx_size[idy][idx] = inter_tx_size[0][0];
      mbmi->tx_size = sub_txs;
      mbmi->min_tx_size = get_min_tx_size(mbmi->tx_size);
      txfm_partition_update(xd->above_txfm_context + tx_col,
                            xd->left_txfm_context + tx_row, sub_txs, tx_size);
      return;
    }

    assert(bsl > 0);
    for (i = 0; i < 4; ++i) {
      int offsetr = blk_row + (i >> 1) * bsl;
      int offsetc = blk_col + (i & 0x01) * bsl;
      read_tx_size_vartx(cm, xd, mbmi, counts, sub_txs, depth + 1, offsetr,
                         offsetc, r);
    }
  } else {
    int idx, idy;
    inter_tx_size[0][0] = tx_size;
    for (idy = 0; idy < tx_size_high_unit[tx_size] / 2; ++idy)
      for (idx = 0; idx < tx_size_wide_unit[tx_size] / 2; ++idx)
        inter_tx_size[idy][idx] = tx_size;
    mbmi->tx_size = tx_size;
    mbmi->min_tx_size = AOMMIN(mbmi->min_tx_size, get_min_tx_size(tx_size));
    if (counts) ++counts->txfm_partition[ctx][0];
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size, tx_size);
  }
}
#endif

static TX_SIZE read_selected_tx_size(AV1_COMMON *cm, MACROBLOCKD *xd,
                                     int tx_size_cat, aom_reader *r) {
  FRAME_COUNTS *counts = xd->counts;
  const int ctx = get_tx_size_context(xd);
  const int depth =
      aom_read_tree(r, av1_tx_size_tree[tx_size_cat],
                    cm->fc->tx_size_probs[tx_size_cat][ctx], ACCT_STR);
  const TX_SIZE tx_size = depth_to_tx_size(depth);
#if CONFIG_RECT_TX
  assert(!is_rect_tx(tx_size));
#endif  // CONFIG_RECT_TX
  if (counts) ++counts->tx_size[tx_size_cat][ctx][depth];
  return tx_size;
}

static TX_SIZE read_tx_size(AV1_COMMON *cm, MACROBLOCKD *xd, int is_inter,
                            int allow_select_inter, aom_reader *r) {
  const TX_MODE tx_mode = cm->tx_mode;
  const BLOCK_SIZE bsize = xd->mi[0]->mbmi.sb_type;
  if (xd->lossless[xd->mi[0]->mbmi.segment_id]) return TX_4X4;
#if CONFIG_CB4X4 && CONFIG_VAR_TX
  if ((bsize > BLOCK_4X4 && is_inter) || bsize >= BLOCK_8X8) {
#else
  if (bsize >= BLOCK_8X8) {
#endif  // CONFIG_CB4X4 && CONFIG_VAR_TX
    if ((!is_inter || allow_select_inter) && tx_mode == TX_MODE_SELECT) {
      const int32_t tx_size_cat = is_inter ? inter_tx_size_cat_lookup[bsize]
                                           : intra_tx_size_cat_lookup[bsize];
      const TX_SIZE coded_tx_size =
          read_selected_tx_size(cm, xd, tx_size_cat, r);
#if CONFIG_EXT_TX && CONFIG_RECT_TX
      if (coded_tx_size > max_txsize_lookup[bsize]) {
        assert(coded_tx_size == max_txsize_lookup[bsize] + 1);
        return max_txsize_rect_lookup[bsize];
      }
#else
      assert(coded_tx_size <= max_txsize_lookup[bsize]);
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX
      return coded_tx_size;
    } else {
      return tx_size_from_tx_mode(bsize, tx_mode, is_inter);
    }
  } else {
#if CONFIG_EXT_TX && CONFIG_RECT_TX
    assert(IMPLIES(tx_mode == ONLY_4X4, bsize == BLOCK_4X4));
    return max_txsize_rect_lookup[bsize];
#else
    return TX_4X4;
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX
  }
}

static int dec_get_segment_id(const AV1_COMMON *cm, const uint8_t *segment_ids,
                              int mi_offset, int x_mis, int y_mis) {
  int x, y, segment_id = INT_MAX;

  for (y = 0; y < y_mis; y++)
    for (x = 0; x < x_mis; x++)
      segment_id =
          AOMMIN(segment_id, segment_ids[mi_offset + y * cm->mi_cols + x]);

  assert(segment_id >= 0 && segment_id < MAX_SEGMENTS);
  return segment_id;
}

static void set_segment_id(AV1_COMMON *cm, int mi_offset, int x_mis, int y_mis,
                           int segment_id) {
  int x, y;

  assert(segment_id >= 0 && segment_id < MAX_SEGMENTS);

  for (y = 0; y < y_mis; y++)
    for (x = 0; x < x_mis; x++)
      cm->current_frame_seg_map[mi_offset + y * cm->mi_cols + x] = segment_id;
}

static int read_intra_segment_id(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 int mi_offset, int x_mis, int y_mis,
                                 aom_reader *r) {
  struct segmentation *const seg = &cm->seg;
  FRAME_COUNTS *counts = xd->counts;
  struct segmentation_probs *const segp = &cm->fc->seg;
  int segment_id;

  if (!seg->enabled) return 0;  // Default for disabled segmentation

  assert(seg->update_map && !seg->temporal_update);

  segment_id = read_segment_id(r, segp);
  if (counts) ++counts->seg.tree_total[segment_id];
  set_segment_id(cm, mi_offset, x_mis, y_mis, segment_id);
  return segment_id;
}

static void copy_segment_id(const AV1_COMMON *cm,
                            const uint8_t *last_segment_ids,
                            uint8_t *current_segment_ids, int mi_offset,
                            int x_mis, int y_mis) {
  int x, y;

  for (y = 0; y < y_mis; y++)
    for (x = 0; x < x_mis; x++)
      current_segment_ids[mi_offset + y * cm->mi_cols + x] =
          last_segment_ids ? last_segment_ids[mi_offset + y * cm->mi_cols + x]
                           : 0;
}

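// Reads the segment id of an inter block. With temporal_update, a predicted
// flag first signals whether the id is copied from the previous frame's
// segmentation map; otherwise the id is coded explicitly.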
static int read_inter_segment_id(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 int mi_row, int mi_col, aom_reader *r) {
  struct segmentation *const seg = &cm->seg;
  FRAME_COUNTS *counts = xd->counts;
  struct segmentation_probs *const segp = &cm->fc->seg;
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  int predicted_segment_id, segment_id;
  const int mi_offset = mi_row * cm->mi_cols + mi_col;
  const int bw = mi_size_wide[mbmi->sb_type];
  const int bh = mi_size_high[mbmi->sb_type];

  // TODO(slavarnway): move x_mis, y_mis into xd ?????
  const int x_mis = AOMMIN(cm->mi_cols - mi_col, bw);
  const int y_mis = AOMMIN(cm->mi_rows - mi_row, bh);

  if (!seg->enabled) return 0;  // Default for disabled segmentation

  predicted_segment_id = cm->last_frame_seg_map
                             ? dec_get_segment_id(cm, cm->last_frame_seg_map,
                                                  mi_offset, x_mis, y_mis)
                             : 0;

  if (!seg->update_map) {
    copy_segment_id(cm, cm->last_frame_seg_map, cm->current_frame_seg_map,
                    mi_offset, x_mis, y_mis);
    return predicted_segment_id;
  }

  if (seg->temporal_update) {
    const int ctx = av1_get_pred_context_seg_id(xd);
    const aom_prob pred_prob = segp->pred_probs[ctx];
    mbmi->seg_id_predicted = aom_read(r, pred_prob, ACCT_STR);
    if (counts) ++counts->seg.pred[ctx][mbmi->seg_id_predicted];
    if (mbmi->seg_id_predicted) {
      segment_id = predicted_segment_id;
    } else {
      segment_id = read_segment_id(r, segp);
      if (counts) ++counts->seg.tree_mispred[segment_id];
    }
  } else {
    segment_id = read_segment_id(r, segp);
    if (counts) ++counts->seg.tree_total[segment_id];
  }
  set_segment_id(cm, mi_offset, x_mis, y_mis, segment_id);
  return segment_id;
}

static int read_skip(AV1_COMMON *cm, const MACROBLOCKD *xd, int segment_id,
                     aom_reader *r) {
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int ctx = av1_get_skip_context(xd);
    const int skip = aom_read(r, cm->fc->skip_probs[ctx], ACCT_STR);
    FRAME_COUNTS *counts = xd->counts;
    if (counts) ++counts->skip[ctx][skip];
    return skip;
  }
}

#if CONFIG_PALETTE
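// Decodes the palette mode for luma and chroma DC_PRED blocks: a use-palette
// flag, the palette size, the raw palette colors, and the first color index
// of each plane.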
static void read_palette_mode_info(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                   aom_reader *r) {
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  int i, n, palette_ctx = 0;
  PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;

  if (mbmi->mode == DC_PRED) {
    if (above_mi)
      palette_ctx += (above_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    if (left_mi)
      palette_ctx += (left_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    if (aom_read(
            r, av1_default_palette_y_mode_prob[bsize - BLOCK_8X8][palette_ctx],
            ACCT_STR)) {
      pmi->palette_size[0] =
          aom_read_tree(r, av1_palette_size_tree,
                        av1_default_palette_y_size_prob[bsize - BLOCK_8X8],
                        ACCT_STR) +
          2;
      n = pmi->palette_size[0];
      for (i = 0; i < n; ++i)
        pmi->palette_colors[i] = aom_read_literal(r, cm->bit_depth, ACCT_STR);

      xd->plane[0].color_index_map[0] = read_uniform(r, n);
      assert(xd->plane[0].color_index_map[0] < n);
    }
  }

  if (mbmi->uv_mode == DC_PRED) {
    if (aom_read(r, av1_default_palette_uv_mode_prob[pmi->palette_size[0] > 0],
                 ACCT_STR)) {
      pmi->palette_size[1] =
          aom_read_tree(r, av1_palette_size_tree,
                        av1_default_palette_uv_size_prob[bsize - BLOCK_8X8],
                        ACCT_STR) +
          2;
      n = pmi->palette_size[1];
      for (i = 0; i < n; ++i) {
        pmi->palette_colors[PALETTE_MAX_SIZE + i] =
            aom_read_literal(r, cm->bit_depth, ACCT_STR);
        pmi->palette_colors[2 * PALETTE_MAX_SIZE + i] =
            aom_read_literal(r, cm->bit_depth, ACCT_STR);
      }
      xd->plane[1].color_index_map[0] = read_uniform(r, n);
      assert(xd->plane[1].color_index_map[0] < n);
    }
  }
}
#endif  // CONFIG_PALETTE

#if CONFIG_FILTER_INTRA
static void read_filter_intra_mode_info(AV1_COMMON *const cm,
                                        MACROBLOCKD *const xd, aom_reader *r) {
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  FRAME_COUNTS *counts = xd->counts;
  FILTER_INTRA_MODE_INFO *filter_intra_mode_info =
      &mbmi->filter_intra_mode_info;

  if (mbmi->mode == DC_PRED
#if CONFIG_PALETTE
      && mbmi->palette_mode_info.palette_size[0] == 0
#endif  // CONFIG_PALETTE
      ) {
    filter_intra_mode_info->use_filter_intra_mode[0] =
        aom_read(r, cm->fc->filter_intra_probs[0], ACCT_STR);
    if (filter_intra_mode_info->use_filter_intra_mode[0]) {
      filter_intra_mode_info->filter_intra_mode[0] =
          read_uniform(r, FILTER_INTRA_MODES);
    }
    if (counts) {
      ++counts->filter_intra[0]
                            [filter_intra_mode_info->use_filter_intra_mode[0]];
    }
  }
  if (mbmi->uv_mode == DC_PRED
#if CONFIG_PALETTE
      && mbmi->palette_mode_info.palette_size[1] == 0
#endif  // CONFIG_PALETTE
      ) {
    filter_intra_mode_info->use_filter_intra_mode[1] =
        aom_read(r, cm->fc->filter_intra_probs[1], ACCT_STR);
    if (filter_intra_mode_info->use_filter_intra_mode[1]) {
      filter_intra_mode_info->filter_intra_mode[1] =
          read_uniform(r, FILTER_INTRA_MODES);
    }
    if (counts) {
      ++counts->filter_intra[1]
                            [filter_intra_mode_info->use_filter_intra_mode[1]];
    }
  }
}
#endif  // CONFIG_FILTER_INTRA

#if CONFIG_EXT_INTRA
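// For directional intra modes, reads the angle delta (and, with
// CONFIG_INTRA_INTERP, the intra interpolation filter) for luma and chroma.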
static void read_intra_angle_info(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                  aom_reader *r) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
#if CONFIG_INTRA_INTERP
  const int ctx = av1_get_pred_context_intra_interp(xd);
  int p_angle;
#endif  // CONFIG_INTRA_INTERP

  (void)cm;
  if (bsize < BLOCK_8X8) return;

  if (av1_is_directional_mode(mbmi->mode, bsize)) {
    const int max_angle_delta = av1_get_max_angle_delta(mbmi->sb_type, 0);
    mbmi->angle_delta[0] =
        read_uniform(r, 2 * max_angle_delta + 1) - max_angle_delta;
#if CONFIG_INTRA_INTERP
    p_angle = mode_to_angle_map[mbmi->mode] +
              mbmi->angle_delta[0] * av1_get_angle_step(mbmi->sb_type, 0);
    if (av1_is_intra_filter_switchable(p_angle)) {
      FRAME_COUNTS *counts = xd->counts;
      mbmi->intra_filter = aom_read_tree(
          r, av1_intra_filter_tree, cm->fc->intra_filter_probs[ctx], ACCT_STR);
      if (counts) ++counts->intra_filter[ctx][mbmi->intra_filter];
    } else {
      mbmi->intra_filter = INTRA_FILTER_LINEAR;
    }
#endif  // CONFIG_INTRA_INTERP
  }

  if (av1_is_directional_mode(mbmi->uv_mode, bsize)) {
    mbmi->angle_delta[1] =
        read_uniform(r, 2 * MAX_ANGLE_DELTA_UV + 1) - MAX_ANGLE_DELTA_UV;
  }
}
#endif  // CONFIG_EXT_INTRA

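// Decodes the transform type. The type is only coded for non-skip blocks at a
// nonzero qindex and otherwise defaults to DCT_DCT; with CONFIG_EXT_TX the
// symbol is drawn from the extended transform set of the block.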
static void read_tx_type(const AV1_COMMON *const cm, MACROBLOCKD *xd,
                         MB_MODE_INFO *mbmi,
#if CONFIG_SUPERTX
                         int supertx_enabled,
#endif
                         aom_reader *r) {
  const int inter_block = is_inter_block(mbmi);
#if CONFIG_VAR_TX
  const TX_SIZE tx_size = inter_block ? mbmi->min_tx_size : mbmi->tx_size;
#else
  const TX_SIZE tx_size = mbmi->tx_size;
#endif
  if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
    const TX_SIZE square_tx_size = txsize_sqr_map[tx_size];
    if (get_ext_tx_types(tx_size, mbmi->sb_type, inter_block) > 1 &&
        ((!cm->seg.enabled && cm->base_qindex > 0) ||
         (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
        !mbmi->skip &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      int eset = get_ext_tx_set(tx_size, mbmi->sb_type, inter_block);
      FRAME_COUNTS *counts = xd->counts;

      if (inter_block) {
        if (eset > 0) {
          mbmi->tx_type = aom_read_tree(
              r, av1_ext_tx_inter_tree[eset],
              cm->fc->inter_ext_tx_prob[eset][square_tx_size], ACCT_STR);
          if (counts)
            ++counts->inter_ext_tx[eset][square_tx_size][mbmi->tx_type];
        }
      } else if (ALLOW_INTRA_EXT_TX) {
        if (eset > 0) {
          mbmi->tx_type = aom_read_tree(
              r, av1_ext_tx_intra_tree[eset],
              cm->fc->intra_ext_tx_prob[eset][square_tx_size][mbmi->mode],
              ACCT_STR);
          if (counts)
            ++counts->intra_ext_tx[eset][square_tx_size][mbmi->mode]
                                  [mbmi->tx_type];
        }
      }
    } else {
      mbmi->tx_type = DCT_DCT;
    }
#else

    if (tx_size < TX_32X32 &&
        ((!cm->seg.enabled && cm->base_qindex > 0) ||
         (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
        !mbmi->skip &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      FRAME_COUNTS *counts = xd->counts;

      if (inter_block) {
#if CONFIG_EC_MULTISYMBOL
        mbmi->tx_type = av1_ext_tx_inv[aom_read_symbol(
            r, cm->fc->inter_ext_tx_cdf[tx_size], TX_TYPES, ACCT_STR)];
#else
        mbmi->tx_type = aom_read_tree(
            r, av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[tx_size], ACCT_STR);
#endif
        if (counts) ++counts->inter_ext_tx[tx_size][mbmi->tx_type];
      } else {
        const TX_TYPE tx_type_nom = intra_mode_to_tx_type_context[mbmi->mode];
#if CONFIG_EC_MULTISYMBOL
        mbmi->tx_type = av1_ext_tx_inv[aom_read_symbol(
            r, cm->fc->intra_ext_tx_cdf[tx_size][tx_type_nom], TX_TYPES,
            ACCT_STR)];
#else
        mbmi->tx_type = aom_read_tree(
            r, av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[tx_size][tx_type_nom],
            ACCT_STR);
#endif
        if (counts) ++counts->intra_ext_tx[tx_size][tx_type_nom][mbmi->tx_type];
      }
    } else {
      mbmi->tx_type = DCT_DCT;
    }
#endif  // CONFIG_EXT_TX
  }
}

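// Mode info parsing for blocks in intra-only frames: segment id, skip flag,
// optional delta qindex, tx size, luma/chroma intra modes and the related
// side information (angle deltas, palette, filter intra, tx type).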
static void read_intra_frame_mode_info(AV1_COMMON *const cm,
                                       MACROBLOCKD *const xd, int mi_row,
                                       int mi_col, aom_reader *r) {
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MODE_INFO *above_mi = xd->above_mi;
  const MODE_INFO *left_mi = xd->left_mi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  int i;
  const int mi_offset = mi_row * cm->mi_cols + mi_col;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];

  // TODO(slavarnway): move x_mis, y_mis into xd ?????
  const int x_mis = AOMMIN(cm->mi_cols - mi_col, bw);
  const int y_mis = AOMMIN(cm->mi_rows - mi_row, bh);
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
#elif CONFIG_EC_MULTISYMBOL
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif

  mbmi->segment_id = read_intra_segment_id(cm, xd, mi_offset, x_mis, y_mis, r);
  mbmi->skip = read_skip(cm, xd, mbmi->segment_id, r);

#if CONFIG_DELTA_Q
  if (cm->delta_q_present_flag) {
    xd->current_qindex =
        xd->prev_qindex +
        read_delta_qindex(cm, xd, r, mbmi, mi_col, mi_row) * cm->delta_q_res;
    xd->prev_qindex = xd->current_qindex;
  }
#endif

  mbmi->tx_size = read_tx_size(cm, xd, 0, 1, r);
  mbmi->ref_frame[0] = INTRA_FRAME;
  mbmi->ref_frame[1] = NONE;

#if CONFIG_CB4X4
  (void)i;
  mbmi->mode =
#if CONFIG_EC_MULTISYMBOL
      read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 0));
#else
      read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
#endif
#else
  switch (bsize) {
    case BLOCK_4X4:
      for (i = 0; i < 4; ++i)
        mi->bmi[i].as_mode =
#if CONFIG_EC_MULTISYMBOL
            read_intra_mode(r,
                            get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, i));
#else
            read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, i));
#endif
      mbmi->mode = mi->bmi[3].as_mode;
      break;
    case BLOCK_4X8:
      mi->bmi[0].as_mode = mi->bmi[2].as_mode =
#if CONFIG_EC_MULTISYMBOL
          read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 0));
#else
          read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
#endif
      mi->bmi[1].as_mode = mi->bmi[3].as_mode = mbmi->mode =
#if CONFIG_EC_MULTISYMBOL
          read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 1));
#else
          read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 1));
#endif
      break;
    case BLOCK_8X4:
      mi->bmi[0].as_mode = mi->bmi[1].as_mode =
#if CONFIG_EC_MULTISYMBOL
          read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 0));
#else
          read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
#endif
      mi->bmi[2].as_mode = mi->bmi[3].as_mode = mbmi->mode =
#if CONFIG_EC_MULTISYMBOL
          read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 2));
#else
          read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 2));
#endif
      break;
    default:
      mbmi->mode =
#if CONFIG_EC_MULTISYMBOL
          read_intra_mode(r, get_y_mode_cdf(ec_ctx, mi, above_mi, left_mi, 0));
#else
          read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
#endif
  }
#endif

  mbmi->uv_mode = read_intra_mode_uv(cm, xd, r, mbmi->mode);
#if CONFIG_EXT_INTRA
  read_intra_angle_info(cm, xd, r);
#endif  // CONFIG_EXT_INTRA
#if CONFIG_PALETTE
  mbmi->palette_mode_info.palette_size[0] = 0;
  mbmi->palette_mode_info.palette_size[1] = 0;
  if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
    read_palette_mode_info(cm, xd, r);
#endif  // CONFIG_PALETTE
#if CONFIG_FILTER_INTRA
  mbmi->filter_intra_mode_info.use_filter_intra_mode[0] = 0;
  mbmi->filter_intra_mode_info.use_filter_intra_mode[1] = 0;
  if (bsize >= BLOCK_8X8) read_filter_intra_mode_info(cm, xd, r);
#endif  // CONFIG_FILTER_INTRA

  read_tx_type(cm, xd, mbmi,
#if CONFIG_SUPERTX
               0,
#endif
               r);
}

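// Decodes one motion vector component as sign, magnitude class, integer
// offset, fractional (1/4-pel) part and an optional high-precision (1/8-pel)
// bit.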
static int read_mv_component(aom_reader *r, nmv_component *mvcomp, int usehp) {
  int mag, d, fr, hp;
  const int sign = aom_read(r, mvcomp->sign, ACCT_STR);
  const int mv_class =
#if CONFIG_EC_MULTISYMBOL
      aom_read_symbol(r, mvcomp->class_cdf, MV_CLASSES, ACCT_STR);
#else
      aom_read_tree(r, av1_mv_class_tree, mvcomp->classes, ACCT_STR);
#endif
  const int class0 = mv_class == MV_CLASS_0;

  // Integer part
  if (class0) {
    d = aom_read(r, mvcomp->class0[0], ACCT_STR);
    mag = 0;
  } else {
    int i;
    const int n = mv_class + CLASS0_BITS - 1;  // number of bits

    d = 0;
    for (i = 0; i < n; ++i) d |= aom_read(r, mvcomp->bits[i], ACCT_STR) << i;
    mag = CLASS0_SIZE << (mv_class + 2);
  }

// Fractional part
#if CONFIG_EC_MULTISYMBOL
  fr = aom_read_symbol(r, class0 ? mvcomp->class0_fp_cdf[d] : mvcomp->fp_cdf,
                       MV_FP_SIZE, ACCT_STR);
#else
  fr = aom_read_tree(r, av1_mv_fp_tree,
                     class0 ? mvcomp->class0_fp[d] : mvcomp->fp, ACCT_STR);
#endif

  // High precision part (if hp is not used, the default value of the hp is 1)
  hp = usehp ? aom_read(r, class0 ? mvcomp->class0_hp : mvcomp->hp, ACCT_STR)
             : 1;

  // Result
  mag += ((d << 3) | (fr << 1) | hp) + 1;
  return sign ? -mag : mag;
}

static INLINE void read_mv(aom_reader *r, MV *mv, const MV *ref,
                           nmv_context *ctx, nmv_context_counts *counts,
                           int allow_hp) {
  MV_JOINT_TYPE joint_type;
  MV diff = { 0, 0 };
  joint_type =
#if CONFIG_EC_MULTISYMBOL
      (MV_JOINT_TYPE)aom_read_symbol(r, ctx->joint_cdf, MV_JOINTS, ACCT_STR);
#else
      (MV_JOINT_TYPE)aom_read_tree(r, av1_mv_joint_tree, ctx->joints, ACCT_STR);
#endif

  if (mv_joint_vertical(joint_type))
    diff.row = read_mv_component(r, &ctx->comps[0], allow_hp);

  if (mv_joint_horizontal(joint_type))
    diff.col = read_mv_component(r, &ctx->comps[1], allow_hp);

  av1_inc_mv(&diff, counts, allow_hp);

  mv->row = ref->row + diff.row;
  mv->col = ref->col + diff.col;
}

static REFERENCE_MODE read_block_reference_mode(AV1_COMMON *cm,
                                                const MACROBLOCKD *xd,
                                                aom_reader *r) {
  if (cm->reference_mode == REFERENCE_MODE_SELECT) {
    const int ctx = av1_get_reference_mode_context(cm, xd);
    const REFERENCE_MODE mode =
        (REFERENCE_MODE)aom_read(r, cm->fc->comp_inter_prob[ctx], ACCT_STR);
    FRAME_COUNTS *counts = xd->counts;
    if (counts) ++counts->comp_inter[ctx][mode];
    return mode;  // SINGLE_REFERENCE or COMPOUND_REFERENCE
  } else {
    return cm->reference_mode;
  }
}

// Read the reference frames.
static void read_ref_frames(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                            aom_reader *r, int segment_id,
                            MV_REFERENCE_FRAME ref_frame[2]) {
  FRAME_CONTEXT *const fc = cm->fc;
  FRAME_COUNTS *counts = xd->counts;

  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    ref_frame[0] = (MV_REFERENCE_FRAME)get_segdata(&cm->seg, segment_id,
                                                   SEG_LVL_REF_FRAME);
    ref_frame[1] = NONE;
  } else {
    const REFERENCE_MODE mode = read_block_reference_mode(cm, xd, r);
    // FIXME(rbultje) I'm pretty sure this breaks segmentation ref frame coding
    if (mode == COMPOUND_REFERENCE) {
#if CONFIG_EXT_REFS
      const int idx = cm->ref_frame_sign_bias[cm->comp_bwd_ref[0]];
#else
      const int idx = cm->ref_frame_sign_bias[cm->comp_fixed_ref];
#endif  // CONFIG_EXT_REFS
      const int ctx = av1_get_pred_context_comp_ref_p(cm, xd);
      const int bit = aom_read(r, fc->comp_ref_prob[ctx][0], ACCT_STR);

      if (counts) ++counts->comp_ref[ctx][0][bit];

#if CONFIG_EXT_REFS
      // Decode forward references.
      if (!bit) {
        const int ctx1 = av1_get_pred_context_comp_ref_p1(cm, xd);
        const int bit1 = aom_read(r, fc->comp_ref_prob[ctx1][1], ACCT_STR);
        if (counts) ++counts->comp_ref[ctx1][1][bit1];
        ref_frame[!idx] = cm->comp_fwd_ref[bit1 ? 0 : 1];
      } else {
        const int ctx2 = av1_get_pred_context_comp_ref_p2(cm, xd);
        const int bit2 = aom_read(r, fc->comp_ref_prob[ctx2][2], ACCT_STR);
        if (counts) ++counts->comp_ref[ctx2][2][bit2];
        ref_frame[!idx] = cm->comp_fwd_ref[bit2 ? 3 : 2];
      }

      // Decode backward references.
      {
        const int ctx_bwd = av1_get_pred_context_comp_bwdref_p(cm, xd);
        const int bit_bwd =
            aom_read(r, fc->comp_bwdref_prob[ctx_bwd][0], ACCT_STR);
        if (counts) ++counts->comp_bwdref[ctx_bwd][0][bit_bwd];
        ref_frame[idx] = cm->comp_bwd_ref[bit_bwd];
      }
#else
      ref_frame[!idx] = cm->comp_var_ref[bit];
      ref_frame[idx] = cm->comp_fixed_ref;
#endif  // CONFIG_EXT_REFS
    } else if (mode == SINGLE_REFERENCE) {
#if CONFIG_EXT_REFS
      const int ctx0 = av1_get_pred_context_single_ref_p1(xd);
      const int bit0 = aom_read(r, fc->single_ref_prob[ctx0][0], ACCT_STR);
      if (counts) ++counts->single_ref[ctx0][0][bit0];

      if (bit0) {
        const int ctx1 = av1_get_pred_context_single_ref_p2(xd);
        const int bit1 = aom_read(r, fc->single_ref_prob[ctx1][1], ACCT_STR);
        if (counts) ++counts->single_ref[ctx1][1][bit1];
        ref_frame[0] = bit1 ? ALTREF_FRAME : BWDREF_FRAME;
      } else {
        const int ctx2 = av1_get_pred_context_single_ref_p3(xd);
        const int bit2 = aom_read(r, fc->single_ref_prob[ctx2][2], ACCT_STR);
        if (counts) ++counts->single_ref[ctx2][2][bit2];
        if (bit2) {
          const int ctx4 = av1_get_pred_context_single_ref_p5(xd);
          const int bit4 = aom_read(r, fc->single_ref_prob[ctx4][4], ACCT_STR);
          if (counts) ++counts->single_ref[ctx4][4][bit4];
          ref_frame[0] = bit4 ? GOLDEN_FRAME : LAST3_FRAME;
        } else {
          const int ctx3 = av1_get_pred_context_single_ref_p4(xd);
          const int bit3 = aom_read(r, fc->single_ref_prob[ctx3][3], ACCT_STR);
          if (counts) ++counts->single_ref[ctx3][3][bit3];
          ref_frame[0] = bit3 ? LAST2_FRAME : LAST_FRAME;
        }
      }
#else
      const int ctx0 = av1_get_pred_context_single_ref_p1(xd);
      const int bit0 = aom_read(r, fc->single_ref_prob[ctx0][0], ACCT_STR);
      if (counts) ++counts->single_ref[ctx0][0][bit0];

      if (bit0) {
        const int ctx1 = av1_get_pred_context_single_ref_p2(xd);
        const int bit1 = aom_read(r, fc->single_ref_prob[ctx1][1], ACCT_STR);
        if (counts) ++counts->single_ref[ctx1][1][bit1];
        ref_frame[0] = bit1 ? ALTREF_FRAME : GOLDEN_FRAME;
      } else {
        ref_frame[0] = LAST_FRAME;
      }
#endif  // CONFIG_EXT_REFS

      ref_frame[1] = NONE;
    } else {
      assert(0 && "Invalid prediction mode.");
    }
  }
}

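// Reads the interpolation filter(s) for the block. With CONFIG_DUAL_FILTER a
// separate filter is coded per direction (and copied for the second
// reference) whenever the corresponding MV has a subpel component.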
static INLINE void read_mb_interp_filter(AV1_COMMON *const cm,
                                         MACROBLOCKD *const xd,
                                         MB_MODE_INFO *const mbmi,
                                         aom_reader *r) {
  FRAME_COUNTS *counts = xd->counts;
#if CONFIG_DUAL_FILTER
  int dir;
  if (cm->interp_filter != SWITCHABLE) {
    for (dir = 0; dir < 4; ++dir) mbmi->interp_filter[dir] = cm->interp_filter;
  } else {
    for (dir = 0; dir < 2; ++dir) {
      const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
      mbmi->interp_filter[dir] = EIGHTTAP_REGULAR;

      if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
          (mbmi->ref_frame[1] > INTRA_FRAME &&
           has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
#if CONFIG_EC_MULTISYMBOL
        mbmi->interp_filter[dir] =
            (InterpFilter)av1_switchable_interp_inv[aom_read_symbol(
                r, cm->fc->switchable_interp_cdf[ctx], SWITCHABLE_FILTERS,
                ACCT_STR)];
#else
        mbmi->interp_filter[dir] = (InterpFilter)aom_read_tree(
            r, av1_switchable_interp_tree, cm->fc->switchable_interp_prob[ctx],
            ACCT_STR);
#endif
        if (counts) ++counts->switchable_interp[ctx][mbmi->interp_filter[dir]];
      }
    }
    // The index system works as:
    // (0, 1) -> (vertical, horizontal) filter types for the first ref frame.
    // (2, 3) -> (vertical, horizontal) filter types for the second ref frame.
    mbmi->interp_filter[2] = mbmi->interp_filter[0];
    mbmi->interp_filter[3] = mbmi->interp_filter[1];
  }
#else   // CONFIG_DUAL_FILTER
  if (cm->interp_filter != SWITCHABLE) {
    mbmi->interp_filter = cm->interp_filter;
  } else {
    const int ctx = av1_get_pred_context_switchable_interp(xd);
#if CONFIG_EC_MULTISYMBOL
    mbmi->interp_filter =
        (InterpFilter)av1_switchable_interp_inv[aom_read_symbol(
            r, cm->fc->switchable_interp_cdf[ctx], SWITCHABLE_FILTERS,
            ACCT_STR)];
#else
    mbmi->interp_filter = (InterpFilter)aom_read_tree(
        r, av1_switchable_interp_tree, cm->fc->switchable_interp_prob[ctx],
        ACCT_STR);
#endif
    if (counts) ++counts->switchable_interp[ctx][mbmi->interp_filter];
  }
#endif  // CONFIG_DUAL_FILTER
}

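// Decodes the intra prediction mode info for an intra-coded block inside an
// inter frame: luma mode(s), chroma mode, and any extension-tool syntax
// (extended intra angles, palette, filter-intra) enabled at build time.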
static void read_intra_block_mode_info(AV1_COMMON *const cm,
                                       MACROBLOCKD *const xd, MODE_INFO *mi,
                                       aom_reader *r) {
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mi->mbmi.sb_type;
  int i;

  mbmi->ref_frame[0] = INTRA_FRAME;
  mbmi->ref_frame[1] = NONE;

#if CONFIG_CB4X4
  (void)i;
  mbmi->mode = read_intra_mode_y(cm, xd, r, size_group_lookup[bsize]);
#else
  switch (bsize) {
    case BLOCK_4X4:
      for (i = 0; i < 4; ++i)
        mi->bmi[i].as_mode = read_intra_mode_y(cm, xd, r, 0);
      mbmi->mode = mi->bmi[3].as_mode;
      break;
    case BLOCK_4X8:
      mi->bmi[0].as_mode = mi->bmi[2].as_mode = read_intra_mode_y(cm, xd, r, 0);
      mi->bmi[1].as_mode = mi->bmi[3].as_mode = mbmi->mode =
          read_intra_mode_y(cm, xd, r, 0);
      break;
    case BLOCK_8X4:
      mi->bmi[0].as_mode = mi->bmi[1].as_mode = read_intra_mode_y(cm, xd, r, 0);
      mi->bmi[2].as_mode = mi->bmi[3].as_mode = mbmi->mode =
          read_intra_mode_y(cm, xd, r, 0);
      break;
    default:
      mbmi->mode = read_intra_mode_y(cm, xd, r, size_group_lookup[bsize]);
  }
#endif

  mbmi->uv_mode = read_intra_mode_uv(cm, xd, r, mbmi->mode);
#if CONFIG_EXT_INTRA
  read_intra_angle_info(cm, xd, r);
#endif  // CONFIG_EXT_INTRA
#if CONFIG_PALETTE
  mbmi->palette_mode_info.palette_size[0] = 0;
  mbmi->palette_mode_info.palette_size[1] = 0;
  if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
    read_palette_mode_info(cm, xd, r);
#endif  // CONFIG_PALETTE
#if CONFIG_FILTER_INTRA
  mbmi->filter_intra_mode_info.use_filter_intra_mode[0] = 0;
  mbmi->filter_intra_mode_info.use_filter_intra_mode[1] = 0;
  if (bsize >= BLOCK_8X8) read_filter_intra_mode_info(cm, xd, r);
#endif  // CONFIG_FILTER_INTRA
}

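// Returns 1 if both components of the motion vector lie strictly inside the
// representable (MV_LOW, MV_UPP) range.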
static INLINE int is_mv_valid(const MV *mv) {
  return mv->row > MV_LOW && mv->row < MV_UPP && mv->col > MV_LOW &&
         mv->col < MV_UPP;
}

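// Derives the final motion vector(s) for the given prediction mode: NEWMV
// variants entropy-decode an MV difference against the reference MV, while
// NEAREST/NEAR/ZERO variants copy the corresponding candidate. Returns 0 if
// the mode is unknown or a decoded MV falls outside the valid range.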
static INLINE int assign_mv(AV1_COMMON *cm, MACROBLOCKD *xd,
                            PREDICTION_MODE mode,
                            MV_REFERENCE_FRAME ref_frame[2], int block,
                            int_mv mv[2], int_mv ref_mv[2],
                            int_mv nearest_mv[2], int_mv near_mv[2],
                            int is_compound, int allow_hp, aom_reader *r) {
  int i;
  int ret = 1;
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
#else
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif
#if CONFIG_REF_MV
  MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
#if CONFIG_CB4X4
  int_mv *pred_mv = mbmi->pred_mv;
  (void)block;
#else
  BLOCK_SIZE bsize = mbmi->sb_type;
  int_mv *pred_mv =
      (bsize >= BLOCK_8X8) ? mbmi->pred_mv : xd->mi[0]->bmi[block].pred_mv;
#endif  // CONFIG_CB4X4
#else
  (void)block;
#endif  // CONFIG_REF_MV
  (void)ref_frame;
  (void)cm;

  switch (mode) {
#if CONFIG_EXT_INTER
    case NEWFROMNEARMV:
#endif  // CONFIG_EXT_INTER
    case NEWMV: {
      FRAME_COUNTS *counts = xd->counts;
#if !CONFIG_REF_MV
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
#endif
      for (i = 0; i < 1 + is_compound; ++i) {
#if CONFIG_REF_MV
        int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
        int nmv_ctx =
            av1_nmv_ctx(xd->ref_mv_count[rf_type], xd->ref_mv_stack[rf_type], i,
                        mbmi->ref_mv_idx);
        nmv_context_counts *const mv_counts =
            counts ? &counts->mv[nmv_ctx] : NULL;
        read_mv(r, &mv[i].as_mv, &ref_mv[i].as_mv, &ec_ctx->nmvc[nmv_ctx],
                mv_counts, allow_hp);
#else
        read_mv(r, &mv[i].as_mv, &ref_mv[i].as_mv, &ec_ctx->nmvc, mv_counts,
                allow_hp);
#endif
        ret = ret && is_mv_valid(&mv[i].as_mv);

#if CONFIG_REF_MV
        pred_mv[i].as_int = ref_mv[i].as_int;
#endif
      }
      break;
    }
    case NEARESTMV: {
      mv[0].as_int = nearest_mv[0].as_int;
      if (is_compound) mv[1].as_int = nearest_mv[1].as_int;

#if CONFIG_REF_MV
      pred_mv[0].as_int = nearest_mv[0].as_int;
      if (is_compound) pred_mv[1].as_int = nearest_mv[1].as_int;
#endif
      break;
    }
    case NEARMV: {
      mv[0].as_int = near_mv[0].as_int;
      if (is_compound) mv[1].as_int = near_mv[1].as_int;

#if CONFIG_REF_MV
      pred_mv[0].as_int = near_mv[0].as_int;
      if (is_compound) pred_mv[1].as_int = near_mv[1].as_int;
#endif
      break;
    }
    case ZEROMV: {
#if CONFIG_GLOBAL_MOTION
      mv[0].as_int = gm_get_motion_vector(&cm->global_motion[ref_frame[0]],
                                          cm->allow_high_precision_mv)
                         .as_int;
      if (is_compound)
        mv[1].as_int = gm_get_motion_vector(&cm->global_motion[ref_frame[1]],
                                            cm->allow_high_precision_mv)
                           .as_int;
#else
      mv[0].as_int = 0;
      if (is_compound) mv[1].as_int = 0;
#endif  // CONFIG_GLOBAL_MOTION

#if CONFIG_REF_MV
      pred_mv[0].as_int = 0;
      if (is_compound) pred_mv[1].as_int = 0;
#endif
      break;
    }
#if CONFIG_EXT_INTER
    case NEW_NEWMV: {
      FRAME_COUNTS *counts = xd->counts;
#if !CONFIG_REF_MV
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
#endif
      assert(is_compound);
      for (i = 0; i < 2; ++i) {
#if CONFIG_REF_MV
        int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
        int nmv_ctx =
            av1_nmv_ctx(xd->ref_mv_count[rf_type], xd->ref_mv_stack[rf_type], i,
                        mbmi->ref_mv_idx);
        nmv_context_counts *const mv_counts =
            counts ? &counts->mv[nmv_ctx] : NULL;
        read_mv(r, &mv[i].as_mv, &ref_mv[i].as_mv, &ec_ctx->nmvc[nmv_ctx],
                mv_counts, allow_hp);
#else
        read_mv(r, &mv[i].as_mv, &ref_mv[i].as_mv, &ec_ctx->nmvc, mv_counts,
                allow_hp);
#endif
        ret = ret && is_mv_valid(&mv[i].as_mv);
      }
      break;
    }
    case NEAREST_NEARESTMV: {
      assert(is_compound);
      mv[0].as_int = nearest_mv[0].as_int;
      mv[1].as_int = nearest_mv[1].as_int;
      break;
    }
    case NEAREST_NEARMV: {
      assert(is_compound);
      mv[0].as_int = nearest_mv[0].as_int;
      mv[1].as_int = near_mv[1].as_int;
      break;
    }
    case NEAR_NEARESTMV: {
      assert(is_compound);
      mv[0].as_int = near_mv[0].as_int;
      mv[1].as_int = nearest_mv[1].as_int;
      break;
    }
    case NEAR_NEARMV: {
      assert(is_compound);
      mv[0].as_int = near_mv[0].as_int;
      mv[1].as_int = near_mv[1].as_int;
      break;
    }
    case NEW_NEARESTMV: {
      FRAME_COUNTS *counts = xd->counts;
#if CONFIG_REF_MV
      int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
      int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
                                xd->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
      nmv_context_counts *const mv_counts =
          counts ? &counts->mv[nmv_ctx] : NULL;
      read_mv(r, &mv[0].as_mv, &ref_mv[0].as_mv, &ec_ctx->nmvc[nmv_ctx],
              mv_counts, allow_hp);
#else
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
      read_mv(r, &mv[0].as_mv, &ref_mv[0].as_mv, &ec_ctx->nmvc, mv_counts,
              allow_hp);
#endif
      assert(is_compound);
      ret = ret && is_mv_valid(&mv[0].as_mv);
      mv[1].as_int = nearest_mv[1].as_int;
      break;
    }
    case NEAREST_NEWMV: {
      FRAME_COUNTS *counts = xd->counts;
#if CONFIG_REF_MV
      int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
      int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
                                xd->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
      nmv_context_counts *const mv_counts =
          counts ? &counts->mv[nmv_ctx] : NULL;
      mv[0].as_int = nearest_mv[0].as_int;
      read_mv(r, &mv[1].as_mv, &ref_mv[1].as_mv, &ec_ctx->nmvc[nmv_ctx],
              mv_counts, allow_hp);
#else
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
      mv[0].as_int = nearest_mv[0].as_int;
      read_mv(r, &mv[1].as_mv, &ref_mv[1].as_mv, &ec_ctx->nmvc, mv_counts,
              allow_hp);
#endif
      assert(is_compound);
      ret = ret && is_mv_valid(&mv[1].as_mv);
      break;
    }
    case NEAR_NEWMV: {
      FRAME_COUNTS *counts = xd->counts;
#if CONFIG_REF_MV
      int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
      int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
                                xd->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
      nmv_context_counts *const mv_counts =
          counts ? &counts->mv[nmv_ctx] : NULL;
      mv[0].as_int = near_mv[0].as_int;
      read_mv(r, &mv[1].as_mv, &ref_mv[1].as_mv, &ec_ctx->nmvc[nmv_ctx],
              mv_counts, allow_hp);
#else
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
      mv[0].as_int = near_mv[0].as_int;
      read_mv(r, &mv[1].as_mv, &ref_mv[1].as_mv, &ec_ctx->nmvc, mv_counts,
              allow_hp);
#endif
      assert(is_compound);

      ret = ret && is_mv_valid(&mv[1].as_mv);
      break;
    }
    case NEW_NEARMV: {
      FRAME_COUNTS *counts = xd->counts;
#if CONFIG_REF_MV
      int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
      int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[rf_type],
                                xd->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
      nmv_context_counts *const mv_counts =
          counts ? &counts->mv[nmv_ctx] : NULL;
      read_mv(r, &mv[0].as_mv, &ref_mv[0].as_mv, &ec_ctx->nmvc[nmv_ctx],
              mv_counts, allow_hp);
#else
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
      read_mv(r, &mv[0].as_mv, &ref_mv[0].as_mv, &ec_ctx->nmvc, mv_counts,
              allow_hp);
#endif
      assert(is_compound);
      ret = ret && is_mv_valid(&mv[0].as_mv);
      mv[1].as_int = near_mv[1].as_int;
      break;
    }
    case ZERO_ZEROMV: {
      assert(is_compound);
      mv[0].as_int = 0;
      mv[1].as_int = 0;
      break;
    }
#endif  // CONFIG_EXT_INTER
    default: { return 0; }
  }
  return ret;
}

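// Returns whether the current block is inter coded, either from the segment's
// reference-frame feature or by decoding the intra/inter flag.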
static int read_is_inter_block(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                               int segment_id, aom_reader *r) {
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    return get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) != INTRA_FRAME;
  } else {
    const int ctx = av1_get_intra_inter_context(xd);
    const int is_inter = aom_read(r, cm->fc->intra_inter_prob[ctx], ACCT_STR);
    FRAME_COUNTS *counts = xd->counts;
    if (counts) ++counts->intra_inter[ctx][is_inter];
    return is_inter;
  }
}

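// Frame-parallel decode callback: blocks until the previous frame has been
// decoded far enough (up to the given mi row) for MV referencing.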
static void fpm_sync(void *const data, int mi_row) {
  AV1Decoder *const pbi = (AV1Decoder *)data;
  av1_frameworker_wait(pbi->frame_worker_owner, pbi->common.prev_frame,
                       mi_row << pbi->common.mib_size_log2);
}

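// Decodes all mode info for an inter block: reference frames, inter mode,
// motion vector candidates and MVs (including sub-8x8 partitions), plus the
// interintra, motion-mode, compound-type and interpolation-filter syntax
// enabled by the experimental build flags.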
static void read_inter_block_mode_info(AV1Decoder *const pbi,
                                       MACROBLOCKD *const xd,
                                       MODE_INFO *const mi,
#if (CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION || CONFIG_EXT_INTER) && \
    CONFIG_SUPERTX
                                       int mi_row, int mi_col, aom_reader *r,
                                       int supertx_enabled) {
#else
                                       int mi_row, int mi_col, aom_reader *r) {
#endif  // (CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION || CONFIG_EXT_INTER) && CONFIG_SUPERTX
  AV1_COMMON *const cm = &pbi->common;
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;
  const int unify_bsize = CONFIG_CB4X4;
  int_mv nearestmv[2], nearmv[2];
  int_mv ref_mvs[MODE_CTX_REF_FRAMES][MAX_MV_REF_CANDIDATES];
#if CONFIG_EXT_INTER
  int mv_idx;
#endif  // CONFIG_EXT_INTER
  int ref, is_compound;
  int16_t inter_mode_ctx[MODE_CTX_REF_FRAMES];
#if CONFIG_REF_MV && CONFIG_EXT_INTER
  int16_t compound_inter_mode_ctx[MODE_CTX_REF_FRAMES];
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
  int16_t mode_ctx = 0;
#if CONFIG_WARPED_MOTION
  double pts[144], pts_inref[144];
#endif  // CONFIG_WARPED_MOTION

#if CONFIG_PALETTE
  mbmi->palette_mode_info.palette_size[0] = 0;
  mbmi->palette_mode_info.palette_size[1] = 0;
#endif  // CONFIG_PALETTE

  read_ref_frames(cm, xd, r, mbmi->segment_id, mbmi->ref_frame);
  is_compound = has_second_ref(mbmi);

  for (ref = 0; ref < 1 + is_compound; ++ref) {
    MV_REFERENCE_FRAME frame = mbmi->ref_frame[ref];

    av1_find_mv_refs(cm, xd, mi, frame,
#if CONFIG_REF_MV
                     &xd->ref_mv_count[frame], xd->ref_mv_stack[frame],
#if CONFIG_EXT_INTER
                     compound_inter_mode_ctx,
#endif  // CONFIG_EXT_INTER
#endif
                     ref_mvs[frame], mi_row, mi_col, fpm_sync, (void *)pbi,
                     inter_mode_ctx);
  }

#if CONFIG_REF_MV
  if (is_compound) {
    MV_REFERENCE_FRAME ref_frame = av1_ref_frame_type(mbmi->ref_frame);
    av1_find_mv_refs(cm, xd, mi, ref_frame, &xd->ref_mv_count[ref_frame],
                     xd->ref_mv_stack[ref_frame],
#if CONFIG_EXT_INTER
                     compound_inter_mode_ctx,
#endif  // CONFIG_EXT_INTER
                     ref_mvs[ref_frame], mi_row, mi_col, fpm_sync, (void *)pbi,
                     inter_mode_ctx);

    if (xd->ref_mv_count[ref_frame] < 2) {
      MV_REFERENCE_FRAME rf[2];
      int_mv zeromv[2];
      av1_set_ref_frame(rf, ref_frame);
#if CONFIG_GLOBAL_MOTION
      zeromv[0].as_int = gm_get_motion_vector(&cm->global_motion[rf[0]],
                                              cm->allow_high_precision_mv)
                             .as_int;
      zeromv[1].as_int = (rf[1] != NONE)
                             ? gm_get_motion_vector(&cm->global_motion[rf[1]],
                                                    cm->allow_high_precision_mv)
                                   .as_int
                             : 0;
#else
      zeromv[0].as_int = zeromv[1].as_int = 0;
#endif
      for (ref = 0; ref < 2; ++ref) {
        lower_mv_precision(&ref_mvs[rf[ref]][0].as_mv, allow_hp);
        lower_mv_precision(&ref_mvs[rf[ref]][1].as_mv, allow_hp);
      }

      if (ref_mvs[rf[0]][0].as_int != zeromv[0].as_int ||
          ref_mvs[rf[0]][1].as_int != zeromv[0].as_int ||
          ref_mvs[rf[1]][0].as_int != zeromv[1].as_int ||
          ref_mvs[rf[1]][1].as_int != zeromv[1].as_int)
        inter_mode_ctx[ref_frame] &= ~(1 << ALL_ZERO_FLAG_OFFSET);
    }
  }

#if CONFIG_EXT_INTER
  if (is_compound)
    mode_ctx = compound_inter_mode_ctx[mbmi->ref_frame[0]];
  else
#endif  // CONFIG_EXT_INTER
    mode_ctx =
        av1_mode_context_analyzer(inter_mode_ctx, mbmi->ref_frame, bsize, -1);
  mbmi->ref_mv_idx = 0;
#else
  mode_ctx = inter_mode_ctx[mbmi->ref_frame[0]];
#endif

  if (segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
    mbmi->mode = ZEROMV;
    if (bsize < BLOCK_8X8) {
      aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
                         "Invalid usage of segment feature on small blocks");
      return;
    }
  } else {
    if (bsize >= BLOCK_8X8 || unify_bsize) {
#if CONFIG_EXT_INTER
      if (is_compound)
        mbmi->mode = read_inter_compound_mode(cm, xd, r, mode_ctx);
      else
#endif  // CONFIG_EXT_INTER
        mbmi->mode = read_inter_mode(cm, xd,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                                     mbmi,
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
                                     r, mode_ctx);
#if CONFIG_REF_MV
      if (mbmi->mode == NEARMV || mbmi->mode == NEWMV)
        read_drl_idx(cm, xd, mbmi, r);
#endif
    }
  }

#if CONFIG_EXT_INTER
  if ((bsize < BLOCK_8X8 && unify_bsize) ||
      (mbmi->mode != ZEROMV && mbmi->mode != ZERO_ZEROMV)) {
#else
  if ((bsize < BLOCK_8X8 && !unify_bsize) || mbmi->mode != ZEROMV) {
#endif  // CONFIG_EXT_INTER
    for (ref = 0; ref < 1 + is_compound; ++ref) {
      av1_find_best_ref_mvs(allow_hp, ref_mvs[mbmi->ref_frame[ref]],
                            &nearestmv[ref], &nearmv[ref]);
    }
  }

#if CONFIG_REF_MV
  if (mbmi->ref_mv_idx > 0) {
    int_mv cur_mv =
        xd->ref_mv_stack[mbmi->ref_frame[0]][1 + mbmi->ref_mv_idx].this_mv;
    nearmv[0] = cur_mv;
  }

#if CONFIG_EXT_INTER
  if (is_compound && bsize >= BLOCK_8X8 && mbmi->mode != ZERO_ZEROMV) {
#else
  if (is_compound && (bsize >= BLOCK_8X8 || unify_bsize) &&
      mbmi->mode != NEWMV && mbmi->mode != ZEROMV) {
#endif  // CONFIG_EXT_INTER
    uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

#if CONFIG_EXT_INTER
    if (xd->ref_mv_count[ref_frame_type] > 0) {
#else
    if (xd->ref_mv_count[ref_frame_type] == 1 && mbmi->mode == NEARESTMV) {
#endif  // CONFIG_EXT_INTER
#if CONFIG_EXT_INTER
      if (mbmi->mode == NEAREST_NEARESTMV) {
#endif  // CONFIG_EXT_INTER
        nearestmv[0] = xd->ref_mv_stack[ref_frame_type][0].this_mv;
        nearestmv[1] = xd->ref_mv_stack[ref_frame_type][0].comp_mv;
        lower_mv_precision(&nearestmv[0].as_mv, allow_hp);
        lower_mv_precision(&nearestmv[1].as_mv, allow_hp);
#if CONFIG_EXT_INTER
      } else if (mbmi->mode == NEAREST_NEWMV || mbmi->mode == NEAREST_NEARMV) {
        nearestmv[0] = xd->ref_mv_stack[ref_frame_type][0].this_mv;
        lower_mv_precision(&nearestmv[0].as_mv, allow_hp);
      } else if (mbmi->mode == NEW_NEARESTMV || mbmi->mode == NEAR_NEARESTMV) {
        nearestmv[1] = xd->ref_mv_stack[ref_frame_type][0].comp_mv;
        lower_mv_precision(&nearestmv[1].as_mv, allow_hp);
      }
#endif  // CONFIG_EXT_INTER
    }

#if CONFIG_EXT_INTER
    if (xd->ref_mv_count[ref_frame_type] > 1) {
      if (mbmi->mode == NEAR_NEWMV || mbmi->mode == NEAR_NEARESTMV ||
          mbmi->mode == NEAR_NEARMV) {
        nearmv[0] = xd->ref_mv_stack[ref_frame_type][1].this_mv;
        lower_mv_precision(&nearmv[0].as_mv, allow_hp);
      }

      if (mbmi->mode == NEW_NEARMV || mbmi->mode == NEAREST_NEARMV ||
          mbmi->mode == NEAR_NEARMV) {
        nearmv[1] = xd->ref_mv_stack[ref_frame_type][1].comp_mv;
        lower_mv_precision(&nearmv[1].as_mv, allow_hp);
      }
    }
#else
    if (xd->ref_mv_count[ref_frame_type] > 1) {
      int ref_mv_idx = 1 + mbmi->ref_mv_idx;
      nearestmv[0] = xd->ref_mv_stack[ref_frame_type][0].this_mv;
      nearestmv[1] = xd->ref_mv_stack[ref_frame_type][0].comp_mv;
      nearmv[0] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].this_mv;
      nearmv[1] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].comp_mv;
    }
#endif  // CONFIG_EXT_INTER
  }
#endif

#if !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION
  read_mb_interp_filter(cm, xd, mbmi, r);
#endif  // !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION

  if (bsize < BLOCK_8X8 && !unify_bsize) {
    const int num_4x4_w = 1 << xd->bmode_blocks_wl;
    const int num_4x4_h = 1 << xd->bmode_blocks_hl;
    int idx, idy;
    PREDICTION_MODE b_mode;
    int_mv nearest_sub8x8[2], near_sub8x8[2];
#if CONFIG_EXT_INTER
    int_mv ref_mv[2][2];
#endif  // CONFIG_EXT_INTER
    for (idy = 0; idy < 2; idy += num_4x4_h) {
      for (idx = 0; idx < 2; idx += num_4x4_w) {
        int_mv block[2];
        const int j = idy * 2 + idx;
        int_mv ref_mv_s8[2];
#if CONFIG_REF_MV
#if CONFIG_EXT_INTER
        if (!is_compound)
#endif  // CONFIG_EXT_INTER
          mode_ctx = av1_mode_context_analyzer(inter_mode_ctx, mbmi->ref_frame,
                                               bsize, j);
#endif
#if CONFIG_EXT_INTER
        if (is_compound)
          b_mode = read_inter_compound_mode(cm, xd, r, mode_ctx);
        else
#endif  // CONFIG_EXT_INTER
          b_mode = read_inter_mode(cm, xd,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                                   mbmi,
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
                                   r, mode_ctx);

#if CONFIG_EXT_INTER
        mv_idx = (b_mode == NEWFROMNEARMV) ? 1 : 0;

        if (b_mode != ZEROMV && b_mode != ZERO_ZEROMV) {
#else
        if (b_mode != ZEROMV) {
#endif  // CONFIG_EXT_INTER
#if CONFIG_REF_MV
          CANDIDATE_MV ref_mv_stack[2][MAX_REF_MV_STACK_SIZE];
          uint8_t ref_mv_count[2];
#endif
          for (ref = 0; ref < 1 + is_compound; ++ref)
#if CONFIG_EXT_INTER
          {
            int_mv mv_ref_list[MAX_MV_REF_CANDIDATES];
            av1_update_mv_context(xd, mi, mbmi->ref_frame[ref], mv_ref_list, j,
                                  mi_row, mi_col, NULL);
#endif  // CONFIG_EXT_INTER
            av1_append_sub8x8_mvs_for_idx(cm, xd, j, ref, mi_row, mi_col,
#if CONFIG_REF_MV
                                          ref_mv_stack[ref], &ref_mv_count[ref],
#endif
#if CONFIG_EXT_INTER
                                          mv_ref_list,
#endif  // CONFIG_EXT_INTER
                                          &nearest_sub8x8[ref],
                                          &near_sub8x8[ref]);
#if CONFIG_EXT_INTER
            if (have_newmv_in_inter_mode(b_mode)) {
              mv_ref_list[0].as_int = nearest_sub8x8[ref].as_int;
              mv_ref_list[1].as_int = near_sub8x8[ref].as_int;
              av1_find_best_ref_mvs(allow_hp, mv_ref_list, &ref_mv[0][ref],
                                    &ref_mv[1][ref]);
            }
          }
#endif  // CONFIG_EXT_INTER
        }

        for (ref = 0; ref < 1 + is_compound && b_mode != ZEROMV; ++ref) {
#if CONFIG_REF_MV
          ref_mv_s8[ref] = nearest_sub8x8[ref];
          lower_mv_precision(&ref_mv_s8[ref].as_mv, allow_hp);
#else
          ref_mv_s8[ref] = nearestmv[ref];
#endif
        }
#if CONFIG_EXT_INTER
        (void)ref_mv_s8;
#endif

        if (!assign_mv(cm, xd, b_mode, mbmi->ref_frame, j, block,
#if CONFIG_EXT_INTER
                       ref_mv[mv_idx],
#else
                       ref_mv_s8,
#endif  // CONFIG_EXT_INTER
                       nearest_sub8x8, near_sub8x8, is_compound, allow_hp, r)) {
          xd->corrupted |= 1;
          break;
        }

        mi->bmi[j].as_mv[0].as_int = block[0].as_int;
        mi->bmi[j].as_mode = b_mode;
        if (is_compound) mi->bmi[j].as_mv[1].as_int = block[1].as_int;

        if (num_4x4_h == 2) mi->bmi[j + 2] = mi->bmi[j];
        if (num_4x4_w == 2) mi->bmi[j + 1] = mi->bmi[j];
      }
    }

#if CONFIG_REF_MV
    mbmi->pred_mv[0].as_int = mi->bmi[3].pred_mv[0].as_int;
    mbmi->pred_mv[1].as_int = mi->bmi[3].pred_mv[1].as_int;
#endif
    mi->mbmi.mode = b_mode;

    mbmi->mv[0].as_int = mi->bmi[3].as_mv[0].as_int;
    mbmi->mv[1].as_int = mi->bmi[3].as_mv[1].as_int;
  } else {
    int_mv ref_mv[2];
    ref_mv[0] = nearestmv[0];
    ref_mv[1] = nearestmv[1];

    for (ref = 0; ref < 1 + is_compound && mbmi->mode == NEWMV; ++ref) {
#if CONFIG_REF_MV
      uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
      if (xd->ref_mv_count[ref_frame_type] > 1) {
        ref_mv[ref] =
            (ref == 0)
                ? xd->ref_mv_stack[ref_frame_type][mbmi->ref_mv_idx].this_mv
                : xd->ref_mv_stack[ref_frame_type][mbmi->ref_mv_idx].comp_mv;
        clamp_mv_ref(&ref_mv[ref].as_mv, xd->n8_w << MI_SIZE_LOG2,
                     xd->n8_h << MI_SIZE_LOG2, xd);
      }
#endif
      nearestmv[ref] = ref_mv[ref];
    }

    xd->corrupted |=
        !assign_mv(cm, xd, mbmi->mode, mbmi->ref_frame, 0, mbmi->mv,
#if CONFIG_EXT_INTER
                   mbmi->mode == NEWFROMNEARMV ? nearmv : nearestmv,
#else
                   ref_mv,
#endif  // CONFIG_EXT_INTER
                   nearestmv, nearmv, is_compound, allow_hp, r);
  }

#if CONFIG_EXT_INTER
  mbmi->use_wedge_interintra = 0;
  if (cm->reference_mode != COMPOUND_REFERENCE &&
#if CONFIG_SUPERTX
      !supertx_enabled &&
#endif
      is_interintra_allowed(mbmi)) {
    const int bsize_group = size_group_lookup[bsize];
    const int interintra =
        aom_read(r, cm->fc->interintra_prob[bsize_group], ACCT_STR);
    if (xd->counts) xd->counts->interintra[bsize_group][interintra]++;
    assert(mbmi->ref_frame[1] == NONE);
    if (interintra) {
      const INTERINTRA_MODE interintra_mode =
          read_interintra_mode(cm, xd, r, bsize_group);
      mbmi->ref_frame[1] = INTRA_FRAME;
      mbmi->interintra_mode = interintra_mode;
#if CONFIG_EXT_INTRA
      mbmi->angle_delta[0] = 0;
      mbmi->angle_delta[1] = 0;
#if CONFIG_INTRA_INTERP
      mbmi->intra_filter = INTRA_FILTER_LINEAR;
#endif  // CONFIG_INTRA_INTERP
#endif  // CONFIG_EXT_INTRA
#if CONFIG_FILTER_INTRA
      mbmi->filter_intra_mode_info.use_filter_intra_mode[0] = 0;
      mbmi->filter_intra_mode_info.use_filter_intra_mode[1] = 0;
#endif  // CONFIG_FILTER_INTRA
      if (is_interintra_wedge_used(bsize)) {
        mbmi->use_wedge_interintra =
            aom_read(r, cm->fc->wedge_interintra_prob[bsize], ACCT_STR);
        if (xd->counts)
          xd->counts->wedge_interintra[bsize][mbmi->use_wedge_interintra]++;
        if (mbmi->use_wedge_interintra) {
          mbmi->interintra_wedge_index =
              aom_read_literal(r, get_wedge_bits_lookup(bsize), ACCT_STR);
          mbmi->interintra_wedge_sign = 0;
        }
      }
    }
  }
#endif  // CONFIG_EXT_INTER

#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
  mbmi->motion_mode = SIMPLE_TRANSLATION;
#if CONFIG_WARPED_MOTION
  if (mbmi->sb_type >= BLOCK_8X8 && !has_second_ref(mbmi))
    mbmi->num_proj_ref[0] = findSamples(cm, xd, mi_row, mi_col, pts, pts_inref);
#endif  // CONFIG_WARPED_MOTION

#if CONFIG_SUPERTX
  if (!supertx_enabled) {
#endif  // CONFIG_SUPERTX
#if CONFIG_EXT_INTER
    if (mbmi->ref_frame[1] != INTRA_FRAME)
#endif  // CONFIG_EXT_INTER
      mbmi->motion_mode = read_motion_mode(cm, xd, mbmi, r);
#if CONFIG_WARPED_MOTION
    if (mbmi->motion_mode == WARPED_CAUSAL) {
      mbmi->wm_params[0].wmtype = DEFAULT_WMTYPE;
      find_projection(mbmi->num_proj_ref[0], pts, pts_inref,
                      &mbmi->wm_params[0]);
    }
#endif  // CONFIG_WARPED_MOTION
#if CONFIG_SUPERTX
  }
#endif  // CONFIG_SUPERTX
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION

#if CONFIG_EXT_INTER
  mbmi->interinter_compound_data.type = COMPOUND_AVERAGE;
  if (cm->reference_mode != SINGLE_REFERENCE &&
      is_inter_compound_mode(mbmi->mode)
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
      && mbmi->motion_mode == SIMPLE_TRANSLATION
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
      ) {
    mbmi->interinter_compound_data.type = aom_read_tree(
        r, av1_compound_type_tree, cm->fc->compound_type_prob[bsize], ACCT_STR);
    if (xd->counts)
      xd->counts->compound_interinter[bsize]
                                     [mbmi->interinter_compound_data.type]++;
    if (mbmi->interinter_compound_data.type == COMPOUND_WEDGE) {
      mbmi->interinter_compound_data.wedge_index =
          aom_read_literal(r, get_wedge_bits_lookup(bsize), ACCT_STR);
      mbmi->interinter_compound_data.wedge_sign = aom_read_bit(r, ACCT_STR);
    }
#if CONFIG_COMPOUND_SEGMENT
    else if (mbmi->interinter_compound_data.type == COMPOUND_SEG) {
      mbmi->interinter_compound_data.mask_type =
          aom_read_literal(r, MAX_SEG_MASK_BITS, ACCT_STR);
    }
#endif  // CONFIG_COMPOUND_SEGMENT
  }
#endif  // CONFIG_EXT_INTER

#if CONFIG_WARPED_MOTION
  if (mbmi->motion_mode != WARPED_CAUSAL) {
#endif  // CONFIG_WARPED_MOTION
#if CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION
    read_mb_interp_filter(cm, xd, mbmi, r);
#endif  // CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION
#if CONFIG_WARPED_MOTION
  } else {
#if CONFIG_DUAL_FILTER
    mbmi->interp_filter[0] =
        cm->interp_filter == SWITCHABLE ? EIGHTTAP_REGULAR : cm->interp_filter;
    mbmi->interp_filter[1] =
        cm->interp_filter == SWITCHABLE ? EIGHTTAP_REGULAR : cm->interp_filter;
#else
    mbmi->interp_filter =
        cm->interp_filter == SWITCHABLE ? EIGHTTAP_REGULAR : cm->interp_filter;
#endif  // CONFIG_DUAL_FILTER
  }
#endif  // CONFIG_WARPED_MOTION
}

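// Decodes per-block mode info for a block in an inter frame: segment id,
// skip flag, optional delta-q, the intra/inter flag, transform size, and then
// either the inter or intra block mode info, followed by the transform type.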
static void read_inter_frame_mode_info(AV1Decoder *const pbi,
                                       MACROBLOCKD *const xd,
#if CONFIG_SUPERTX
                                       int supertx_enabled,
#endif  // CONFIG_SUPERTX
                                       int mi_row, int mi_col, aom_reader *r) {
  AV1_COMMON *const cm = &pbi->common;
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  int inter_block = 1;
#if CONFIG_VAR_TX
  BLOCK_SIZE bsize = mbmi->sb_type;
#endif  // CONFIG_VAR_TX

  mbmi->mv[0].as_int = 0;
  mbmi->mv[1].as_int = 0;
  mbmi->segment_id = read_inter_segment_id(cm, xd, mi_row, mi_col, r);
#if CONFIG_SUPERTX
  if (!supertx_enabled) {
#endif  // CONFIG_SUPERTX
    mbmi->skip = read_skip(cm, xd, mbmi->segment_id, r);
#if CONFIG_DELTA_Q
    if (cm->delta_q_present_flag) {
      xd->current_qindex =
          xd->prev_qindex +
          read_delta_qindex(cm, xd, r, mbmi, mi_col, mi_row) * cm->delta_q_res;
      xd->prev_qindex = xd->current_qindex;
    }
#endif
    inter_block = read_is_inter_block(cm, xd, mbmi->segment_id, r);

#if CONFIG_VAR_TX
    xd->above_txfm_context = cm->above_txfm_context + mi_col;
    xd->left_txfm_context =
        xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);

    if (cm->tx_mode == TX_MODE_SELECT &&
#if CONFIG_CB4X4
        (bsize >= BLOCK_8X8 ||
         (bsize >= BLOCK_4X4 && inter_block && !mbmi->skip)) &&
#else
        bsize >= BLOCK_8X8 &&
#endif
        !mbmi->skip && inter_block) {
      const TX_SIZE max_tx_size = max_txsize_rect_lookup[bsize];
      const int bh = tx_size_high_unit[max_tx_size];
      const int bw = tx_size_wide_unit[max_tx_size];
      const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
      const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
      int idx, idy;

      mbmi->min_tx_size = TX_SIZES_ALL;
      for (idy = 0; idy < height; idy += bh)
        for (idx = 0; idx < width; idx += bw)
          read_tx_size_vartx(cm, xd, mbmi, xd->counts, max_tx_size,
                             height != width, idy, idx, r);
    } else {
      mbmi->tx_size = read_tx_size(cm, xd, inter_block, !mbmi->skip, r);

      if (inter_block) {
        const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
        const int height = block_size_high[bsize] >> tx_size_high_log2[0];
        int idx, idy;
        for (idy = 0; idy < height; ++idy)
          for (idx = 0; idx < width; ++idx)
            mbmi->inter_tx_size[idy >> 1][idx >> 1] = mbmi->tx_size;
      }
      mbmi->min_tx_size = get_min_tx_size(mbmi->tx_size);
      set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, mbmi->skip, xd);
    }
#else
    mbmi->tx_size = read_tx_size(cm, xd, inter_block, !mbmi->skip, r);
#endif  // CONFIG_VAR_TX
#if CONFIG_SUPERTX
  }
#if CONFIG_VAR_TX
  else if (inter_block) {
    const int width = num_4x4_blocks_wide_lookup[bsize];
    const int height = num_4x4_blocks_high_lookup[bsize];
    int idx, idy;
    xd->mi[0]->mbmi.tx_size = xd->supertx_size;
    for (idy = 0; idy < height; ++idy)
      for (idx = 0; idx < width; ++idx)
        xd->mi[0]->mbmi.inter_tx_size[idy >> 1][idx >> 1] = xd->supertx_size;
  }
#endif  // CONFIG_VAR_TX
#endif  // CONFIG_SUPERTX

  if (inter_block)
    read_inter_block_mode_info(pbi, xd,
#if (CONFIG_MOTION_VAR || CONFIG_EXT_INTER || CONFIG_WARPED_MOTION) && \
    CONFIG_SUPERTX

                               mi, mi_row, mi_col, r, supertx_enabled);
#else
                               mi, mi_row, mi_col, r);
#endif  // (CONFIG_MOTION_VAR || CONFIG_EXT_INTER || CONFIG_WARPED_MOTION) && CONFIG_SUPERTX
  else
    read_intra_block_mode_info(cm, xd, mi, r);

  read_tx_type(cm, xd, mbmi,
#if CONFIG_SUPERTX
               supertx_enabled,
#endif
               r);
}

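// Entry point for mode info parsing. Dispatches to the intra- or inter-frame
// reader and mirrors the decoded reference frames and motion vectors into the
// per-frame MV buffer (cm->cur_frame->mvs) used for MV prediction.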
void av1_read_mode_info(AV1Decoder *const pbi, MACROBLOCKD *xd,
#if CONFIG_SUPERTX
                        int supertx_enabled,
#endif  // CONFIG_SUPERTX
                        int mi_row, int mi_col, aom_reader *r, int x_mis,
                        int y_mis) {
  AV1_COMMON *const cm = &pbi->common;
  MODE_INFO *const mi = xd->mi[0];
  MV_REF *frame_mvs = cm->cur_frame->mvs + mi_row * cm->mi_cols + mi_col;
  int w, h;

  if (frame_is_intra_only(cm)) {
    read_intra_frame_mode_info(cm, xd, mi_row, mi_col, r);
#if CONFIG_REF_MV
    for (h = 0; h < y_mis; ++h) {
      MV_REF *const frame_mv = frame_mvs + h * cm->mi_cols;
      for (w = 0; w < x_mis; ++w) {
        MV_REF *const mv = frame_mv + w;
        mv->ref_frame[0] = NONE;
        mv->ref_frame[1] = NONE;
      }
    }
#endif
  } else {
    read_inter_frame_mode_info(pbi, xd,
#if CONFIG_SUPERTX
                               supertx_enabled,
#endif  // CONFIG_SUPERTX
                               mi_row, mi_col, r);
    for (h = 0; h < y_mis; ++h) {
      MV_REF *const frame_mv = frame_mvs + h * cm->mi_cols;
      for (w = 0; w < x_mis; ++w) {
        MV_REF *const mv = frame_mv + w;
        mv->ref_frame[0] = mi->mbmi.ref_frame[0];
        mv->ref_frame[1] = mi->mbmi.ref_frame[1];
        mv->mv[0].as_int = mi->mbmi.mv[0].as_int;
        mv->mv[1].as_int = mi->mbmi.mv[1].as_int;
#if CONFIG_REF_MV
        mv->pred_mv[0].as_int = mi->mbmi.pred_mv[0].as_int;
        mv->pred_mv[1].as_int = mi->mbmi.pred_mv[1].as_int;
#endif
      }
    }
  }
}