blob: d97e2801708a8546962bb66e7a5abbc574e0ce6d [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <assert.h>
13
14#include "av1/common/common.h"
15#include "av1/common/entropy.h"
16#include "av1/common/entropymode.h"
17#include "av1/common/entropymv.h"
18#include "av1/common/mvref_common.h"
19#include "av1/common/pred_common.h"
20#include "av1/common/reconinter.h"
21#include "av1/common/seg_common.h"
22
Yaowu Xuc27fc142016-08-22 16:08:15 -070023#include "av1/decoder/decodeframe.h"
Jingning Han1aab8182016-06-03 11:09:06 -070024#include "av1/decoder/decodemv.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070025
Yaowu Xuf883b422016-08-30 14:01:10 -070026#include "aom_dsp/aom_dsp_common.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070027
Yaowu Xuf883b422016-08-30 14:01:10 -070028static INLINE int read_uniform(aom_reader *r, int n) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070029 int l = get_unsigned_bits(n);
30 int m = (1 << l) - n;
Yaowu Xuf883b422016-08-30 14:01:10 -070031 int v = aom_read_literal(r, l - 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -070032
33 assert(l != 0);
34
35 if (v < m)
36 return v;
37 else
Yaowu Xuf883b422016-08-30 14:01:10 -070038 return (v << 1) - m + aom_read_literal(r, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -070039}
40
Yaowu Xuf883b422016-08-30 14:01:10 -070041static PREDICTION_MODE read_intra_mode(aom_reader *r, const aom_prob *p) {
42 return (PREDICTION_MODE)aom_read_tree(r, av1_intra_mode_tree, p);
Yaowu Xuc27fc142016-08-22 16:08:15 -070043}
44
Yaowu Xuf883b422016-08-30 14:01:10 -070045static PREDICTION_MODE read_intra_mode_y(AV1_COMMON *cm, MACROBLOCKD *xd,
46 aom_reader *r, int size_group) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070047 const PREDICTION_MODE y_mode =
48 read_intra_mode(r, cm->fc->y_mode_prob[size_group]);
49 FRAME_COUNTS *counts = xd->counts;
50 if (counts) ++counts->y_mode[size_group][y_mode];
51 return y_mode;
52}
53
Yaowu Xuf883b422016-08-30 14:01:10 -070054static PREDICTION_MODE read_intra_mode_uv(AV1_COMMON *cm, MACROBLOCKD *xd,
55 aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -070056 PREDICTION_MODE y_mode) {
57 const PREDICTION_MODE uv_mode =
58 read_intra_mode(r, cm->fc->uv_mode_prob[y_mode]);
59 FRAME_COUNTS *counts = xd->counts;
60 if (counts) ++counts->uv_mode[y_mode][uv_mode];
61 return uv_mode;
62}
63
#if CONFIG_EXT_INTER
// Decodes the inter-intra mode for the given block-size group and updates
// the interintra_mode counter when counting is enabled.
static INTERINTRA_MODE read_interintra_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                            aom_reader *r, int size_group) {
  FRAME_COUNTS *const counts = xd->counts;
  const INTERINTRA_MODE ii_mode = (INTERINTRA_MODE)aom_read_tree(
      r, av1_interintra_mode_tree, cm->fc->interintra_mode_prob[size_group]);
  if (counts != NULL) ++counts->interintra_mode[size_group][ii_mode];
  return ii_mode;
}
#endif  // CONFIG_EXT_INTER
74
// Decodes the inter prediction mode for a block.
//
// With CONFIG_REF_MV the mode is signalled as a cascade of binary decisions
// (NEWMV? -> ZEROMV? -> NEARESTMV/NEARMV), each conditioned on sub-fields
// packed into `ctx`; without CONFIG_REF_MV it is a single tree read.
// The bit-read order here defines the bitstream syntax — do not reorder.
static PREDICTION_MODE read_inter_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                                       MB_MODE_INFO *mbmi,
#endif
                                       aom_reader *r, int16_t ctx) {
#if CONFIG_REF_MV
  FRAME_COUNTS *counts = xd->counts;
  // Low bits of ctx select the NEWMV probability context.
  int16_t mode_ctx = ctx & NEWMV_CTX_MASK;
  aom_prob mode_prob = cm->fc->newmv_prob[mode_ctx];

  // First decision: is this a NEWMV-class mode? (bit == 0 means yes)
  if (aom_read(r, mode_prob) == 0) {
    if (counts) ++counts->newmv_mode[mode_ctx][0];

#if CONFIG_EXT_INTER
    // Compound blocks always use NEWMV; single-ref blocks may refine to
    // NEWFROMNEARMV via one extra bit.
    if (has_second_ref(mbmi)) {
#endif  // CONFIG_EXT_INTER
      return NEWMV;
#if CONFIG_EXT_INTER
    } else {
      mode_prob = cm->fc->new2mv_prob;
      if (aom_read(r, mode_prob) == 0) {
        if (counts) ++counts->new2mv_mode[0];
        return NEWMV;
      } else {
        if (counts) ++counts->new2mv_mode[1];
        return NEWFROMNEARMV;
      }
    }
#endif  // CONFIG_EXT_INTER
  }
  if (counts) ++counts->newmv_mode[mode_ctx][1];

  // If the reference MV scan found only zero MVs, ZEROMV is implied and no
  // further bits are read.
  if (ctx & (1 << ALL_ZERO_FLAG_OFFSET)) return ZEROMV;

  // Second decision: ZEROMV vs. the reference-MV modes.
  mode_ctx = (ctx >> ZEROMV_OFFSET) & ZEROMV_CTX_MASK;

  mode_prob = cm->fc->zeromv_prob[mode_ctx];
  if (aom_read(r, mode_prob) == 0) {
    if (counts) ++counts->zeromv_mode[mode_ctx][0];
    return ZEROMV;
  }
  if (counts) ++counts->zeromv_mode[mode_ctx][1];

  // Final decision: NEARESTMV vs. NEARMV. The skip flags override the
  // derived context with dedicated contexts 6/7/8.
  mode_ctx = (ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;

  if (ctx & (1 << SKIP_NEARESTMV_OFFSET)) mode_ctx = 6;
  if (ctx & (1 << SKIP_NEARMV_OFFSET)) mode_ctx = 7;
  if (ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) mode_ctx = 8;

  mode_prob = cm->fc->refmv_prob[mode_ctx];

  if (aom_read(r, mode_prob) == 0) {
    if (counts) ++counts->refmv_mode[mode_ctx][0];

    return NEARESTMV;
  } else {
    if (counts) ++counts->refmv_mode[mode_ctx][1];
    return NEARMV;
  }

  // Invalid prediction mode.
  // NOTE(review): unreachable — both branches above return.
  assert(0);
#else
  // Non-REF_MV path: single tree-coded symbol, offset from NEARESTMV.
  const int mode =
      aom_read_tree(r, av1_inter_mode_tree, cm->fc->inter_mode_probs[ctx]);
  FRAME_COUNTS *counts = xd->counts;
  if (counts) ++counts->inter_mode[ctx][mode];

  return NEARESTMV + mode;
#endif
}
146
#if CONFIG_REF_MV
// Decodes the dynamic reference MV list (DRL) index into mbmi->ref_mv_idx.
// Each loop iteration reads at most one bit: bit==0 stops at the current
// candidate, bit==1 advances to the next one. NEWMV indexes candidates
// 0..2; NEARMV starts at candidate 1 (NEARESTMV occupies slot 0) and
// stores the index offset by -1. Bits are only read while enough
// candidates exist in ref_mv_count.
static void read_drl_idx(const AV1_COMMON *cm, MACROBLOCKD *xd,
                         MB_MODE_INFO *mbmi, aom_reader *r) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
  mbmi->ref_mv_idx = 0;

  if (mbmi->mode == NEWMV) {
    int idx;
    for (idx = 0; idx < 2; ++idx) {
      if (xd->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx = av1_drl_ctx(xd->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];
        if (!aom_read(r, drl_prob)) {
          mbmi->ref_mv_idx = idx;
          if (xd->counts) ++xd->counts->drl_mode[drl_ctx][0];
          return;
        }
        mbmi->ref_mv_idx = idx + 1;
        if (xd->counts) ++xd->counts->drl_mode[drl_ctx][1];
      }
    }
  }

  if (mbmi->mode == NEARMV) {
    int idx;
    // Offset the NEARESTMV mode.
    // TODO(jingning): Unify the two syntax decoding loops after the NEARESTMV
    // mode is factored in.
    for (idx = 1; idx < 3; ++idx) {
      if (xd->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx = av1_drl_ctx(xd->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];
        if (!aom_read(r, drl_prob)) {
          mbmi->ref_mv_idx = idx - 1;
          if (xd->counts) ++xd->counts->drl_mode[drl_ctx][0];
          return;
        }
        mbmi->ref_mv_idx = idx;
        if (xd->counts) ++xd->counts->drl_mode[drl_ctx][1];
      }
    }
  }
}
#endif
191
#if CONFIG_EXT_INTER
// Decodes a compound inter mode as a tree-coded offset from
// NEAREST_NEARESTMV, updating the compound-mode counter when enabled.
static PREDICTION_MODE read_inter_compound_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                                aom_reader *r, int16_t ctx) {
  FRAME_COUNTS *const counts = xd->counts;
  const int mode = aom_read_tree(r, av1_inter_compound_mode_tree,
                                 cm->fc->inter_compound_mode_probs[ctx]);

  if (counts != NULL) ++counts->inter_compound_mode[ctx][mode];

  assert(is_inter_compound_mode(NEAREST_NEARESTMV + mode));
  return NEAREST_NEARESTMV + mode;
}
#endif  // CONFIG_EXT_INTER
205
Yaowu Xuf883b422016-08-30 14:01:10 -0700206static int read_segment_id(aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700207 const struct segmentation_probs *segp) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700208 return aom_read_tree(r, av1_segment_tree, segp->tree_probs);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700209}
210
#if CONFIG_VAR_TX
// Recursively decodes the variable transform-size partition for an inter
// block. At each node one bit selects split vs. no-split; splits recurse
// into four quadrants at the next smaller tx size (TX_8X8 splits directly
// to TX_4X4 with no further bits). Leaves record the chosen size into
// mbmi->inter_tx_size / mbmi->tx_size and update the above/left txfm
// partition contexts. Nodes outside the visible block area are skipped
// without reading any bits.
static void read_tx_size_vartx(AV1_COMMON *cm, MACROBLOCKD *xd,
                               MB_MODE_INFO *mbmi, FRAME_COUNTS *counts,
                               TX_SIZE tx_size, int blk_row, int blk_col,
                               aom_reader *r) {
  int is_split = 0;
  const int tx_row = blk_row >> 1;
  const int tx_col = blk_col >> 1;
  int max_blocks_high = num_4x4_blocks_high_lookup[mbmi->sb_type];
  int max_blocks_wide = num_4x4_blocks_wide_lookup[mbmi->sb_type];
  int ctx = txfm_partition_context(xd->above_txfm_context + tx_col,
                                   xd->left_txfm_context + tx_row, tx_size);
  // View of the per-8x8 inter tx-size grid anchored at this node
  // (clang-format splits the array-pointer declarator oddly).
  TX_SIZE(*const inter_tx_size)
  [MAX_MIB_SIZE] =
      (TX_SIZE(*)[MAX_MIB_SIZE]) & mbmi->inter_tx_size[tx_row][tx_col];

  // Clamp the block extent at frame edges (edges are in 1/8-pel units,
  // hence the >> 5 to convert to 4x4-block units).
  if (xd->mb_to_bottom_edge < 0) max_blocks_high += xd->mb_to_bottom_edge >> 5;
  if (xd->mb_to_right_edge < 0) max_blocks_wide += xd->mb_to_right_edge >> 5;

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  is_split = aom_read(r, cm->fc->txfm_partition_prob[ctx]);

  if (is_split) {
    BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
    int bsl = b_width_log2_lookup[bsize];
    int i;

    if (counts) ++counts->txfm_partition[ctx][1];

    // TX_8X8 is the smallest splittable size: force TX_4X4 and stop.
    if (tx_size == TX_8X8) {
      inter_tx_size[0][0] = TX_4X4;
      mbmi->tx_size = TX_4X4;
      txfm_partition_update(xd->above_txfm_context + tx_col,
                            xd->left_txfm_context + tx_row, TX_4X4);
      return;
    }

    assert(bsl > 0);
    --bsl;
    // Recurse into the four quadrants (row-major: i>>1 is the row half,
    // i&1 the column half).
    for (i = 0; i < 4; ++i) {
      int offsetr = blk_row + ((i >> 1) << bsl);
      int offsetc = blk_col + ((i & 0x01) << bsl);
      read_tx_size_vartx(cm, xd, mbmi, counts, tx_size - 1, offsetr, offsetc,
                         r);
    }
  } else {
    int idx, idy;
    // Leaf: fill the covered region of the tx-size grid with this size.
    inter_tx_size[0][0] = tx_size;
    for (idy = 0; idy < num_4x4_blocks_high_txsize_lookup[tx_size] / 2; ++idy)
      for (idx = 0; idx < num_4x4_blocks_wide_txsize_lookup[tx_size] / 2; ++idx)
        inter_tx_size[idy][idx] = tx_size;
    mbmi->tx_size = tx_size;
    if (counts) ++counts->txfm_partition[ctx][0];
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size);
  }
}
#endif
270
Yaowu Xuf883b422016-08-30 14:01:10 -0700271static TX_SIZE read_selected_tx_size(AV1_COMMON *cm, MACROBLOCKD *xd,
272 int tx_size_cat, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700273 FRAME_COUNTS *counts = xd->counts;
274 const int ctx = get_tx_size_context(xd);
Yaowu Xuf883b422016-08-30 14:01:10 -0700275 int tx_size = aom_read_tree(r, av1_tx_size_tree[tx_size_cat],
276 cm->fc->tx_size_probs[tx_size_cat][ctx]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700277 if (counts) ++counts->tx_size[tx_size_cat][ctx][tx_size];
278 return (TX_SIZE)tx_size;
279}
280
Yaowu Xuf883b422016-08-30 14:01:10 -0700281static TX_SIZE read_tx_size_intra(AV1_COMMON *cm, MACROBLOCKD *xd,
282 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700283 TX_MODE tx_mode = cm->tx_mode;
284 BLOCK_SIZE bsize = xd->mi[0]->mbmi.sb_type;
285 if (xd->lossless[xd->mi[0]->mbmi.segment_id]) return TX_4X4;
286 if (bsize >= BLOCK_8X8) {
287 if (tx_mode == TX_MODE_SELECT) {
288 const TX_SIZE tx_size =
289 read_selected_tx_size(cm, xd, intra_tx_size_cat_lookup[bsize], r);
290 assert(tx_size <= max_txsize_lookup[bsize]);
291 return tx_size;
292 } else {
293 return tx_size_from_tx_mode(bsize, cm->tx_mode, 0);
294 }
295 } else {
296 return TX_4X4;
297 }
298}
299
// Determines the transform size for an inter block. Lossless segments use
// TX_4X4. For blocks >= 8x8 with TX_MODE_SELECT (and selection allowed),
// the size is decoded explicitly; with CONFIG_RECT_TX a coded value one
// past the square maximum selects the rectangular maximum instead.
// Otherwise the size is implied by the frame's tx mode / block size.
static TX_SIZE read_tx_size_inter(AV1_COMMON *cm, MACROBLOCKD *xd,
                                  int allow_select, aom_reader *r) {
  TX_MODE tx_mode = cm->tx_mode;
  BLOCK_SIZE bsize = xd->mi[0]->mbmi.sb_type;
  if (xd->lossless[xd->mi[0]->mbmi.segment_id]) return TX_4X4;
  if (bsize >= BLOCK_8X8) {
    if (allow_select && tx_mode == TX_MODE_SELECT) {
      const TX_SIZE coded_tx_size =
          read_selected_tx_size(cm, xd, inter_tx_size_cat_lookup[bsize], r);
#if !CONFIG_RECT_TX
      assert(coded_tx_size <= max_txsize_lookup[bsize]);
#else
      // The "one past max" escape code signals the rectangular tx size.
      if (coded_tx_size > max_txsize_lookup[bsize]) {
        assert(coded_tx_size == max_txsize_lookup[bsize] + 1);
        return max_txsize_rect_lookup[bsize];
      }
#endif  // !CONFIG_RECT_TX
      return coded_tx_size;
    } else {
      return tx_size_from_tx_mode(bsize, cm->tx_mode, 1);
    }
  } else {
#if CONFIG_EXT_TX && CONFIG_RECT_TX
    assert(IMPLIES(tx_mode == ONLY_4X4, bsize == BLOCK_4X4));
    return max_txsize_rect_lookup[bsize];
#else
    return TX_4X4;
#endif
  }
}
330
Yaowu Xuf883b422016-08-30 14:01:10 -0700331static int dec_get_segment_id(const AV1_COMMON *cm, const uint8_t *segment_ids,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700332 int mi_offset, int x_mis, int y_mis) {
333 int x, y, segment_id = INT_MAX;
334
335 for (y = 0; y < y_mis; y++)
336 for (x = 0; x < x_mis; x++)
337 segment_id =
Yaowu Xuf883b422016-08-30 14:01:10 -0700338 AOMMIN(segment_id, segment_ids[mi_offset + y * cm->mi_cols + x]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700339
340 assert(segment_id >= 0 && segment_id < MAX_SEGMENTS);
341 return segment_id;
342}
343
Yaowu Xuf883b422016-08-30 14:01:10 -0700344static void set_segment_id(AV1_COMMON *cm, int mi_offset, int x_mis, int y_mis,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700345 int segment_id) {
346 int x, y;
347
348 assert(segment_id >= 0 && segment_id < MAX_SEGMENTS);
349
350 for (y = 0; y < y_mis; y++)
351 for (x = 0; x < x_mis; x++)
352 cm->current_frame_seg_map[mi_offset + y * cm->mi_cols + x] = segment_id;
353}
354
Yaowu Xuf883b422016-08-30 14:01:10 -0700355static int read_intra_segment_id(AV1_COMMON *const cm, MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700356 int mi_offset, int x_mis, int y_mis,
Yaowu Xuf883b422016-08-30 14:01:10 -0700357 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700358 struct segmentation *const seg = &cm->seg;
359 FRAME_COUNTS *counts = xd->counts;
360 struct segmentation_probs *const segp = &cm->fc->seg;
361 int segment_id;
362
363 if (!seg->enabled) return 0; // Default for disabled segmentation
364
365 assert(seg->update_map && !seg->temporal_update);
366
367 segment_id = read_segment_id(r, segp);
368 if (counts) ++counts->seg.tree_total[segment_id];
369 set_segment_id(cm, mi_offset, x_mis, y_mis, segment_id);
370 return segment_id;
371}
372
Yaowu Xuf883b422016-08-30 14:01:10 -0700373static void copy_segment_id(const AV1_COMMON *cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700374 const uint8_t *last_segment_ids,
375 uint8_t *current_segment_ids, int mi_offset,
376 int x_mis, int y_mis) {
377 int x, y;
378
379 for (y = 0; y < y_mis; y++)
380 for (x = 0; x < x_mis; x++)
381 current_segment_ids[mi_offset + y * cm->mi_cols + x] =
382 last_segment_ids ? last_segment_ids[mi_offset + y * cm->mi_cols + x]
383 : 0;
384}
385
// Decodes the segment id for an inter-frame block and records it into the
// current frame's segment map. Handles three cases: segmentation map not
// updated this frame (copy/predict from last frame, no bits read),
// temporal update (one predicted/not-predicted bit, then an explicit id on
// mispredict), and full explicit signalling.
static int read_inter_segment_id(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                 int mi_row, int mi_col, aom_reader *r) {
  struct segmentation *const seg = &cm->seg;
  FRAME_COUNTS *counts = xd->counts;
  struct segmentation_probs *const segp = &cm->fc->seg;
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  int predicted_segment_id, segment_id;
  const int mi_offset = mi_row * cm->mi_cols + mi_col;
  const int bw = num_8x8_blocks_wide_lookup[mbmi->sb_type];
  const int bh = num_8x8_blocks_high_lookup[mbmi->sb_type];

  // TODO(slavarnway): move x_mis, y_mis into xd ?????
  // Clamp the covered MI region at the frame boundary.
  const int x_mis = AOMMIN(cm->mi_cols - mi_col, bw);
  const int y_mis = AOMMIN(cm->mi_rows - mi_row, bh);

  if (!seg->enabled) return 0;  // Default for disabled segmentation

  // Temporal prediction: minimum id over the co-located region of the
  // previous frame's map (0 when no previous map exists).
  predicted_segment_id = cm->last_frame_seg_map
                             ? dec_get_segment_id(cm, cm->last_frame_seg_map,
                                                  mi_offset, x_mis, y_mis)
                             : 0;

  if (!seg->update_map) {
    // Map unchanged this frame: carry the previous ids forward.
    copy_segment_id(cm, cm->last_frame_seg_map, cm->current_frame_seg_map,
                    mi_offset, x_mis, y_mis);
    return predicted_segment_id;
  }

  if (seg->temporal_update) {
    const int ctx = av1_get_pred_context_seg_id(xd);
    const aom_prob pred_prob = segp->pred_probs[ctx];
    mbmi->seg_id_predicted = aom_read(r, pred_prob);
    if (counts) ++counts->seg.pred[ctx][mbmi->seg_id_predicted];
    if (mbmi->seg_id_predicted) {
      segment_id = predicted_segment_id;
    } else {
      // Prediction failed: read the id explicitly.
      segment_id = read_segment_id(r, segp);
      if (counts) ++counts->seg.tree_mispred[segment_id];
    }
  } else {
    segment_id = read_segment_id(r, segp);
    if (counts) ++counts->seg.tree_total[segment_id];
  }
  set_segment_id(cm, mi_offset, x_mis, y_mis, segment_id);
  return segment_id;
}
432
Yaowu Xuf883b422016-08-30 14:01:10 -0700433static int read_skip(AV1_COMMON *cm, const MACROBLOCKD *xd, int segment_id,
434 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700435 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
436 return 1;
437 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -0700438 const int ctx = av1_get_skip_context(xd);
439 const int skip = aom_read(r, cm->fc->skip_probs[ctx]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700440 FRAME_COUNTS *counts = xd->counts;
441 if (counts) ++counts->skip[ctx][skip];
442 return skip;
443 }
444}
445
// Decodes the palette mode signalling for a block: for each plane whose
// intra mode is DC_PRED, a context-coded "palette used" bit, then (if set)
// the palette size, the palette colors (cm->bit_depth bits each; chroma
// stores U and V interleaved per color), and the color index of the first
// pixel. The luma context depends on whether the above/left neighbors used
// a palette.
static void read_palette_mode_info(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                   aom_reader *r) {
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  int i, n, palette_ctx = 0;
  PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;

  if (mbmi->mode == DC_PRED) {
    // Context = number of neighbors (above/left) that used a luma palette.
    if (above_mi)
      palette_ctx += (above_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    if (left_mi)
      palette_ctx += (left_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    if (aom_read(r, av1_default_palette_y_mode_prob[bsize - BLOCK_8X8]
                                                   [palette_ctx])) {
      // Coded size is offset by the minimum palette size of 2.
      pmi->palette_size[0] =
          aom_read_tree(r, av1_palette_size_tree,
                        av1_default_palette_y_size_prob[bsize - BLOCK_8X8]) +
          2;
      n = pmi->palette_size[0];
      for (i = 0; i < n; ++i)
        pmi->palette_colors[i] = aom_read_literal(r, cm->bit_depth);

      xd->plane[0].color_index_map[0] = read_uniform(r, n);
      assert(xd->plane[0].color_index_map[0] < n);
    }
  }

  if (mbmi->uv_mode == DC_PRED) {
    // Chroma context depends only on whether luma used a palette.
    if (aom_read(r,
                 av1_default_palette_uv_mode_prob[pmi->palette_size[0] > 0])) {
      pmi->palette_size[1] =
          aom_read_tree(r, av1_palette_size_tree,
                        av1_default_palette_uv_size_prob[bsize - BLOCK_8X8]) +
          2;
      n = pmi->palette_size[1];
      // U colors live at offset PALETTE_MAX_SIZE, V at 2 * PALETTE_MAX_SIZE.
      for (i = 0; i < n; ++i) {
        pmi->palette_colors[PALETTE_MAX_SIZE + i] =
            aom_read_literal(r, cm->bit_depth);
        pmi->palette_colors[2 * PALETTE_MAX_SIZE + i] =
            aom_read_literal(r, cm->bit_depth);
      }
      xd->plane[1].color_index_map[0] = read_uniform(r, n);
      assert(xd->plane[1].color_index_map[0] < n);
    }
  }
}
495
496#if CONFIG_EXT_INTRA
// Decodes the extended (filter) intra mode signalling for luma and chroma.
// For each plane with DC_PRED and no palette, one use/not-use bit is read,
// followed by a near-uniform code for the filter-intra mode when used.
// Compiled out (reads nothing) when ALLOW_FILTER_INTRA_MODES is 0.
static void read_ext_intra_mode_info(AV1_COMMON *const cm,
                                     MACROBLOCKD *const xd, aom_reader *r) {
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  FRAME_COUNTS *counts = xd->counts;

#if !ALLOW_FILTER_INTRA_MODES
  // Feature disabled at build time: no bits in the bitstream.
  return;
#endif
  if (mbmi->mode == DC_PRED && mbmi->palette_mode_info.palette_size[0] == 0) {
    mbmi->ext_intra_mode_info.use_ext_intra_mode[0] =
        aom_read(r, cm->fc->ext_intra_probs[0]);
    if (mbmi->ext_intra_mode_info.use_ext_intra_mode[0]) {
      mbmi->ext_intra_mode_info.ext_intra_mode[0] =
          read_uniform(r, FILTER_INTRA_MODES);
    }
    if (counts)
      ++counts->ext_intra[0][mbmi->ext_intra_mode_info.use_ext_intra_mode[0]];
  }
  if (mbmi->uv_mode == DC_PRED &&
      mbmi->palette_mode_info.palette_size[1] == 0) {
    mbmi->ext_intra_mode_info.use_ext_intra_mode[1] =
        aom_read(r, cm->fc->ext_intra_probs[1]);
    if (mbmi->ext_intra_mode_info.use_ext_intra_mode[1]) {
      mbmi->ext_intra_mode_info.ext_intra_mode[1] =
          read_uniform(r, FILTER_INTRA_MODES);
    }
    if (counts)
      ++counts->ext_intra[1][mbmi->ext_intra_mode_info.use_ext_intra_mode[1]];
  }
}
528
// Decodes directional-intra angle deltas (and, for luma, the intra filter
// type) for blocks >= 8x8. The angle delta is a near-uniform code mapped to
// [-MAX_ANGLE_DELTAS, MAX_ANGLE_DELTAS]; the filter is only signalled when
// the resulting prediction angle makes the filter switchable. DC_PRED and
// TM_PRED are non-directional, so no delta is coded for them.
static void read_intra_angle_info(AV1_COMMON *const cm, MACROBLOCKD *const xd,
                                  aom_reader *r) {
  MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const int ctx = av1_get_pred_context_intra_interp(xd);
  int p_angle;

  if (bsize < BLOCK_8X8) return;

  if (mbmi->mode != DC_PRED && mbmi->mode != TM_PRED) {
    mbmi->angle_delta[0] =
        read_uniform(r, 2 * MAX_ANGLE_DELTAS + 1) - MAX_ANGLE_DELTAS;
    p_angle = mode_to_angle_map[mbmi->mode] + mbmi->angle_delta[0] * ANGLE_STEP;
    if (av1_is_intra_filter_switchable(p_angle)) {
      FRAME_COUNTS *counts = xd->counts;
      mbmi->intra_filter = aom_read_tree(r, av1_intra_filter_tree,
                                         cm->fc->intra_filter_probs[ctx]);
      if (counts) ++counts->intra_filter[ctx][mbmi->intra_filter];
    } else {
      // Angle not switchable: filter is implied, no bits read.
      mbmi->intra_filter = INTRA_FILTER_LINEAR;
    }
  }

  if (mbmi->uv_mode != DC_PRED && mbmi->uv_mode != TM_PRED) {
    mbmi->angle_delta[1] =
        read_uniform(r, 2 * MAX_ANGLE_DELTAS + 1) - MAX_ANGLE_DELTAS;
  }
}
557#endif // CONFIG_EXT_INTRA
558
// Decodes all mode info for a block in an intra-only frame: segment id,
// skip flag, tx size, luma mode(s) (per 4x4 sub-block for sub-8x8 sizes),
// chroma mode, and the optional EXT_INTRA / palette / tx-type syntax.
// Neighbor modes (above/left) condition the luma mode probabilities.
static void read_intra_frame_mode_info(AV1_COMMON *const cm,
                                       MACROBLOCKD *const xd, int mi_row,
                                       int mi_col, aom_reader *r) {
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MODE_INFO *above_mi = xd->above_mi;
  const MODE_INFO *left_mi = xd->left_mi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  int i;
  const int mi_offset = mi_row * cm->mi_cols + mi_col;
  const int bw = xd->plane[0].n4_w >> 1;
  const int bh = xd->plane[0].n4_h >> 1;

  // TODO(slavarnway): move x_mis, y_mis into xd ?????
  // Clamp the covered MI region at the frame boundary.
  const int x_mis = AOMMIN(cm->mi_cols - mi_col, bw);
  const int y_mis = AOMMIN(cm->mi_rows - mi_row, bh);

  mbmi->segment_id = read_intra_segment_id(cm, xd, mi_offset, x_mis, y_mis, r);
  mbmi->skip = read_skip(cm, xd, mbmi->segment_id, r);
  mbmi->tx_size = read_tx_size_intra(cm, xd, r);
  mbmi->ref_frame[0] = INTRA_FRAME;
  mbmi->ref_frame[1] = NONE;

  // Sub-8x8 blocks carry one mode per 4x4 unit; mbmi->mode mirrors the
  // bottom-right unit so neighbor-context derivation works uniformly.
  switch (bsize) {
    case BLOCK_4X4:
      for (i = 0; i < 4; ++i)
        mi->bmi[i].as_mode =
            read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, i));
      mbmi->mode = mi->bmi[3].as_mode;
      break;
    case BLOCK_4X8:
      mi->bmi[0].as_mode = mi->bmi[2].as_mode =
          read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
      mi->bmi[1].as_mode = mi->bmi[3].as_mode = mbmi->mode =
          read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 1));
      break;
    case BLOCK_8X4:
      mi->bmi[0].as_mode = mi->bmi[1].as_mode =
          read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
      mi->bmi[2].as_mode = mi->bmi[3].as_mode = mbmi->mode =
          read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 2));
      break;
    default:
      mbmi->mode =
          read_intra_mode(r, get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
  }

  mbmi->uv_mode = read_intra_mode_uv(cm, xd, r, mbmi->mode);
#if CONFIG_EXT_INTRA
  read_intra_angle_info(cm, xd, r);
#endif  // CONFIG_EXT_INTRA
  mbmi->palette_mode_info.palette_size[0] = 0;
  mbmi->palette_mode_info.palette_size[1] = 0;
  if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
    read_palette_mode_info(cm, xd, r);
#if CONFIG_EXT_INTRA
  mbmi->ext_intra_mode_info.use_ext_intra_mode[0] = 0;
  mbmi->ext_intra_mode_info.use_ext_intra_mode[1] = 0;
  if (bsize >= BLOCK_8X8) read_ext_intra_mode_info(cm, xd, r);
#endif  // CONFIG_EXT_INTRA

  // Transform type: only signalled when it is not fixed and the block has
  // coded coefficients (not skipped, non-zero base q, no SEG_LVL_SKIP).
  if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
    if (get_ext_tx_types(mbmi->tx_size, mbmi->sb_type, 0) > 1 &&
        cm->base_qindex > 0 && !mbmi->skip &&
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP) &&
        ALLOW_INTRA_EXT_TX) {
      FRAME_COUNTS *counts = xd->counts;
      int eset = get_ext_tx_set(mbmi->tx_size, mbmi->sb_type, 0);
      if (eset > 0) {
        mbmi->tx_type = aom_read_tree(
            r, av1_ext_tx_intra_tree[eset],
            cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode]);
        if (counts)
          ++counts->intra_ext_tx[eset][mbmi->tx_size][mbmi->mode]
                                [mbmi->tx_type];
      }
    } else {
      mbmi->tx_type = DCT_DCT;
    }
#else
    if (mbmi->tx_size < TX_32X32 && cm->base_qindex > 0 && !mbmi->skip &&
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      FRAME_COUNTS *counts = xd->counts;
      // Probabilities are conditioned on the tx type implied by the mode.
      TX_TYPE tx_type_nom = intra_mode_to_tx_type_context[mbmi->mode];
      mbmi->tx_type =
          aom_read_tree(r, av1_ext_tx_tree,
                        cm->fc->intra_ext_tx_prob[mbmi->tx_size][tx_type_nom]);
      if (counts)
        ++counts->intra_ext_tx[mbmi->tx_size][tx_type_nom][mbmi->tx_type];
    } else {
      mbmi->tx_type = DCT_DCT;
    }
#endif  // CONFIG_EXT_TX
  }
}
655
// Decodes one motion vector component (row or column) in 1/8-pel units.
// Syntax order: sign bit, class symbol, then the integer magnitude (fixed
// class-0 symbol, or mv_class + CLASS0_BITS - 1 raw bits), the 2-bit
// fractional part, and the high-precision bit (implied 1 when usehp is 0).
// Returns the signed magnitude; bit-read order defines the bitstream.
static int read_mv_component(aom_reader *r, const nmv_component *mvcomp,
                             int usehp) {
  int mag, d, fr, hp;
  const int sign = aom_read(r, mvcomp->sign);
  const int mv_class = aom_read_tree(r, av1_mv_class_tree, mvcomp->classes);
  const int class0 = mv_class == MV_CLASS_0;

  // Integer part
  if (class0) {
    d = aom_read_tree(r, av1_mv_class0_tree, mvcomp->class0);
    mag = 0;
  } else {
    int i;
    const int n = mv_class + CLASS0_BITS - 1;  // number of bits

    d = 0;
    for (i = 0; i < n; ++i) d |= aom_read(r, mvcomp->bits[i]) << i;
    // Base magnitude for this class.
    mag = CLASS0_SIZE << (mv_class + 2);
  }

  // Fractional part
  fr = aom_read_tree(r, av1_mv_fp_tree,
                     class0 ? mvcomp->class0_fp[d] : mvcomp->fp);

  // High precision part (if hp is not used, the default value of the hp is 1)
  hp = usehp ? aom_read(r, class0 ? mvcomp->class0_hp : mvcomp->hp) : 1;

  // Result: assemble integer/fraction/hp bits and bias by 1 (magnitude 0 is
  // never coded — the joint type signals zero components).
  mag += ((d << 3) | (fr << 1) | hp) + 1;
  return sign ? -mag : mag;
}
687
// Decodes a motion vector difference and adds it to *ref to produce *mv.
// First decodes the joint type (which components are non-zero) — with
// REF_MV and not EXT_INTER, compound blocks read a dedicated "zero
// residual MV" bit first — then each signalled component, and finally
// feeds the difference into the MV statistics via av1_inc_mv.
static INLINE void read_mv(aom_reader *r, MV *mv, const MV *ref,
#if CONFIG_REF_MV
                           int is_compound,
#endif
                           const nmv_context *ctx, nmv_context_counts *counts,
                           int allow_hp) {
  MV_JOINT_TYPE joint_type;
  // High precision only when allowed AND the reference MV qualifies.
  const int use_hp = allow_hp && av1_use_mv_hp(ref);
  MV diff = { 0, 0 };

#if CONFIG_REF_MV && !CONFIG_EXT_INTER
  if (is_compound) {
    int is_zero_rmv = aom_read(r, ctx->zero_rmv);
    if (is_zero_rmv) {
      joint_type = MV_JOINT_ZERO;
    } else {
      joint_type =
          (MV_JOINT_TYPE)aom_read_tree(r, av1_mv_joint_tree, ctx->joints);
    }
  } else {
    joint_type =
        (MV_JOINT_TYPE)aom_read_tree(r, av1_mv_joint_tree, ctx->joints);
  }
#else
  joint_type = (MV_JOINT_TYPE)aom_read_tree(r, av1_mv_joint_tree, ctx->joints);
#endif

#if CONFIG_REF_MV && CONFIG_EXT_INTER
  (void)is_compound;
#endif

  // Only components flagged by the joint type are present in the bitstream.
  if (mv_joint_vertical(joint_type))
    diff.row = read_mv_component(r, &ctx->comps[0], use_hp);

  if (mv_joint_horizontal(joint_type))
    diff.col = read_mv_component(r, &ctx->comps[1], use_hp);

  av1_inc_mv(&diff, counts, use_hp);

  mv->row = ref->row + diff.row;
  mv->col = ref->col + diff.col;
}
730
Yaowu Xuf883b422016-08-30 14:01:10 -0700731static REFERENCE_MODE read_block_reference_mode(AV1_COMMON *cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700732 const MACROBLOCKD *xd,
Yaowu Xuf883b422016-08-30 14:01:10 -0700733 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700734 if (cm->reference_mode == REFERENCE_MODE_SELECT) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700735 const int ctx = av1_get_reference_mode_context(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700736 const REFERENCE_MODE mode =
Yaowu Xuf883b422016-08-30 14:01:10 -0700737 (REFERENCE_MODE)aom_read(r, cm->fc->comp_inter_prob[ctx]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700738 FRAME_COUNTS *counts = xd->counts;
739 if (counts) ++counts->comp_inter[ctx][mode];
740 return mode; // SINGLE_REFERENCE or COMPOUND_REFERENCE
741 } else {
742 return cm->reference_mode;
743 }
744}
745
// Read the reference frame(s)
Yaowu Xuf883b422016-08-30 14:01:10 -0700747static void read_ref_frames(AV1_COMMON *const cm, MACROBLOCKD *const xd,
748 aom_reader *r, int segment_id,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700749 MV_REFERENCE_FRAME ref_frame[2]) {
750 FRAME_CONTEXT *const fc = cm->fc;
751 FRAME_COUNTS *counts = xd->counts;
752
753 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
754 ref_frame[0] = (MV_REFERENCE_FRAME)get_segdata(&cm->seg, segment_id,
755 SEG_LVL_REF_FRAME);
756 ref_frame[1] = NONE;
757 } else {
758 const REFERENCE_MODE mode = read_block_reference_mode(cm, xd, r);
759 // FIXME(rbultje) I'm pretty sure this breaks segmentation ref frame coding
760 if (mode == COMPOUND_REFERENCE) {
761#if CONFIG_EXT_REFS
762 const int idx = cm->ref_frame_sign_bias[cm->comp_bwd_ref[0]];
763#else
764 const int idx = cm->ref_frame_sign_bias[cm->comp_fixed_ref];
765#endif // CONFIG_EXT_REFS
Yaowu Xuf883b422016-08-30 14:01:10 -0700766 const int ctx = av1_get_pred_context_comp_ref_p(cm, xd);
767 const int bit = aom_read(r, fc->comp_ref_prob[ctx][0]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700768
769 if (counts) ++counts->comp_ref[ctx][0][bit];
770
771#if CONFIG_EXT_REFS
772 // Decode forward references.
773 if (!bit) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700774 const int ctx1 = av1_get_pred_context_comp_ref_p1(cm, xd);
775 const int bit1 = aom_read(r, fc->comp_ref_prob[ctx1][1]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700776 if (counts) ++counts->comp_ref[ctx1][1][bit1];
777 ref_frame[!idx] = cm->comp_fwd_ref[bit1 ? 0 : 1];
778 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -0700779 const int ctx2 = av1_get_pred_context_comp_ref_p2(cm, xd);
780 const int bit2 = aom_read(r, fc->comp_ref_prob[ctx2][2]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700781 if (counts) ++counts->comp_ref[ctx2][2][bit2];
782 ref_frame[!idx] = cm->comp_fwd_ref[bit2 ? 3 : 2];
783 }
784
785 // Decode backward references.
786 {
Yaowu Xuf883b422016-08-30 14:01:10 -0700787 const int ctx_bwd = av1_get_pred_context_comp_bwdref_p(cm, xd);
788 const int bit_bwd = aom_read(r, fc->comp_bwdref_prob[ctx_bwd][0]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700789 if (counts) ++counts->comp_bwdref[ctx_bwd][0][bit_bwd];
790 ref_frame[idx] = cm->comp_bwd_ref[bit_bwd];
791 }
792#else
793 ref_frame[!idx] = cm->comp_var_ref[bit];
794 ref_frame[idx] = cm->comp_fixed_ref;
795#endif // CONFIG_EXT_REFS
796 } else if (mode == SINGLE_REFERENCE) {
797#if CONFIG_EXT_REFS
Yaowu Xuf883b422016-08-30 14:01:10 -0700798 const int ctx0 = av1_get_pred_context_single_ref_p1(xd);
799 const int bit0 = aom_read(r, fc->single_ref_prob[ctx0][0]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700800 if (counts) ++counts->single_ref[ctx0][0][bit0];
801
802 if (bit0) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700803 const int ctx1 = av1_get_pred_context_single_ref_p2(xd);
804 const int bit1 = aom_read(r, fc->single_ref_prob[ctx1][1]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700805 if (counts) ++counts->single_ref[ctx1][1][bit1];
806 ref_frame[0] = bit1 ? ALTREF_FRAME : BWDREF_FRAME;
807 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -0700808 const int ctx2 = av1_get_pred_context_single_ref_p3(xd);
809 const int bit2 = aom_read(r, fc->single_ref_prob[ctx2][2]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700810 if (counts) ++counts->single_ref[ctx2][2][bit2];
811 if (bit2) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700812 const int ctx4 = av1_get_pred_context_single_ref_p5(xd);
813 const int bit4 = aom_read(r, fc->single_ref_prob[ctx4][4]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700814 if (counts) ++counts->single_ref[ctx4][4][bit4];
815 ref_frame[0] = bit4 ? GOLDEN_FRAME : LAST3_FRAME;
816 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -0700817 const int ctx3 = av1_get_pred_context_single_ref_p4(xd);
818 const int bit3 = aom_read(r, fc->single_ref_prob[ctx3][3]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700819 if (counts) ++counts->single_ref[ctx3][3][bit3];
820 ref_frame[0] = bit3 ? LAST2_FRAME : LAST_FRAME;
821 }
822 }
823#else
Yaowu Xuf883b422016-08-30 14:01:10 -0700824 const int ctx0 = av1_get_pred_context_single_ref_p1(xd);
825 const int bit0 = aom_read(r, fc->single_ref_prob[ctx0][0]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700826 if (counts) ++counts->single_ref[ctx0][0][bit0];
827
828 if (bit0) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700829 const int ctx1 = av1_get_pred_context_single_ref_p2(xd);
830 const int bit1 = aom_read(r, fc->single_ref_prob[ctx1][1]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700831 if (counts) ++counts->single_ref[ctx1][1][bit1];
832 ref_frame[0] = bit1 ? ALTREF_FRAME : GOLDEN_FRAME;
833 } else {
834 ref_frame[0] = LAST_FRAME;
835 }
836#endif // CONFIG_EXT_REFS
837
838 ref_frame[1] = NONE;
839 } else {
840 assert(0 && "Invalid prediction mode.");
841 }
842 }
843}
844
#if CONFIG_OBMC || CONFIG_WARPED_MOTION
// Decode the motion-variation mode for the current block. Blocks that do
// not permit motion variation always use SIMPLE_TRANSLATION and read no
// bits from the stream.
static MOTION_VARIATION read_motvar_block(AV1_COMMON *const cm,
                                          MACROBLOCKD *const xd,
                                          aom_reader *r) {
  const BLOCK_SIZE block_size = xd->mi[0]->mbmi.sb_type;
  FRAME_COUNTS *const stats = xd->counts;

  if (!is_motvar_allowed(&xd->mi[0]->mbmi)) return SIMPLE_TRANSLATION;

  {
    const MOTION_VARIATION result = (MOTION_VARIATION)aom_read_tree(
        r, av1_motvar_tree, cm->fc->motvar_prob[block_size]);
    if (stats) ++stats->motvar[block_size][result];
    return result;
  }
}
#endif  // CONFIG_OBMC || CONFIG_WARPED_MOTION
863
James Zern7b9407a2016-05-18 23:48:05 -0700864static INLINE InterpFilter read_interp_filter(AV1_COMMON *const cm,
865 MACROBLOCKD *const xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700866#if CONFIG_DUAL_FILTER
James Zern7b9407a2016-05-18 23:48:05 -0700867 int dir,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700868#endif
James Zern7b9407a2016-05-18 23:48:05 -0700869 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700870#if CONFIG_EXT_INTERP
Yaowu Xuf883b422016-08-30 14:01:10 -0700871 if (!av1_is_interp_needed(xd)) return EIGHTTAP_REGULAR;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700872#endif
873 if (cm->interp_filter != SWITCHABLE) {
874 return cm->interp_filter;
875 } else {
876#if CONFIG_DUAL_FILTER
Yaowu Xuf883b422016-08-30 14:01:10 -0700877 const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700878#else
Yaowu Xuf883b422016-08-30 14:01:10 -0700879 const int ctx = av1_get_pred_context_switchable_interp(xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700880#endif
881 FRAME_COUNTS *counts = xd->counts;
James Zern7b9407a2016-05-18 23:48:05 -0700882 const InterpFilter type = (InterpFilter)aom_read_tree(
Yaowu Xuf883b422016-08-30 14:01:10 -0700883 r, av1_switchable_interp_tree, cm->fc->switchable_interp_prob[ctx]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700884 if (counts) ++counts->switchable_interp[ctx][type];
885 return type;
886 }
887}
888
Yaowu Xuf883b422016-08-30 14:01:10 -0700889static void read_intra_block_mode_info(AV1_COMMON *const cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700890 MACROBLOCKD *const xd, MODE_INFO *mi,
Yaowu Xuf883b422016-08-30 14:01:10 -0700891 aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700892 MB_MODE_INFO *const mbmi = &mi->mbmi;
893 const BLOCK_SIZE bsize = mi->mbmi.sb_type;
894 int i;
895
896 mbmi->ref_frame[0] = INTRA_FRAME;
897 mbmi->ref_frame[1] = NONE;
898
899 switch (bsize) {
900 case BLOCK_4X4:
901 for (i = 0; i < 4; ++i)
902 mi->bmi[i].as_mode = read_intra_mode_y(cm, xd, r, 0);
903 mbmi->mode = mi->bmi[3].as_mode;
904 break;
905 case BLOCK_4X8:
906 mi->bmi[0].as_mode = mi->bmi[2].as_mode = read_intra_mode_y(cm, xd, r, 0);
907 mi->bmi[1].as_mode = mi->bmi[3].as_mode = mbmi->mode =
908 read_intra_mode_y(cm, xd, r, 0);
909 break;
910 case BLOCK_8X4:
911 mi->bmi[0].as_mode = mi->bmi[1].as_mode = read_intra_mode_y(cm, xd, r, 0);
912 mi->bmi[2].as_mode = mi->bmi[3].as_mode = mbmi->mode =
913 read_intra_mode_y(cm, xd, r, 0);
914 break;
915 default:
916 mbmi->mode = read_intra_mode_y(cm, xd, r, size_group_lookup[bsize]);
917 }
918
919 mbmi->uv_mode = read_intra_mode_uv(cm, xd, r, mbmi->mode);
920#if CONFIG_EXT_INTRA
921 read_intra_angle_info(cm, xd, r);
922#endif // CONFIG_EXT_INTRA
923 mbmi->palette_mode_info.palette_size[0] = 0;
924 mbmi->palette_mode_info.palette_size[1] = 0;
925 if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
926 read_palette_mode_info(cm, xd, r);
927#if CONFIG_EXT_INTRA
928 mbmi->ext_intra_mode_info.use_ext_intra_mode[0] = 0;
929 mbmi->ext_intra_mode_info.use_ext_intra_mode[1] = 0;
930 if (bsize >= BLOCK_8X8) read_ext_intra_mode_info(cm, xd, r);
931#endif // CONFIG_EXT_INTRA
932}
933
934static INLINE int is_mv_valid(const MV *mv) {
935 return mv->row > MV_LOW && mv->row < MV_UPP && mv->col > MV_LOW &&
936 mv->col < MV_UPP;
937}
938
// Assign the final motion vector(s) for a block (or sub-8x8 partition)
// according to the decoded prediction mode.
//
// Modes with a NEW component read an MV residual from the bitstream via
// read_mv(); NEAREST/NEAR/ZERO components are copied from the candidate
// lists (nearest_mv/near_mv) or zeroed (or taken from the global motion
// parameters under CONFIG_GLOBAL_MOTION).
//
// Returns 1 if every decoded MV passes is_mv_valid(), 0 on an invalid MV
// or an unknown mode. Under CONFIG_REF_MV also records the predictor MVs
// in mbmi/bmi pred_mv.
static INLINE int assign_mv(AV1_COMMON *cm, MACROBLOCKD *xd,
                            PREDICTION_MODE mode,
                            MV_REFERENCE_FRAME ref_frame[2],
#if CONFIG_REF_MV
                            int block,
#endif
                            int_mv mv[2], int_mv ref_mv[2],
                            int_mv nearest_mv[2], int_mv near_mv[2],
                            int is_compound, int allow_hp, aom_reader *r) {
  int i;
  int ret = 1;
#if CONFIG_REF_MV
  MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
  BLOCK_SIZE bsize = mbmi->sb_type;
  // For sub-8x8 blocks the predictor MVs live in the per-partition bmi.
  int_mv *pred_mv =
      (bsize >= BLOCK_8X8) ? mbmi->pred_mv : xd->mi[0]->bmi[block].pred_mv;
#endif
  (void)ref_frame;

  switch (mode) {
#if CONFIG_EXT_INTER
    case NEWFROMNEARMV:
#endif  // CONFIG_EXT_INTER
    case NEWMV: {
      FRAME_COUNTS *counts = xd->counts;
#if !CONFIG_REF_MV
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
#endif
      // Read one MV residual per reference (two when compound).
      for (i = 0; i < 1 + is_compound; ++i) {
#if CONFIG_REF_MV
        int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[mbmi->ref_frame[i]],
                                  xd->ref_mv_stack[mbmi->ref_frame[i]]);
        nmv_context_counts *const mv_counts =
            counts ? &counts->mv[nmv_ctx] : NULL;
        read_mv(r, &mv[i].as_mv, &ref_mv[i].as_mv,
#if CONFIG_REF_MV
                is_compound,
#endif
                &cm->fc->nmvc[nmv_ctx], mv_counts, allow_hp);
#else
        read_mv(r, &mv[i].as_mv, &ref_mv[i].as_mv, &cm->fc->nmvc, mv_counts,
                allow_hp);
#endif
        ret = ret && is_mv_valid(&mv[i].as_mv);

#if CONFIG_REF_MV
        pred_mv[i].as_int = ref_mv[i].as_int;
#endif
      }
      break;
    }
    case NEARESTMV: {
      mv[0].as_int = nearest_mv[0].as_int;
      if (is_compound) mv[1].as_int = nearest_mv[1].as_int;

#if CONFIG_REF_MV
      pred_mv[0].as_int = nearest_mv[0].as_int;
      if (is_compound) pred_mv[1].as_int = nearest_mv[1].as_int;
#endif
      break;
    }
    case NEARMV: {
      mv[0].as_int = near_mv[0].as_int;
      if (is_compound) mv[1].as_int = near_mv[1].as_int;

#if CONFIG_REF_MV
      pred_mv[0].as_int = near_mv[0].as_int;
      if (is_compound) pred_mv[1].as_int = near_mv[1].as_int;
#endif
      break;
    }
    case ZEROMV: {
#if CONFIG_GLOBAL_MOTION
      // ZEROMV carries the per-reference global motion translation.
      mv[0].as_int =
          cm->global_motion[ref_frame[0]].motion_params.wmmat[0].as_int;
      if (is_compound)
        mv[1].as_int =
            cm->global_motion[ref_frame[1]].motion_params.wmmat[0].as_int;
#else
      mv[0].as_int = 0;
      if (is_compound) mv[1].as_int = 0;
#endif  // CONFIG_GLOBAL_MOTION

#if CONFIG_REF_MV
      pred_mv[0].as_int = 0;
      if (is_compound) pred_mv[1].as_int = 0;
#endif
      break;
    }
#if CONFIG_EXT_INTER
    // Compound modes: the name encodes how each of the two MVs is derived,
    // e.g. NEW_NEARESTMV reads MV 0 and copies MV 1 from nearest_mv[1].
    case NEW_NEWMV: {
      FRAME_COUNTS *counts = xd->counts;
#if !CONFIG_REF_MV
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
#endif
      assert(is_compound);
      for (i = 0; i < 2; ++i) {
#if CONFIG_REF_MV
        int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[mbmi->ref_frame[i]],
                                  xd->ref_mv_stack[mbmi->ref_frame[i]]);
        nmv_context_counts *const mv_counts =
            counts ? &counts->mv[nmv_ctx] : NULL;
        read_mv(r, &mv[i].as_mv, &ref_mv[i].as_mv, is_compound,
                &cm->fc->nmvc[nmv_ctx], mv_counts, allow_hp);
#else
        read_mv(r, &mv[i].as_mv, &ref_mv[i].as_mv, &cm->fc->nmvc, mv_counts,
                allow_hp);
#endif
        ret = ret && is_mv_valid(&mv[i].as_mv);
      }
      break;
    }
    case NEAREST_NEARESTMV: {
      assert(is_compound);
      mv[0].as_int = nearest_mv[0].as_int;
      mv[1].as_int = nearest_mv[1].as_int;
      break;
    }
    case NEAREST_NEARMV: {
      assert(is_compound);
      mv[0].as_int = nearest_mv[0].as_int;
      mv[1].as_int = near_mv[1].as_int;
      break;
    }
    case NEAR_NEARESTMV: {
      assert(is_compound);
      mv[0].as_int = near_mv[0].as_int;
      mv[1].as_int = nearest_mv[1].as_int;
      break;
    }
    case NEAR_NEARMV: {
      assert(is_compound);
      mv[0].as_int = near_mv[0].as_int;
      mv[1].as_int = near_mv[1].as_int;
      break;
    }
    case NEW_NEARESTMV: {
      FRAME_COUNTS *counts = xd->counts;
#if CONFIG_REF_MV
      int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[mbmi->ref_frame[0]],
                                xd->ref_mv_stack[mbmi->ref_frame[0]]);
      nmv_context_counts *const mv_counts =
          counts ? &counts->mv[nmv_ctx] : NULL;
      read_mv(r, &mv[0].as_mv, &ref_mv[0].as_mv, is_compound,
              &cm->fc->nmvc[nmv_ctx], mv_counts, allow_hp);
#else
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
      read_mv(r, &mv[0].as_mv, &ref_mv[0].as_mv, &cm->fc->nmvc, mv_counts,
              allow_hp);
#endif
      assert(is_compound);
      ret = ret && is_mv_valid(&mv[0].as_mv);
      mv[1].as_int = nearest_mv[1].as_int;
      break;
    }
    case NEAREST_NEWMV: {
      FRAME_COUNTS *counts = xd->counts;
#if CONFIG_REF_MV
      int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[mbmi->ref_frame[1]],
                                xd->ref_mv_stack[mbmi->ref_frame[1]]);
      nmv_context_counts *const mv_counts =
          counts ? &counts->mv[nmv_ctx] : NULL;
      mv[0].as_int = nearest_mv[0].as_int;
      read_mv(r, &mv[1].as_mv, &ref_mv[1].as_mv, is_compound,
              &cm->fc->nmvc[nmv_ctx], mv_counts, allow_hp);
#else
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
      mv[0].as_int = nearest_mv[0].as_int;
      read_mv(r, &mv[1].as_mv, &ref_mv[1].as_mv, &cm->fc->nmvc, mv_counts,
              allow_hp);
#endif
      assert(is_compound);
      ret = ret && is_mv_valid(&mv[1].as_mv);
      break;
    }
    case NEAR_NEWMV: {
      FRAME_COUNTS *counts = xd->counts;
#if CONFIG_REF_MV
      int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[mbmi->ref_frame[1]],
                                xd->ref_mv_stack[mbmi->ref_frame[1]]);
      nmv_context_counts *const mv_counts =
          counts ? &counts->mv[nmv_ctx] : NULL;
      mv[0].as_int = near_mv[0].as_int;
      read_mv(r, &mv[1].as_mv, &ref_mv[1].as_mv, is_compound,
              &cm->fc->nmvc[nmv_ctx], mv_counts, allow_hp);
#else
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
      mv[0].as_int = near_mv[0].as_int;
      read_mv(r, &mv[1].as_mv, &ref_mv[1].as_mv, &cm->fc->nmvc, mv_counts,
              allow_hp);
#endif
      assert(is_compound);

      ret = ret && is_mv_valid(&mv[1].as_mv);
      break;
    }
    case NEW_NEARMV: {
      FRAME_COUNTS *counts = xd->counts;
#if CONFIG_REF_MV
      int nmv_ctx = av1_nmv_ctx(xd->ref_mv_count[mbmi->ref_frame[0]],
                                xd->ref_mv_stack[mbmi->ref_frame[0]]);
      nmv_context_counts *const mv_counts =
          counts ? &counts->mv[nmv_ctx] : NULL;
      read_mv(r, &mv[0].as_mv, &ref_mv[0].as_mv, is_compound,
              &cm->fc->nmvc[nmv_ctx], mv_counts, allow_hp);
#else
      nmv_context_counts *const mv_counts = counts ? &counts->mv : NULL;
      read_mv(r, &mv[0].as_mv, &ref_mv[0].as_mv, &cm->fc->nmvc, mv_counts,
              allow_hp);
#endif
      assert(is_compound);
      ret = ret && is_mv_valid(&mv[0].as_mv);
      mv[1].as_int = near_mv[1].as_int;
      break;
    }
    case ZERO_ZEROMV: {
      assert(is_compound);
      mv[0].as_int = 0;
      mv[1].as_int = 0;
      break;
    }
#endif  // CONFIG_EXT_INTER
    default: { return 0; }
  }
  return ret;
}
1165
Yaowu Xuf883b422016-08-30 14:01:10 -07001166static int read_is_inter_block(AV1_COMMON *const cm, MACROBLOCKD *const xd,
1167 int segment_id, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001168 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
1169 return get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) != INTRA_FRAME;
1170 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001171 const int ctx = av1_get_intra_inter_context(xd);
1172 const int is_inter = aom_read(r, cm->fc->intra_inter_prob[ctx]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001173 FRAME_COUNTS *counts = xd->counts;
1174 if (counts) ++counts->intra_inter[ctx][is_inter];
1175 return is_inter;
1176 }
1177}
1178
1179static void fpm_sync(void *const data, int mi_row) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001180 AV1Decoder *const pbi = (AV1Decoder *)data;
1181 av1_frameworker_wait(pbi->frame_worker_owner, pbi->common.prev_frame,
1182 mi_row << pbi->common.mib_size_log2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001183}
1184
Yaowu Xuf883b422016-08-30 14:01:10 -07001185static void read_inter_block_mode_info(AV1Decoder *const pbi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001186 MACROBLOCKD *const xd,
1187 MODE_INFO *const mi,
1188#if (CONFIG_OBMC || CONFIG_EXT_INTER) && CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07001189 int mi_row, int mi_col, aom_reader *r,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001190 int supertx_enabled) {
1191#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001192 int mi_row, int mi_col, aom_reader *r) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001193#endif // CONFIG_OBMC && CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07001194 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001195 MB_MODE_INFO *const mbmi = &mi->mbmi;
1196 const BLOCK_SIZE bsize = mbmi->sb_type;
1197 const int allow_hp = cm->allow_high_precision_mv;
1198 int_mv nearestmv[2], nearmv[2];
1199 int_mv ref_mvs[MODE_CTX_REF_FRAMES][MAX_MV_REF_CANDIDATES];
1200#if CONFIG_EXT_INTER
1201 int mv_idx;
1202#endif // CONFIG_EXT_INTER
1203 int ref, is_compound;
1204 int16_t inter_mode_ctx[MODE_CTX_REF_FRAMES];
1205#if CONFIG_REF_MV && CONFIG_EXT_INTER
1206 int16_t compound_inter_mode_ctx[MODE_CTX_REF_FRAMES];
1207#endif // CONFIG_REF_MV && CONFIG_EXT_INTER
1208 int16_t mode_ctx = 0;
1209 MV_REFERENCE_FRAME ref_frame;
1210
1211 mbmi->palette_mode_info.palette_size[0] = 0;
1212 mbmi->palette_mode_info.palette_size[1] = 0;
1213
1214 read_ref_frames(cm, xd, r, mbmi->segment_id, mbmi->ref_frame);
1215 is_compound = has_second_ref(mbmi);
1216
1217 for (ref = 0; ref < 1 + is_compound; ++ref) {
1218 MV_REFERENCE_FRAME frame = mbmi->ref_frame[ref];
1219 RefBuffer *ref_buf = &cm->frame_refs[frame - LAST_FRAME];
1220
1221 xd->block_refs[ref] = ref_buf;
Yaowu Xuf883b422016-08-30 14:01:10 -07001222 if ((!av1_is_valid_scale(&ref_buf->sf)))
1223 aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001224 "Reference frame has invalid dimensions");
Yaowu Xuf883b422016-08-30 14:01:10 -07001225 av1_setup_pre_planes(xd, ref, ref_buf->buf, mi_row, mi_col, &ref_buf->sf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001226 }
1227
1228 for (ref_frame = LAST_FRAME; ref_frame < MODE_CTX_REF_FRAMES; ++ref_frame) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001229 av1_find_mv_refs(cm, xd, mi, ref_frame,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001230#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001231 &xd->ref_mv_count[ref_frame], xd->ref_mv_stack[ref_frame],
Yaowu Xuc27fc142016-08-22 16:08:15 -07001232#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001233 compound_inter_mode_ctx,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001234#endif // CONFIG_EXT_INTER
1235#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001236 ref_mvs[ref_frame], mi_row, mi_col, fpm_sync, (void *)pbi,
1237 inter_mode_ctx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001238 }
1239
1240#if CONFIG_REF_MV
1241#if CONFIG_EXT_INTER
1242 if (is_compound)
1243 mode_ctx = compound_inter_mode_ctx[mbmi->ref_frame[0]];
1244 else
1245#endif // CONFIG_EXT_INTER
1246 mode_ctx =
Yaowu Xuf883b422016-08-30 14:01:10 -07001247 av1_mode_context_analyzer(inter_mode_ctx, mbmi->ref_frame, bsize, -1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001248 mbmi->ref_mv_idx = 0;
1249#else
1250 mode_ctx = inter_mode_ctx[mbmi->ref_frame[0]];
1251#endif
1252
1253 if (segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
1254 mbmi->mode = ZEROMV;
1255 if (bsize < BLOCK_8X8) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001256 aom_internal_error(xd->error_info, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001257 "Invalid usage of segement feature on small blocks");
1258 return;
1259 }
1260 } else {
1261 if (bsize >= BLOCK_8X8) {
1262#if CONFIG_EXT_INTER
1263 if (is_compound)
1264 mbmi->mode = read_inter_compound_mode(cm, xd, r, mode_ctx);
1265 else
1266#endif // CONFIG_EXT_INTER
1267 mbmi->mode = read_inter_mode(cm, xd,
1268#if CONFIG_REF_MV && CONFIG_EXT_INTER
1269 mbmi,
1270#endif // CONFIG_REF_MV && CONFIG_EXT_INTER
1271 r, mode_ctx);
1272#if CONFIG_REF_MV
1273 if (mbmi->mode == NEARMV || mbmi->mode == NEWMV)
1274 read_drl_idx(cm, xd, mbmi, r);
1275#endif
1276 }
1277 }
1278
1279#if CONFIG_EXT_INTER
1280 if (bsize < BLOCK_8X8 ||
1281 (mbmi->mode != ZEROMV && mbmi->mode != ZERO_ZEROMV)) {
1282#else
1283 if (bsize < BLOCK_8X8 || mbmi->mode != ZEROMV) {
1284#endif // CONFIG_EXT_INTER
1285 for (ref = 0; ref < 1 + is_compound; ++ref) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001286 av1_find_best_ref_mvs(allow_hp, ref_mvs[mbmi->ref_frame[ref]],
1287 &nearestmv[ref], &nearmv[ref]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001288 }
1289 }
1290
1291#if CONFIG_REF_MV
1292 if (mbmi->ref_mv_idx > 0) {
1293 int_mv cur_mv =
1294 xd->ref_mv_stack[mbmi->ref_frame[0]][1 + mbmi->ref_mv_idx].this_mv;
1295 nearmv[0] = cur_mv;
1296 }
1297
1298#if CONFIG_EXT_INTER
1299 if (is_compound && bsize >= BLOCK_8X8 && mbmi->mode != ZERO_ZEROMV) {
1300#else
1301 if (is_compound && bsize >= BLOCK_8X8 && mbmi->mode != NEWMV &&
1302 mbmi->mode != ZEROMV) {
1303#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001304 uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001305
1306#if CONFIG_EXT_INTER
1307 if (xd->ref_mv_count[ref_frame_type] > 0) {
1308#else
1309 if (xd->ref_mv_count[ref_frame_type] == 1 && mbmi->mode == NEARESTMV) {
1310#endif // CONFIG_EXT_INTER
1311#if CONFIG_EXT_INTER
1312 if (mbmi->mode == NEAREST_NEARESTMV) {
1313#endif // CONFIG_EXT_INTER
1314 nearestmv[0] = xd->ref_mv_stack[ref_frame_type][0].this_mv;
1315 nearestmv[1] = xd->ref_mv_stack[ref_frame_type][0].comp_mv;
1316 lower_mv_precision(&nearestmv[0].as_mv, allow_hp);
1317 lower_mv_precision(&nearestmv[1].as_mv, allow_hp);
1318#if CONFIG_EXT_INTER
1319 } else if (mbmi->mode == NEAREST_NEWMV || mbmi->mode == NEAREST_NEARMV) {
1320 nearestmv[0] = xd->ref_mv_stack[ref_frame_type][0].this_mv;
1321 lower_mv_precision(&nearestmv[0].as_mv, allow_hp);
1322 } else if (mbmi->mode == NEW_NEARESTMV || mbmi->mode == NEAR_NEARESTMV) {
1323 nearestmv[1] = xd->ref_mv_stack[ref_frame_type][0].comp_mv;
1324 lower_mv_precision(&nearestmv[1].as_mv, allow_hp);
1325 }
1326#endif // CONFIG_EXT_INTER
1327 }
1328
1329#if CONFIG_EXT_INTER
1330 if (xd->ref_mv_count[ref_frame_type] > 1) {
1331 if (mbmi->mode == NEAR_NEWMV || mbmi->mode == NEAR_NEARESTMV ||
1332 mbmi->mode == NEAR_NEARMV) {
1333 nearmv[0] = xd->ref_mv_stack[ref_frame_type][1].this_mv;
1334 lower_mv_precision(&nearmv[0].as_mv, allow_hp);
1335 }
1336
1337 if (mbmi->mode == NEW_NEARMV || mbmi->mode == NEAREST_NEARMV ||
1338 mbmi->mode == NEAR_NEARMV) {
1339 nearmv[1] = xd->ref_mv_stack[ref_frame_type][1].comp_mv;
1340 lower_mv_precision(&nearmv[1].as_mv, allow_hp);
1341 }
1342 }
1343#else
1344 if (xd->ref_mv_count[ref_frame_type] > 1) {
1345 int ref_mv_idx = 1 + mbmi->ref_mv_idx;
1346 nearestmv[0] = xd->ref_mv_stack[ref_frame_type][0].this_mv;
1347 nearestmv[1] = xd->ref_mv_stack[ref_frame_type][0].comp_mv;
1348 nearmv[0] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].this_mv;
1349 nearmv[1] = xd->ref_mv_stack[ref_frame_type][ref_mv_idx].comp_mv;
1350 }
1351#endif // CONFIG_EXT_INTER
1352 }
1353#endif
1354
1355#if !CONFIG_EXT_INTERP && !CONFIG_DUAL_FILTER
1356 mbmi->interp_filter = read_interp_filter(cm, xd, r);
1357#endif // !CONFIG_EXT_INTERP && !CONFIG_DUAL_FILTER
1358
1359 if (bsize < BLOCK_8X8) {
1360 const int num_4x4_w = 1 << xd->bmode_blocks_wl;
1361 const int num_4x4_h = 1 << xd->bmode_blocks_hl;
1362 int idx, idy;
1363 PREDICTION_MODE b_mode;
1364 int_mv nearest_sub8x8[2], near_sub8x8[2];
1365#if CONFIG_EXT_INTER
1366 int_mv ref_mv[2][2];
1367#endif // CONFIG_EXT_INTER
1368 for (idy = 0; idy < 2; idy += num_4x4_h) {
1369 for (idx = 0; idx < 2; idx += num_4x4_w) {
1370 int_mv block[2];
1371 const int j = idy * 2 + idx;
1372 int_mv ref_mv_s8[2];
1373#if CONFIG_REF_MV
1374#if CONFIG_EXT_INTER
1375 if (!is_compound)
1376#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001377 mode_ctx = av1_mode_context_analyzer(inter_mode_ctx, mbmi->ref_frame,
1378 bsize, j);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001379#endif
1380#if CONFIG_EXT_INTER
1381 if (is_compound)
1382 b_mode = read_inter_compound_mode(cm, xd, r, mode_ctx);
1383 else
1384#endif // CONFIG_EXT_INTER
1385 b_mode = read_inter_mode(cm, xd,
1386#if CONFIG_REF_MV && CONFIG_EXT_INTER
1387 mbmi,
1388#endif // CONFIG_REF_MV && CONFIG_EXT_INTER
1389 r, mode_ctx);
1390
1391#if CONFIG_EXT_INTER
1392 mv_idx = (b_mode == NEWFROMNEARMV) ? 1 : 0;
1393
1394 if (b_mode != ZEROMV && b_mode != ZERO_ZEROMV) {
1395#else
1396 if (b_mode != ZEROMV) {
1397#endif // CONFIG_EXT_INTER
1398#if CONFIG_REF_MV
1399 CANDIDATE_MV ref_mv_stack[2][MAX_REF_MV_STACK_SIZE];
1400 uint8_t ref_mv_count[2];
1401#endif
1402 for (ref = 0; ref < 1 + is_compound; ++ref)
1403#if CONFIG_EXT_INTER
1404 {
1405 int_mv mv_ref_list[MAX_MV_REF_CANDIDATES];
Yaowu Xuf883b422016-08-30 14:01:10 -07001406 av1_update_mv_context(xd, mi, mbmi->ref_frame[ref], mv_ref_list, j,
1407 mi_row, mi_col, NULL);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001408#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001409 av1_append_sub8x8_mvs_for_idx(cm, xd, j, ref, mi_row, mi_col,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001410#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001411 ref_mv_stack[ref], &ref_mv_count[ref],
Yaowu Xuc27fc142016-08-22 16:08:15 -07001412#endif
1413#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001414 mv_ref_list,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001415#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001416 &nearest_sub8x8[ref],
1417 &near_sub8x8[ref]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001418#if CONFIG_EXT_INTER
1419 if (have_newmv_in_inter_mode(b_mode)) {
1420 mv_ref_list[0].as_int = nearest_sub8x8[ref].as_int;
1421 mv_ref_list[1].as_int = near_sub8x8[ref].as_int;
Yaowu Xuf883b422016-08-30 14:01:10 -07001422 av1_find_best_ref_mvs(allow_hp, mv_ref_list, &ref_mv[0][ref],
1423 &ref_mv[1][ref]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001424 }
1425 }
1426#endif // CONFIG_EXT_INTER
1427 }
1428
1429 for (ref = 0; ref < 1 + is_compound && b_mode != ZEROMV; ++ref) {
1430#if CONFIG_REF_MV
1431 ref_mv_s8[ref] = nearest_sub8x8[ref];
1432 lower_mv_precision(&ref_mv_s8[ref].as_mv, allow_hp);
1433#else
1434 ref_mv_s8[ref] = nearestmv[ref];
1435#endif
1436 }
1437#if CONFIG_EXT_INTER
1438 (void)ref_mv_s8;
1439#endif
1440
Sarah Parkere5299862016-08-16 14:57:37 -07001441 if (!assign_mv(cm, xd, b_mode, mbmi->ref_frame,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001442#if CONFIG_REF_MV
1443 j,
1444#endif
1445 block,
1446#if CONFIG_EXT_INTER
1447 ref_mv[mv_idx],
1448#else
1449 ref_mv_s8,
1450#endif // CONFIG_EXT_INTER
1451 nearest_sub8x8, near_sub8x8, is_compound, allow_hp, r)) {
1452 xd->corrupted |= 1;
1453 break;
1454 };
1455
1456 mi->bmi[j].as_mv[0].as_int = block[0].as_int;
1457 if (is_compound) mi->bmi[j].as_mv[1].as_int = block[1].as_int;
1458
1459 if (num_4x4_h == 2) mi->bmi[j + 2] = mi->bmi[j];
1460 if (num_4x4_w == 2) mi->bmi[j + 1] = mi->bmi[j];
1461 }
1462 }
1463
1464#if CONFIG_REF_MV
Yaowu Xuf5bbbfa2016-09-26 09:13:38 -07001465 mbmi->pred_mv[0].as_int = mi->bmi[3].pred_mv[0].as_int;
1466 mbmi->pred_mv[1].as_int = mi->bmi[3].pred_mv[1].as_int;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001467#endif
1468 mi->mbmi.mode = b_mode;
1469
1470 mbmi->mv[0].as_int = mi->bmi[3].as_mv[0].as_int;
1471 mbmi->mv[1].as_int = mi->bmi[3].as_mv[1].as_int;
1472 } else {
1473 int ref;
1474 int_mv ref_mv[2];
1475 ref_mv[0] = nearestmv[0];
1476 ref_mv[1] = nearestmv[1];
1477
1478 for (ref = 0; ref < 1 + is_compound && mbmi->mode == NEWMV; ++ref) {
1479#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001480 uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001481 if (xd->ref_mv_count[ref_frame_type] > 1) {
1482 ref_mv[ref] =
1483 (ref == 0)
1484 ? xd->ref_mv_stack[ref_frame_type][mbmi->ref_mv_idx].this_mv
1485 : xd->ref_mv_stack[ref_frame_type][mbmi->ref_mv_idx].comp_mv;
1486 clamp_mv_ref(&ref_mv[ref].as_mv, xd->n8_w << 3, xd->n8_h << 3, xd);
1487 }
1488#endif
1489 nearestmv[ref] = ref_mv[ref];
1490 }
1491
1492 xd->corrupted |=
Sarah Parkere5299862016-08-16 14:57:37 -07001493 !assign_mv(cm, xd, mbmi->mode, mbmi->ref_frame,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001494#if CONFIG_REF_MV
1495 0,
1496#endif
1497 mbmi->mv,
1498#if CONFIG_EXT_INTER
1499 mbmi->mode == NEWFROMNEARMV ? nearmv : nearestmv,
1500#else
1501 ref_mv,
1502#endif // CONFIG_EXT_INTER
1503 nearestmv, nearmv, is_compound, allow_hp, r);
1504 }
1505
1506#if CONFIG_EXT_INTER
1507 mbmi->use_wedge_interintra = 0;
1508 if (cm->reference_mode != COMPOUND_REFERENCE &&
1509#if CONFIG_SUPERTX
1510 !supertx_enabled &&
1511#endif
1512 is_interintra_allowed(mbmi)) {
1513 const int bsize_group = size_group_lookup[bsize];
Yaowu Xuf883b422016-08-30 14:01:10 -07001514 const int interintra = aom_read(r, cm->fc->interintra_prob[bsize_group]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001515 if (xd->counts) xd->counts->interintra[bsize_group][interintra]++;
1516 assert(mbmi->ref_frame[1] == NONE);
1517 if (interintra) {
1518 const INTERINTRA_MODE interintra_mode =
1519 read_interintra_mode(cm, xd, r, bsize_group);
1520 mbmi->ref_frame[1] = INTRA_FRAME;
1521 mbmi->interintra_mode = interintra_mode;
1522#if CONFIG_EXT_INTRA
1523 mbmi->ext_intra_mode_info.use_ext_intra_mode[0] = 0;
1524 mbmi->ext_intra_mode_info.use_ext_intra_mode[1] = 0;
1525 mbmi->angle_delta[0] = 0;
1526 mbmi->angle_delta[1] = 0;
1527 mbmi->intra_filter = INTRA_FILTER_LINEAR;
1528#endif // CONFIG_EXT_INTRA
1529 if (is_interintra_wedge_used(bsize)) {
1530 mbmi->use_wedge_interintra =
Yaowu Xuf883b422016-08-30 14:01:10 -07001531 aom_read(r, cm->fc->wedge_interintra_prob[bsize]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001532 if (xd->counts)
1533 xd->counts->wedge_interintra[bsize][mbmi->use_wedge_interintra]++;
1534 if (mbmi->use_wedge_interintra) {
1535 mbmi->interintra_wedge_index =
Yaowu Xuf883b422016-08-30 14:01:10 -07001536 aom_read_literal(r, get_wedge_bits_lookup(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001537 mbmi->interintra_wedge_sign = 0;
1538 }
1539 }
1540 }
1541 }
1542#endif // CONFIG_EXT_INTER
1543
1544#if CONFIG_OBMC || CONFIG_WARPED_MOTION
1545 mbmi->motion_variation = SIMPLE_TRANSLATION;
1546#if CONFIG_SUPERTX
1547 if (!supertx_enabled)
1548#endif // CONFIG_SUPERTX
1549#if CONFIG_EXT_INTER
1550 if (mbmi->ref_frame[1] != INTRA_FRAME)
1551#endif // CONFIG_EXT_INTER
1552 mbmi->motion_variation = read_motvar_block(cm, xd, r);
1553#endif // CONFIG_OBMC || CONFIG_WARPED_MOTION
1554
1555#if CONFIG_EXT_INTER
1556 mbmi->use_wedge_interinter = 0;
1557 if (cm->reference_mode != SINGLE_REFERENCE &&
1558 is_inter_compound_mode(mbmi->mode) &&
1559#if CONFIG_OBMC || CONFIG_WARPED_MOTION
1560 !(is_motvar_allowed(mbmi) &&
1561 mbmi->motion_variation != SIMPLE_TRANSLATION) &&
1562#endif // CONFIG_OBMC || CONFIG_WARPED_MOTION
1563 is_interinter_wedge_used(bsize)) {
1564 mbmi->use_wedge_interinter =
Yaowu Xuf883b422016-08-30 14:01:10 -07001565 aom_read(r, cm->fc->wedge_interinter_prob[bsize]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001566 if (xd->counts)
1567 xd->counts->wedge_interinter[bsize][mbmi->use_wedge_interinter]++;
1568 if (mbmi->use_wedge_interinter) {
1569 mbmi->interinter_wedge_index =
Yaowu Xuf883b422016-08-30 14:01:10 -07001570 aom_read_literal(r, get_wedge_bits_lookup(bsize));
1571 mbmi->interinter_wedge_sign = aom_read_bit(r);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001572 }
1573 }
1574#endif // CONFIG_EXT_INTER
1575
1576#if CONFIG_DUAL_FILTER
1577 for (ref = 0; ref < 2; ++ref) {
1578 mbmi->interp_filter[ref] = (cm->interp_filter == SWITCHABLE)
1579 ? EIGHTTAP_REGULAR
1580 : cm->interp_filter;
1581
1582 if (has_subpel_mv_component(xd->mi[0], xd, ref) ||
1583 (mbmi->ref_frame[1] > INTRA_FRAME &&
1584 has_subpel_mv_component(xd->mi[0], xd, ref + 2)))
1585 mbmi->interp_filter[ref] = read_interp_filter(cm, xd, ref, r);
1586 }
  // The index system works as:
1588 // (0, 1) -> (vertical, horizontal) filter types for the first ref frame.
1589 // (2, 3) -> (vertical, horizontal) filter types for the second ref frame.
1590 mbmi->interp_filter[2] = mbmi->interp_filter[0];
1591 mbmi->interp_filter[3] = mbmi->interp_filter[1];
1592#else
1593#if CONFIG_EXT_INTERP
1594 mbmi->interp_filter = read_interp_filter(cm, xd, r);
1595#endif // CONFIG_EXT_INTERP
1596#endif // CONFIG_DUAL_FILTER
1597}
1598
// Reads all the mode info for one block of a non-intra-only frame:
// segment id, skip flag, the intra/inter decision, transform size(s),
// the block's prediction mode info, and finally the transform type.
static void read_inter_frame_mode_info(AV1Decoder *const pbi,
                                       MACROBLOCKD *const xd,
#if CONFIG_SUPERTX
                                       int supertx_enabled,
#endif  // CONFIG_SUPERTX
                                       int mi_row, int mi_col, aom_reader *r) {
  AV1_COMMON *const cm = &pbi->common;
  MODE_INFO *const mi = xd->mi[0];
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  int inter_block = 1;
#if CONFIG_VAR_TX
  BLOCK_SIZE bsize = mbmi->sb_type;
#endif  // CONFIG_VAR_TX

  mbmi->mv[0].as_int = 0;
  mbmi->mv[1].as_int = 0;
  mbmi->segment_id = read_inter_segment_id(cm, xd, mi_row, mi_col, r);
#if CONFIG_SUPERTX
  // When this block is covered by a supertx block, skip and tx size are
  // signalled at the supertx level, so the per-block reads are bypassed.
  if (!supertx_enabled) {
#endif  // CONFIG_SUPERTX
    mbmi->skip = read_skip(cm, xd, mbmi->segment_id, r);
    inter_block = read_is_inter_block(cm, xd, mbmi->segment_id, r);

#if CONFIG_VAR_TX
    // Point the above/left transform-partition contexts at this block's
    // position before decoding the tx-size information.
    xd->above_txfm_context = cm->above_txfm_context + mi_col;
    xd->left_txfm_context =
        xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
    if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT && !mbmi->skip &&
        inter_block) {
      const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
      const BLOCK_SIZE txb_size = txsize_to_bsize[max_tx_size];
      const int bs = num_4x4_blocks_wide_lookup[txb_size];
      const int width = num_4x4_blocks_wide_lookup[bsize];
      const int height = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      int tx_size_cat = inter_tx_size_cat_lookup[bsize];
#if CONFIG_EXT_TX && CONFIG_RECT_TX
      int is_rect_tx_allowed = inter_block && is_rect_tx_allowed_bsize(bsize);
      int use_rect_tx = 0;

      if (is_rect_tx_allowed) {
        use_rect_tx = aom_read(r, cm->fc->rect_tx_prob[tx_size_cat]);
        if (xd->counts) {
          ++xd->counts->rect_tx[tx_size_cat][use_rect_tx];
        }
      }

      if (use_rect_tx) {
        // Rectangular transform: a single tx size covers the whole block,
        // so no recursive partitioning is read.
        mbmi->tx_size = max_txsize_rect_lookup[bsize];
        set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, xd);
      } else {
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX
        // Read the recursive transform partitioning, one max-tx-size unit
        // at a time.
        for (idy = 0; idy < height; idy += bs)
          for (idx = 0; idx < width; idx += bs)
            read_tx_size_vartx(cm, xd, mbmi, xd->counts, max_tx_size, idy, idx,
                               r);
#if CONFIG_EXT_TX && CONFIG_RECT_TX
      }
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX
      if (xd->counts) {
        const int ctx = get_tx_size_context(xd);
        ++xd->counts->tx_size[tx_size_cat][ctx]
                             [txsize_sqr_up_map[mbmi->tx_size]];
      }
    } else {
      if (inter_block)
        mbmi->tx_size = read_tx_size_inter(cm, xd, !mbmi->skip, r);
      else
        mbmi->tx_size = read_tx_size_intra(cm, xd, r);

      if (inter_block) {
        // Replicate the single tx size into the per-8x8 inter_tx_size grid
        // (indices are in 4x4 units, hence the >> 1).
        const int width = num_4x4_blocks_wide_lookup[bsize];
        const int height = num_4x4_blocks_high_lookup[bsize];
        int idx, idy;
        for (idy = 0; idy < height; ++idy)
          for (idx = 0; idx < width; ++idx)
            mbmi->inter_tx_size[idy >> 1][idx >> 1] = mbmi->tx_size;
      }

      set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, xd);
    }
#else
    if (inter_block)
      mbmi->tx_size = read_tx_size_inter(cm, xd, !mbmi->skip, r);
    else
      mbmi->tx_size = read_tx_size_intra(cm, xd, r);
#endif  // CONFIG_VAR_TX
#if CONFIG_SUPERTX
  }
#if CONFIG_VAR_TX
  // Supertx case: inherit the tx size decoded at the supertx block level.
  else if (inter_block) {
    const int width = num_4x4_blocks_wide_lookup[bsize];
    const int height = num_4x4_blocks_high_lookup[bsize];
    int idx, idy;
    xd->mi[0]->mbmi.tx_size = xd->supertx_size;
    for (idy = 0; idy < height; ++idy)
      for (idx = 0; idx < width; ++idx)
        xd->mi[0]->mbmi.inter_tx_size[idy >> 1][idx >> 1] = xd->supertx_size;
  }
#endif  // CONFIG_VAR_TX
#endif  // CONFIG_SUPERTX

  if (inter_block)
    read_inter_block_mode_info(pbi, xd,
#if (CONFIG_OBMC || CONFIG_EXT_INTER) && CONFIG_SUPERTX

                               mi, mi_row, mi_col, r, supertx_enabled);
#else
                               mi, mi_row, mi_col, r);
#endif  // (CONFIG_OBMC || CONFIG_EXT_INTER) && CONFIG_SUPERTX
  else
    read_intra_block_mode_info(cm, xd, mi, r);

  // Decode the transform type. DCT_DCT is implied (not signalled) when the
  // block is skipped, base_qindex is 0, segment-level skip is active, or
  // supertx signalling applies.
  if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
    if (get_ext_tx_types(mbmi->tx_size, mbmi->sb_type, inter_block) > 1 &&
        cm->base_qindex > 0 && !mbmi->skip &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      int eset = get_ext_tx_set(mbmi->tx_size, mbmi->sb_type, inter_block);
      FRAME_COUNTS *counts = xd->counts;

      if (inter_block) {
        if (eset > 0) {
          mbmi->tx_type = aom_read_tree(
              r, av1_ext_tx_inter_tree[eset],
              cm->fc->inter_ext_tx_prob[eset][txsize_sqr_map[mbmi->tx_size]]);
          if (counts)
            ++counts->inter_ext_tx[eset][txsize_sqr_map[mbmi->tx_size]]
                                  [mbmi->tx_type];
        }
      } else if (ALLOW_INTRA_EXT_TX) {
        if (eset > 0) {
          mbmi->tx_type = aom_read_tree(
              r, av1_ext_tx_intra_tree[eset],
              cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode]);
          if (counts)
            ++counts->intra_ext_tx[eset][mbmi->tx_size][mbmi->mode]
                                  [mbmi->tx_type];
        }
      }
    } else {
      mbmi->tx_type = DCT_DCT;
    }
#else
    if (mbmi->tx_size < TX_32X32 && cm->base_qindex > 0 && !mbmi->skip &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      FRAME_COUNTS *counts = xd->counts;
      if (inter_block) {
        mbmi->tx_type = aom_read_tree(r, av1_ext_tx_tree,
                                      cm->fc->inter_ext_tx_prob[mbmi->tx_size]);
        if (counts) ++counts->inter_ext_tx[mbmi->tx_size][mbmi->tx_type];
      } else {
        const TX_TYPE tx_type_nom = intra_mode_to_tx_type_context[mbmi->mode];
        mbmi->tx_type = aom_read_tree(
            r, av1_ext_tx_tree,
            cm->fc->intra_ext_tx_prob[mbmi->tx_size][tx_type_nom]);
        if (counts)
          ++counts->intra_ext_tx[mbmi->tx_size][tx_type_nom][mbmi->tx_type];
      }
    } else {
      mbmi->tx_type = DCT_DCT;
    }
#endif  // CONFIG_EXT_TX
  }
}
1770
Yaowu Xuf883b422016-08-30 14:01:10 -07001771void av1_read_mode_info(AV1Decoder *const pbi, MACROBLOCKD *xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001772#if CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07001773 int supertx_enabled,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001774#endif // CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -07001775 int mi_row, int mi_col, aom_reader *r, int x_mis,
1776 int y_mis) {
1777 AV1_COMMON *const cm = &pbi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001778 MODE_INFO *const mi = xd->mi[0];
1779 MV_REF *frame_mvs = cm->cur_frame->mvs + mi_row * cm->mi_cols + mi_col;
1780 int w, h;
1781
1782 if (frame_is_intra_only(cm)) {
1783 read_intra_frame_mode_info(cm, xd, mi_row, mi_col, r);
1784#if CONFIG_REF_MV
1785 for (h = 0; h < y_mis; ++h) {
1786 MV_REF *const frame_mv = frame_mvs + h * cm->mi_cols;
1787 for (w = 0; w < x_mis; ++w) {
1788 MV_REF *const mv = frame_mv + w;
1789 mv->ref_frame[0] = NONE;
1790 mv->ref_frame[1] = NONE;
1791 }
1792 }
1793#endif
1794 } else {
1795 read_inter_frame_mode_info(pbi, xd,
1796#if CONFIG_SUPERTX
1797 supertx_enabled,
1798#endif // CONFIG_SUPERTX
1799 mi_row, mi_col, r);
1800 for (h = 0; h < y_mis; ++h) {
1801 MV_REF *const frame_mv = frame_mvs + h * cm->mi_cols;
1802 for (w = 0; w < x_mis; ++w) {
1803 MV_REF *const mv = frame_mv + w;
1804 mv->ref_frame[0] = mi->mbmi.ref_frame[0];
1805 mv->ref_frame[1] = mi->mbmi.ref_frame[1];
1806 mv->mv[0].as_int = mi->mbmi.mv[0].as_int;
1807 mv->mv[1].as_int = mi->mbmi.mv[1].as_int;
1808 }
1809 }
1810 }
1811}