/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include "av1/common/mvref_common.h"
#if CONFIG_WARPED_MOTION
#include "av1/common/warped_motion.h"
#endif  // CONFIG_WARPED_MOTION

#if CONFIG_REF_MV

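// Compare a neighbouring block's motion information against the reference
// frame pair in rf[]. A matching motion vector either reinforces an existing
// entry in ref_mv_stack (by adding to its weight) or is appended as a new
// candidate. Returns the number of matching candidates that were coded with a
// NEWMV-type mode.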
static uint8_t add_ref_mv_candidate(
    const MODE_INFO *const candidate_mi, const MB_MODE_INFO *const candidate,
    const MV_REFERENCE_FRAME rf[2], uint8_t *refmv_count,
    CANDIDATE_MV *ref_mv_stack, const int use_hp, int len, int block, int col) {
  int index = 0, ref;
  int newmv_count = 0;
#if CONFIG_CB4X4
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif

  if (rf[1] == NONE) {
    // single reference frame
    for (ref = 0; ref < 2; ++ref) {
      if (candidate->ref_frame[ref] == rf[0]) {
        int_mv this_refmv = get_sub_block_mv(candidate_mi, ref, col, block);
        lower_mv_precision(&this_refmv.as_mv, use_hp);

        for (index = 0; index < *refmv_count; ++index)
          if (ref_mv_stack[index].this_mv.as_int == this_refmv.as_int) break;

        if (index < *refmv_count) ref_mv_stack[index].weight += 2 * len;

        // Add a new item to the list.
        if (index == *refmv_count) {
          ref_mv_stack[index].this_mv = this_refmv;
          ref_mv_stack[index].pred_diff[0] = av1_get_pred_diff_ctx(
              get_sub_block_pred_mv(candidate_mi, ref, col, block), this_refmv);
          ref_mv_stack[index].weight = 2 * len;
          ++(*refmv_count);

#if CONFIG_EXT_INTER
          if (candidate->mode == NEWMV || candidate->mode == NEWFROMNEARMV)
#else
          if (candidate->mode == NEWMV)
#endif  // CONFIG_EXT_INTER
            ++newmv_count;
        }

        if (candidate_mi->mbmi.sb_type < BLOCK_8X8 && block >= 0 &&
            !unify_bsize) {
          int alt_block = 3 - block;
          this_refmv = get_sub_block_mv(candidate_mi, ref, col, alt_block);
          lower_mv_precision(&this_refmv.as_mv, use_hp);

          for (index = 0; index < *refmv_count; ++index)
            if (ref_mv_stack[index].this_mv.as_int == this_refmv.as_int) break;

          if (index < *refmv_count) ref_mv_stack[index].weight += len;

          // Add a new item to the list.
          if (index == *refmv_count) {
            ref_mv_stack[index].this_mv = this_refmv;
            ref_mv_stack[index].pred_diff[0] = av1_get_pred_diff_ctx(
                get_sub_block_pred_mv(candidate_mi, ref, col, alt_block),
                this_refmv);
            ref_mv_stack[index].weight = len;
            ++(*refmv_count);

#if CONFIG_EXT_INTER
            if (candidate->mode == NEWMV || candidate->mode == NEWFROMNEARMV)
#else
            if (candidate->mode == NEWMV)
#endif  // CONFIG_EXT_INTER
              ++newmv_count;
          }
        }
      }
    }
  } else {
    // compound reference frame
    if (candidate->ref_frame[0] == rf[0] && candidate->ref_frame[1] == rf[1]) {
      int_mv this_refmv[2];

      for (ref = 0; ref < 2; ++ref) {
        this_refmv[ref] = get_sub_block_mv(candidate_mi, ref, col, block);
        lower_mv_precision(&this_refmv[ref].as_mv, use_hp);
      }

      for (index = 0; index < *refmv_count; ++index)
        if ((ref_mv_stack[index].this_mv.as_int == this_refmv[0].as_int) &&
            (ref_mv_stack[index].comp_mv.as_int == this_refmv[1].as_int))
          break;

      if (index < *refmv_count) ref_mv_stack[index].weight += 2 * len;

      // Add a new item to the list.
      if (index == *refmv_count) {
        ref_mv_stack[index].this_mv = this_refmv[0];
        ref_mv_stack[index].comp_mv = this_refmv[1];
        ref_mv_stack[index].pred_diff[0] = av1_get_pred_diff_ctx(
            get_sub_block_pred_mv(candidate_mi, 0, col, block), this_refmv[0]);
        ref_mv_stack[index].pred_diff[1] = av1_get_pred_diff_ctx(
            get_sub_block_pred_mv(candidate_mi, 1, col, block), this_refmv[1]);
        ref_mv_stack[index].weight = 2 * len;
        ++(*refmv_count);

#if CONFIG_EXT_INTER
        if (candidate->mode == NEW_NEWMV)
#else
        if (candidate->mode == NEWMV)
#endif  // CONFIG_EXT_INTER
          ++newmv_count;
      }

      if (candidate_mi->mbmi.sb_type < BLOCK_8X8 && block >= 0 &&
          !unify_bsize) {
        int alt_block = 3 - block;
        this_refmv[0] = get_sub_block_mv(candidate_mi, 0, col, alt_block);
        this_refmv[1] = get_sub_block_mv(candidate_mi, 1, col, alt_block);

        for (ref = 0; ref < 2; ++ref)
          lower_mv_precision(&this_refmv[ref].as_mv, use_hp);

        for (index = 0; index < *refmv_count; ++index)
          if (ref_mv_stack[index].this_mv.as_int == this_refmv[0].as_int &&
              ref_mv_stack[index].comp_mv.as_int == this_refmv[1].as_int)
            break;

        if (index < *refmv_count) ref_mv_stack[index].weight += len;

        // Add a new item to the list.
        if (index == *refmv_count) {
          ref_mv_stack[index].this_mv = this_refmv[0];
          ref_mv_stack[index].comp_mv = this_refmv[1];
          ref_mv_stack[index].pred_diff[0] = av1_get_pred_diff_ctx(
              get_sub_block_pred_mv(candidate_mi, 0, col, block),
              this_refmv[0]);
          ref_mv_stack[index].pred_diff[1] = av1_get_pred_diff_ctx(
              get_sub_block_pred_mv(candidate_mi, 1, col, block),
              this_refmv[1]);
          ref_mv_stack[index].weight = len;
          ++(*refmv_count);

#if CONFIG_EXT_INTER
          if (candidate->mode == NEW_NEWMV)
#else
          if (candidate->mode == NEWMV)
#endif  // CONFIG_EXT_INTER
            ++newmv_count;
        }
      }
    }
  }
  return newmv_count;
}

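// Walk the row of mode info units at the given row_offset above the current
// block and feed each neighbour into add_ref_mv_candidate(). The scan stops
// once the candidate stack is full.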
static uint8_t scan_row_mbmi(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             const int mi_row, const int mi_col, int block,
                             const MV_REFERENCE_FRAME rf[2], int row_offset,
                             CANDIDATE_MV *ref_mv_stack, uint8_t *refmv_count) {
  const TileInfo *const tile = &xd->tile;
  int i;
  uint8_t newmv_count = 0;
  const int mi_offset = mi_size_wide[BLOCK_8X8];
#if CONFIG_CB4X4
  // TODO(jingning): Revisit this part after cb4x4 is stable.
  row_offset *= 2;
#endif

  for (i = 0; i < xd->n8_w && *refmv_count < MAX_REF_MV_STACK_SIZE;) {
    POSITION mi_pos;
#if CONFIG_CB4X4
    const int use_step_16 = (xd->n8_w >= 16);
#else
    const int use_step_16 = (xd->n8_w >= 8);
#endif

    mi_pos.row = row_offset;
    mi_pos.col = i;
    if (is_inside(tile, mi_col, mi_row, &mi_pos)) {
      const MODE_INFO *const candidate_mi =
          xd->mi[mi_pos.row * xd->mi_stride + mi_pos.col];
      const MB_MODE_INFO *const candidate = &candidate_mi->mbmi;
      int len = AOMMIN(xd->n8_w, mi_size_wide[candidate->sb_type]);
      if (use_step_16) len = AOMMAX(mi_size_wide[BLOCK_16X16], len);
      newmv_count += add_ref_mv_candidate(
          candidate_mi, candidate, rf, refmv_count, ref_mv_stack,
          cm->allow_high_precision_mv, len, block, mi_pos.col);
      i += len;
    } else {
      if (use_step_16)
        i += (mi_offset << 1);
      else
        i += mi_offset;
    }
  }

  return newmv_count;
}

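// Column counterpart of scan_row_mbmi(): walk the left neighbouring column at
// the given col_offset.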
static uint8_t scan_col_mbmi(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             const int mi_row, const int mi_col, int block,
                             const MV_REFERENCE_FRAME rf[2], int col_offset,
                             CANDIDATE_MV *ref_mv_stack, uint8_t *refmv_count) {
  const TileInfo *const tile = &xd->tile;
  int i;
  uint8_t newmv_count = 0;
  const int mi_offset = mi_size_wide[BLOCK_8X8];
#if CONFIG_CB4X4
  col_offset *= 2;
#endif

  for (i = 0; i < xd->n8_h && *refmv_count < MAX_REF_MV_STACK_SIZE;) {
    POSITION mi_pos;
#if CONFIG_CB4X4
    const int use_step_16 = (xd->n8_h >= 16);
#else
    const int use_step_16 = (xd->n8_h >= 8);
#endif

    mi_pos.row = i;
    mi_pos.col = col_offset;
    if (is_inside(tile, mi_col, mi_row, &mi_pos)) {
      const MODE_INFO *const candidate_mi =
          xd->mi[mi_pos.row * xd->mi_stride + mi_pos.col];
      const MB_MODE_INFO *const candidate = &candidate_mi->mbmi;
      int len = AOMMIN(xd->n8_h, mi_size_high[candidate->sb_type]);
      if (use_step_16) len = AOMMAX(mi_size_high[BLOCK_16X16], len);
      newmv_count += add_ref_mv_candidate(
          candidate_mi, candidate, rf, refmv_count, ref_mv_stack,
          cm->allow_high_precision_mv, len, block, mi_pos.col);
      i += len;
    } else {
      if (use_step_16)
        i += (mi_offset << 1);
      else
        i += mi_offset;
    }
  }

  return newmv_count;
}

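// Examine the single 8x8 neighbour at (row_offset, col_offset) relative to
// the current block and add its motion information to the candidate stack.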
static uint8_t scan_blk_mbmi(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             const int mi_row, const int mi_col, int block,
                             const MV_REFERENCE_FRAME rf[2], int row_offset,
                             int col_offset, CANDIDATE_MV *ref_mv_stack,
                             uint8_t *refmv_count) {
  const TileInfo *const tile = &xd->tile;
  POSITION mi_pos;
  uint8_t newmv_count = 0;

  mi_pos.row = row_offset;
  mi_pos.col = col_offset;

  if (is_inside(tile, mi_col, mi_row, &mi_pos) &&
      *refmv_count < MAX_REF_MV_STACK_SIZE) {
    const MODE_INFO *const candidate_mi =
        xd->mi[mi_pos.row * xd->mi_stride + mi_pos.col];
    const MB_MODE_INFO *const candidate = &candidate_mi->mbmi;
    const int len = mi_size_wide[BLOCK_8X8];

    newmv_count += add_ref_mv_candidate(
        candidate_mi, candidate, rf, refmv_count, ref_mv_stack,
        cm->allow_high_precision_mv, len, block, mi_pos.col);
  }  // Analyze a single 8x8 block motion information.

  return newmv_count;
}

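// Decide whether the block starting at (mi_row, mi_col) with size bs (in mode
// info units) has an already-decoded top-right neighbour available.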
static int has_top_right(const MACROBLOCKD *xd, int mi_row, int mi_col,
                         int bs) {
  const int mask_row = mi_row & MAX_MIB_MASK;
  const int mask_col = mi_col & MAX_MIB_MASK;

  // In a split partition, all blocks apart from the bottom right have a
  // top right.
  int has_tr = !((mask_row & bs) && (mask_col & bs));

  // bs > 0 and bs is a power of 2
  assert(bs > 0 && !(bs & (bs - 1)));

  // For each 4x4 group of blocks, when the bottom right is decoded the blocks
  // to the right have not been decoded yet, so the bottom right does not have
  // a top right.
  while (bs < MAX_MIB_SIZE) {
    if (mask_col & bs) {
      if ((mask_col & (2 * bs)) && (mask_row & (2 * bs))) {
        has_tr = 0;
        break;
      }
    } else {
      break;
    }
    bs <<= 1;
  }

  // The left-hand block of two vertical rectangles always has a top right (as
  // the block above will have been decoded).
  if (xd->n8_w < xd->n8_h)
    if (!xd->is_sec_rect) has_tr = 1;

  // The bottom block of two horizontal rectangles never has a top right (as
  // the block to its right won't have been decoded).
  if (xd->n8_w > xd->n8_h)
    if (xd->is_sec_rect) has_tr = 0;

#if CONFIG_EXT_PARTITION_TYPES
  // The bottom-left square of a Vertical A partition does not have a top
  // right, as it is decoded before the right-hand rectangle of the partition.
  if (xd->mi[0]->mbmi.partition == PARTITION_VERT_A)
    if ((mask_row & bs) && !(mask_col & bs)) has_tr = 0;
#endif  // CONFIG_EXT_PARTITION_TYPES

  return has_tr;
}

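// Add a temporal candidate taken from the co-located position in the previous
// frame's motion field. Returns the number of co-located references that use
// the same reference frame.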
static int add_col_ref_mv(const AV1_COMMON *cm,
                          const MV_REF *prev_frame_mvs_base,
                          const MACROBLOCKD *xd, int mi_row, int mi_col,
                          MV_REFERENCE_FRAME ref_frame, int blk_row,
                          int blk_col, uint8_t *refmv_count,
                          CANDIDATE_MV *ref_mv_stack, int16_t *mode_context) {
  const MV_REF *prev_frame_mvs =
      prev_frame_mvs_base + blk_row * cm->mi_cols + blk_col;
  POSITION mi_pos;
  int ref, idx;
  int coll_blk_count = 0;
  const int weight_unit = mi_size_wide[BLOCK_8X8];

  mi_pos.row = blk_row;
  mi_pos.col = blk_col;

  if (!is_inside(&xd->tile, mi_col, mi_row, &mi_pos)) return coll_blk_count;

  for (ref = 0; ref < 2; ++ref) {
    if (prev_frame_mvs->ref_frame[ref] == ref_frame) {
      int_mv this_refmv = prev_frame_mvs->mv[ref];
      lower_mv_precision(&this_refmv.as_mv, cm->allow_high_precision_mv);

      if (abs(this_refmv.as_mv.row) >= 16 || abs(this_refmv.as_mv.col) >= 16)
        mode_context[ref_frame] |= (1 << ZEROMV_OFFSET);

      for (idx = 0; idx < *refmv_count; ++idx)
        if (this_refmv.as_int == ref_mv_stack[idx].this_mv.as_int) break;

      if (idx < *refmv_count) ref_mv_stack[idx].weight += 2 * weight_unit;

      if (idx == *refmv_count && *refmv_count < MAX_REF_MV_STACK_SIZE) {
        ref_mv_stack[idx].this_mv.as_int = this_refmv.as_int;
        ref_mv_stack[idx].pred_diff[0] =
            av1_get_pred_diff_ctx(prev_frame_mvs->pred_mv[ref], this_refmv);
        ref_mv_stack[idx].weight = 2 * weight_unit;
        ++(*refmv_count);
      }

      ++coll_blk_count;
    }
  }

  return coll_blk_count;
}

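// Build the ranked candidate list for ref_frame: scan the nearest spatial
// neighbours, add temporal candidates from the previous frame, extend the
// scan to the outer rows/columns, derive the NEWMV/REFMV mode context, and
// finally sort the stack by weight before clamping the resulting MVs.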
static void setup_ref_mv_list(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                              MV_REFERENCE_FRAME ref_frame,
                              uint8_t *refmv_count, CANDIDATE_MV *ref_mv_stack,
                              int_mv *mv_ref_list, int block, int mi_row,
                              int mi_col, int16_t *mode_context) {
  int idx, nearest_refmv_count = 0;
  uint8_t newmv_count = 0;
  CANDIDATE_MV tmp_mv;
  int len, nr_len;

  const MV_REF *const prev_frame_mvs_base =
      cm->use_prev_frame_mvs
          ? cm->prev_frame->mvs + mi_row * cm->mi_cols + mi_col
          : NULL;

  const int bs = AOMMAX(xd->n8_w, xd->n8_h);
  const int has_tr = has_top_right(xd, mi_row, mi_col, bs);
  MV_REFERENCE_FRAME rf[2];

  av1_set_ref_frame(rf, ref_frame);
  mode_context[ref_frame] = 0;
  *refmv_count = 0;

  // Scan the first above row mode info.
  newmv_count += scan_row_mbmi(cm, xd, mi_row, mi_col, block, rf, -1,
                               ref_mv_stack, refmv_count);
  // Scan the first left column mode info.
  newmv_count += scan_col_mbmi(cm, xd, mi_row, mi_col, block, rf, -1,
                               ref_mv_stack, refmv_count);

  // Check top-right boundary
  if (has_tr)
    newmv_count += scan_blk_mbmi(cm, xd, mi_row, mi_col, block, rf, -1,
                                 xd->n8_w, ref_mv_stack, refmv_count);

  nearest_refmv_count = *refmv_count;

  for (idx = 0; idx < nearest_refmv_count; ++idx)
    ref_mv_stack[idx].weight += REF_CAT_LEVEL;

  if (prev_frame_mvs_base && cm->show_frame && cm->last_show_frame &&
      rf[1] == NONE) {
    int blk_row, blk_col;
    int coll_blk_count = 0;
    const int mi_step = mi_size_wide[BLOCK_16X16];

    for (blk_row = 0; blk_row < xd->n8_h; blk_row += mi_step) {
      for (blk_col = 0; blk_col < xd->n8_w; blk_col += mi_step) {
        coll_blk_count += add_col_ref_mv(
            cm, prev_frame_mvs_base, xd, mi_row, mi_col, ref_frame, blk_row,
            blk_col, refmv_count, ref_mv_stack, mode_context);
      }
    }
    if (coll_blk_count == 0) mode_context[ref_frame] |= (1 << ZEROMV_OFFSET);
  } else {
    mode_context[ref_frame] |= (1 << ZEROMV_OFFSET);
  }

  // Scan the second outer area.
  scan_blk_mbmi(cm, xd, mi_row, mi_col, block, rf, -1, -1, ref_mv_stack,
                refmv_count);
  for (idx = 2; idx <= 3; ++idx) {
    scan_row_mbmi(cm, xd, mi_row, mi_col, block, rf, -idx, ref_mv_stack,
                  refmv_count);
    scan_col_mbmi(cm, xd, mi_row, mi_col, block, rf, -idx, ref_mv_stack,
                  refmv_count);
  }
  scan_col_mbmi(cm, xd, mi_row, mi_col, block, rf, -4, ref_mv_stack,
                refmv_count);

  switch (nearest_refmv_count) {
    case 0:
      mode_context[ref_frame] |= 0;
      if (*refmv_count >= 1) mode_context[ref_frame] |= 1;

      if (*refmv_count == 1)
        mode_context[ref_frame] |= (1 << REFMV_OFFSET);
      else if (*refmv_count >= 2)
        mode_context[ref_frame] |= (2 << REFMV_OFFSET);
      break;
    case 1:
      mode_context[ref_frame] |= (newmv_count > 0) ? 2 : 3;

      if (*refmv_count == 1)
        mode_context[ref_frame] |= (3 << REFMV_OFFSET);
      else if (*refmv_count >= 2)
        mode_context[ref_frame] |= (4 << REFMV_OFFSET);
      break;

    case 2:
    default:
      if (newmv_count >= 2)
        mode_context[ref_frame] |= 4;
      else if (newmv_count == 1)
        mode_context[ref_frame] |= 5;
      else
        mode_context[ref_frame] |= 6;

      mode_context[ref_frame] |= (5 << REFMV_OFFSET);
      break;
  }

  // Rank the likelihood and assign nearest and near mvs.
  len = nearest_refmv_count;
  while (len > 0) {
    nr_len = 0;
    for (idx = 1; idx < len; ++idx) {
      if (ref_mv_stack[idx - 1].weight < ref_mv_stack[idx].weight) {
        tmp_mv = ref_mv_stack[idx - 1];
        ref_mv_stack[idx - 1] = ref_mv_stack[idx];
        ref_mv_stack[idx] = tmp_mv;
        nr_len = idx;
      }
    }
    len = nr_len;
  }

  len = *refmv_count;
  while (len > nearest_refmv_count) {
    nr_len = nearest_refmv_count;
    for (idx = nearest_refmv_count + 1; idx < len; ++idx) {
      if (ref_mv_stack[idx - 1].weight < ref_mv_stack[idx].weight) {
        tmp_mv = ref_mv_stack[idx - 1];
        ref_mv_stack[idx - 1] = ref_mv_stack[idx];
        ref_mv_stack[idx] = tmp_mv;
        nr_len = idx;
      }
    }
    len = nr_len;
  }

  if (rf[1] > NONE) {
    for (idx = 0; idx < *refmv_count; ++idx) {
      clamp_mv_ref(&ref_mv_stack[idx].this_mv.as_mv, xd->n8_w << MI_SIZE_LOG2,
                   xd->n8_h << MI_SIZE_LOG2, xd);
      clamp_mv_ref(&ref_mv_stack[idx].comp_mv.as_mv, xd->n8_w << MI_SIZE_LOG2,
                   xd->n8_h << MI_SIZE_LOG2, xd);
    }
  } else {
    for (idx = 0; idx < AOMMIN(MAX_MV_REF_CANDIDATES, *refmv_count); ++idx) {
      mv_ref_list[idx].as_int = ref_mv_stack[idx].this_mv.as_int;
      clamp_mv_ref(&mv_ref_list[idx].as_mv, xd->n8_w << MI_SIZE_LOG2,
                   xd->n8_h << MI_SIZE_LOG2, xd);
    }
  }
}
#endif

// This function searches the neighbourhood of a given MB/SB
// to try and find candidate reference vectors.
static void find_mv_refs_idx(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             MODE_INFO *mi, MV_REFERENCE_FRAME ref_frame,
                             int_mv *mv_ref_list, int block, int mi_row,
                             int mi_col, find_mv_refs_sync sync,
                             void *const data, int16_t *mode_context,
                             int_mv zeromv) {
  const int *ref_sign_bias = cm->ref_frame_sign_bias;
  int i, refmv_count = 0;
#if !CONFIG_REF_MV
  const POSITION *const mv_ref_search = mv_ref_blocks[mi->mbmi.sb_type];
#endif
  int different_ref_found = 0;
  int context_counter = 0;
  const MV_REF *const prev_frame_mvs =
      cm->use_prev_frame_mvs
          ? cm->prev_frame->mvs + mi_row * cm->mi_cols + mi_col
          : NULL;
  const TileInfo *const tile = &xd->tile;
  const BLOCK_SIZE bsize = mi->mbmi.sb_type;
  const int bw = block_size_wide[AOMMAX(bsize, BLOCK_8X8)];
  const int bh = block_size_high[AOMMAX(bsize, BLOCK_8X8)];
#if CONFIG_REF_MV
  POSITION mv_ref_search[MVREF_NEIGHBOURS];
  const int num_8x8_blocks_wide = num_8x8_blocks_wide_lookup[bsize];
  const int num_8x8_blocks_high = num_8x8_blocks_high_lookup[bsize];
  mv_ref_search[0].row = num_8x8_blocks_high - 1;
  mv_ref_search[0].col = -1;
  mv_ref_search[1].row = -1;
  mv_ref_search[1].col = num_8x8_blocks_wide - 1;
  mv_ref_search[2].row = -1;
  mv_ref_search[2].col = (num_8x8_blocks_wide - 1) >> 1;
  mv_ref_search[3].row = (num_8x8_blocks_high - 1) >> 1;
  mv_ref_search[3].col = -1;
  mv_ref_search[4].row = -1;
  mv_ref_search[4].col = -1;
  mv_ref_search[5].row = -1;
  mv_ref_search[5].col = num_8x8_blocks_wide;
  mv_ref_search[6].row = num_8x8_blocks_high;
  mv_ref_search[6].col = -1;
  mv_ref_search[7].row = -1;
  mv_ref_search[7].col = -3;
  mv_ref_search[8].row = num_8x8_blocks_high - 1;
  mv_ref_search[8].col = -3;

#if CONFIG_CB4X4
  for (i = 0; i < MVREF_NEIGHBOURS; ++i) {
    mv_ref_search[i].row *= 2;
    mv_ref_search[i].col *= 2;
  }
#endif  // CONFIG_CB4X4
#endif  // CONFIG_REF_MV

  // The nearest 2 blocks are treated differently
  // if the size < 8x8 we get the mv from the bmi substructure,
  // and we also need to keep a mode count.
  for (i = 0; i < 2; ++i) {
    const POSITION *const mv_ref = &mv_ref_search[i];
    if (is_inside(tile, mi_col, mi_row, mv_ref)) {
      const MODE_INFO *const candidate_mi =
          xd->mi[mv_ref->col + mv_ref->row * xd->mi_stride];
      const MB_MODE_INFO *const candidate = &candidate_mi->mbmi;
      // Keep counts for entropy encoding.
      context_counter += mode_2_counter[candidate->mode];
      different_ref_found = 1;

      if (candidate->ref_frame[0] == ref_frame)
        ADD_MV_REF_LIST(get_sub_block_mv(candidate_mi, 0, mv_ref->col, block),
                        refmv_count, mv_ref_list, bw, bh, xd, Done);
      else if (candidate->ref_frame[1] == ref_frame)
        ADD_MV_REF_LIST(get_sub_block_mv(candidate_mi, 1, mv_ref->col, block),
                        refmv_count, mv_ref_list, bw, bh, xd, Done);
    }
  }

  // Check the rest of the neighbors in much the same way
  // as before except we don't need to keep track of sub blocks or
  // mode counts.
  for (; i < MVREF_NEIGHBOURS; ++i) {
    const POSITION *const mv_ref = &mv_ref_search[i];
    if (is_inside(tile, mi_col, mi_row, mv_ref)) {
      const MB_MODE_INFO *const candidate =
          !xd->mi[mv_ref->col + mv_ref->row * xd->mi_stride]
              ? NULL
              : &xd->mi[mv_ref->col + mv_ref->row * xd->mi_stride]->mbmi;
#if CONFIG_REF_MV
      if (candidate == NULL) continue;
      if ((mi_row % MAX_MIB_SIZE) + mv_ref->row >= MAX_MIB_SIZE ||
          (mi_col % MAX_MIB_SIZE) + mv_ref->col >= MAX_MIB_SIZE)
        continue;
#endif
      different_ref_found = 1;

      if (candidate->ref_frame[0] == ref_frame)
        ADD_MV_REF_LIST(candidate->mv[0], refmv_count, mv_ref_list, bw, bh, xd,
                        Done);
      else if (candidate->ref_frame[1] == ref_frame)
        ADD_MV_REF_LIST(candidate->mv[1], refmv_count, mv_ref_list, bw, bh, xd,
                        Done);
    }
  }

// TODO(hkuang): Remove this sync after fixing pthread_cond_broadcast
// on windows platform. The sync here is unnecessary if use_prev_frame_mvs
// is 0. But after removing it, there will be a hang in the unit test on
// windows due to several threads waiting for a thread's signal.
#if defined(_WIN32) && !HAVE_PTHREAD_H
  if (cm->frame_parallel_decode && sync != NULL) {
    sync(data, mi_row);
  }
#endif

  // Check the last frame's mode and mv info.
  if (cm->use_prev_frame_mvs) {
    // Synchronize here for frame parallel decode if sync function is provided.
    if (cm->frame_parallel_decode && sync != NULL) {
      sync(data, mi_row);
    }

    if (prev_frame_mvs->ref_frame[0] == ref_frame) {
      ADD_MV_REF_LIST(prev_frame_mvs->mv[0], refmv_count, mv_ref_list, bw, bh,
                      xd, Done);
    } else if (prev_frame_mvs->ref_frame[1] == ref_frame) {
      ADD_MV_REF_LIST(prev_frame_mvs->mv[1], refmv_count, mv_ref_list, bw, bh,
                      xd, Done);
    }
  }

  // Since we couldn't find 2 mvs from the same reference frame
  // go back through the neighbors and find motion vectors from
  // different reference frames.
  if (different_ref_found) {
    for (i = 0; i < MVREF_NEIGHBOURS; ++i) {
      const POSITION *mv_ref = &mv_ref_search[i];
      if (is_inside(tile, mi_col, mi_row, mv_ref)) {
        const MB_MODE_INFO *const candidate =
            !xd->mi[mv_ref->col + mv_ref->row * xd->mi_stride]
                ? NULL
                : &xd->mi[mv_ref->col + mv_ref->row * xd->mi_stride]->mbmi;
#if CONFIG_REF_MV
        if (candidate == NULL) continue;
        if ((mi_row % MAX_MIB_SIZE) + mv_ref->row >= MAX_MIB_SIZE ||
            (mi_col % MAX_MIB_SIZE) + mv_ref->col >= MAX_MIB_SIZE)
          continue;
#endif

        // If the candidate is INTRA we don't want to consider its mv.
        IF_DIFF_REF_FRAME_ADD_MV(candidate, ref_frame, ref_sign_bias,
                                 refmv_count, mv_ref_list, bw, bh, xd, Done);
      }
    }
  }

  // Since we still don't have a candidate we'll try the last frame.
  if (cm->use_prev_frame_mvs) {
    if (prev_frame_mvs->ref_frame[0] != ref_frame &&
        prev_frame_mvs->ref_frame[0] > INTRA_FRAME) {
      int_mv mv = prev_frame_mvs->mv[0];
      if (ref_sign_bias[prev_frame_mvs->ref_frame[0]] !=
          ref_sign_bias[ref_frame]) {
        mv.as_mv.row *= -1;
        mv.as_mv.col *= -1;
      }
      ADD_MV_REF_LIST(mv, refmv_count, mv_ref_list, bw, bh, xd, Done);
    }

    if (prev_frame_mvs->ref_frame[1] > INTRA_FRAME &&
        prev_frame_mvs->ref_frame[1] != ref_frame) {
      int_mv mv = prev_frame_mvs->mv[1];
      if (ref_sign_bias[prev_frame_mvs->ref_frame[1]] !=
          ref_sign_bias[ref_frame]) {
        mv.as_mv.row *= -1;
        mv.as_mv.col *= -1;
      }
      ADD_MV_REF_LIST(mv, refmv_count, mv_ref_list, bw, bh, xd, Done);
    }
  }

Done:
  if (mode_context)
    mode_context[ref_frame] = counter_to_context[context_counter];
  for (i = refmv_count; i < MAX_MV_REF_CANDIDATES; ++i)
    mv_ref_list[i].as_int = zeromv.as_int;
}

#if CONFIG_EXT_INTER
// This function keeps a mode count for a given MB/SB
void av1_update_mv_context(const MACROBLOCKD *xd, MODE_INFO *mi,
                           MV_REFERENCE_FRAME ref_frame, int_mv *mv_ref_list,
                           int block, int mi_row, int mi_col,
                           int16_t *mode_context) {
  int i, refmv_count = 0;
#if !CONFIG_REF_MV
  const POSITION *const mv_ref_search = mv_ref_blocks[mi->mbmi.sb_type];
#endif
  int context_counter = 0;
  const int bw = num_8x8_blocks_wide_lookup[mi->mbmi.sb_type] << 3;
  const int bh = num_8x8_blocks_high_lookup[mi->mbmi.sb_type] << 3;
  const TileInfo *const tile = &xd->tile;
#if CONFIG_REF_MV
  POSITION mv_ref_search[MVREF_NEIGHBOURS];
  const int num_8x8_blocks_wide = bw >> 3;
  const int num_8x8_blocks_high = bh >> 3;
  mv_ref_search[0].row = num_8x8_blocks_high - 1;
  mv_ref_search[0].col = -1;
  mv_ref_search[1].row = -1;
  mv_ref_search[1].col = num_8x8_blocks_wide - 1;
  mv_ref_search[2].row = -1;
  mv_ref_search[2].col = (num_8x8_blocks_wide - 1) >> 1;
  mv_ref_search[3].row = (num_8x8_blocks_high - 1) >> 1;
  mv_ref_search[3].col = -1;
  mv_ref_search[4].row = -1;
  mv_ref_search[4].col = -1;
  mv_ref_search[5].row = -1;
  mv_ref_search[5].col = num_8x8_blocks_wide;
  mv_ref_search[6].row = num_8x8_blocks_high;
  mv_ref_search[6].col = -1;
  mv_ref_search[7].row = -1;
  mv_ref_search[7].col = -3;
  mv_ref_search[8].row = num_8x8_blocks_high - 1;
  mv_ref_search[8].col = -3;
#endif

  // Blank the reference vector list
  memset(mv_ref_list, 0, sizeof(*mv_ref_list) * MAX_MV_REF_CANDIDATES);

  // The nearest 2 blocks are examined only.
  // If the size < 8x8, we get the mv from the bmi substructure;
  for (i = 0; i < 2; ++i) {
    const POSITION *const mv_ref = &mv_ref_search[i];
    if (is_inside(tile, mi_col, mi_row, mv_ref)) {
      const MODE_INFO *const candidate_mi =
          xd->mi[mv_ref->col + mv_ref->row * xd->mi_stride];
      const MB_MODE_INFO *const candidate = &candidate_mi->mbmi;

      // Keep counts for entropy encoding.
      context_counter += mode_2_counter[candidate->mode];

      if (candidate->ref_frame[0] == ref_frame) {
        ADD_MV_REF_LIST(get_sub_block_mv(candidate_mi, 0, mv_ref->col, block),
                        refmv_count, mv_ref_list, bw, bh, xd, Done);
      } else if (candidate->ref_frame[1] == ref_frame) {
        ADD_MV_REF_LIST(get_sub_block_mv(candidate_mi, 1, mv_ref->col, block),
                        refmv_count, mv_ref_list, bw, bh, xd, Done);
      }
    }
  }

Done:

  if (mode_context)
    mode_context[ref_frame] = counter_to_context[context_counter];
}
#endif  // CONFIG_EXT_INTER

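// Top-level entry point: gather the legacy (nearest/near) candidate list and,
// with CONFIG_REF_MV, the ranked reference MV stack, using the global motion
// vector as the zero-MV baseline when CONFIG_GLOBAL_MOTION is enabled.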
void av1_find_mv_refs(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                      MODE_INFO *mi, MV_REFERENCE_FRAME ref_frame,
#if CONFIG_REF_MV
                      uint8_t *ref_mv_count, CANDIDATE_MV *ref_mv_stack,
#if CONFIG_EXT_INTER
                      int16_t *compound_mode_context,
#endif  // CONFIG_EXT_INTER
#endif
                      int_mv *mv_ref_list, int mi_row, int mi_col,
                      find_mv_refs_sync sync, void *const data,
                      int16_t *mode_context) {
  int_mv zeromv[2];
#if CONFIG_REF_MV
  int idx, all_zero = 1;
#endif
#if CONFIG_GLOBAL_MOTION
  MV_REFERENCE_FRAME rf[2];
#endif
#if CONFIG_EXT_INTER
  av1_update_mv_context(xd, mi, ref_frame, mv_ref_list, -1, mi_row, mi_col,
#if CONFIG_REF_MV
                        compound_mode_context);
#else
                        mode_context);
#endif  // CONFIG_REF_MV
#endif  // CONFIG_EXT_INTER

#if CONFIG_GLOBAL_MOTION
  av1_set_ref_frame(rf, ref_frame);
  zeromv[0].as_int = gm_get_motion_vector(&cm->global_motion[rf[0]],
                                          cm->allow_high_precision_mv)
                         .as_int;
  zeromv[1].as_int = (rf[1] != NONE)
                         ? gm_get_motion_vector(&cm->global_motion[rf[1]],
                                                cm->allow_high_precision_mv)
                               .as_int
                         : 0;
#else
  zeromv[0].as_int = zeromv[1].as_int = 0;
#endif

#if CONFIG_REF_MV
  if (ref_frame <= ALTREF_FRAME)
#endif  // CONFIG_REF_MV
    find_mv_refs_idx(cm, xd, mi, ref_frame, mv_ref_list, -1, mi_row, mi_col,
                     sync, data, mode_context, zeromv[0]);

#if CONFIG_REF_MV
  setup_ref_mv_list(cm, xd, ref_frame, ref_mv_count, ref_mv_stack, mv_ref_list,
                    -1, mi_row, mi_col, mode_context);
  /* Note: If global motion is enabled, then we want to set the ALL_ZERO flag
     iff all of the MVs we could generate with NEARMV/NEARESTMV are equivalent
     to the global motion vector.
     Note: For the following to work properly, the encoder can't throw away
     any global motion models after calling this function, even if they are
     unused. Instead we rely on the recode loop: If any non-IDENTITY model
     is unused, the whole frame will be re-encoded without it.
     The problem is that, otherwise, we can end up in the following situation:
     * Encoder has a global motion model with nonzero translational part,
       and all candidate MVs are zero. So the ALL_ZERO flag is unset.
     * Encoder throws away global motion because it is never used.
     * Decoder sees that there is no global motion and all candidate MVs are
       zero, so sets the ALL_ZERO flag.
     * This leads to an encode/decode mismatch.
  */
  if (*ref_mv_count >= 2) {
    for (idx = 0; idx < AOMMIN(3, *ref_mv_count); ++idx) {
      if (ref_mv_stack[idx].this_mv.as_int != zeromv[0].as_int) all_zero = 0;
      if (ref_frame > ALTREF_FRAME)
        if (ref_mv_stack[idx].comp_mv.as_int != zeromv[1].as_int) all_zero = 0;
    }
  } else if (ref_frame <= ALTREF_FRAME) {
    for (idx = 0; idx < MAX_MV_REF_CANDIDATES; ++idx)
      if (mv_ref_list[idx].as_int != zeromv[0].as_int) all_zero = 0;
  }

  if (all_zero) mode_context[ref_frame] |= (1 << ALL_ZERO_FLAG_OFFSET);
#endif
}

void av1_find_best_ref_mvs(int allow_hp, int_mv *mvlist, int_mv *nearest_mv,
                           int_mv *near_mv) {
  int i;
  // Make sure all the candidates are properly clamped etc
  for (i = 0; i < MAX_MV_REF_CANDIDATES; ++i) {
    lower_mv_precision(&mvlist[i].as_mv, allow_hp);
  }
  *nearest_mv = mvlist[0];
  *near_mv = mvlist[1];
}

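// Derive nearest/near MVs for a sub-8x8 partition index, combining the
// regular candidate list with the MVs of previously coded sub-blocks.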
void av1_append_sub8x8_mvs_for_idx(const AV1_COMMON *cm, MACROBLOCKD *xd,
                                   int block, int ref, int mi_row, int mi_col,
#if CONFIG_REF_MV
                                   CANDIDATE_MV *ref_mv_stack,
                                   uint8_t *ref_mv_count,
#endif
#if CONFIG_EXT_INTER
                                   int_mv *mv_list,
#endif  // CONFIG_EXT_INTER
                                   int_mv *nearest_mv, int_mv *near_mv) {
#if !CONFIG_EXT_INTER
  int_mv mv_list[MAX_MV_REF_CANDIDATES];
#endif  // !CONFIG_EXT_INTER
  MODE_INFO *const mi = xd->mi[0];
  b_mode_info *bmi = mi->bmi;
  int n;
  int_mv zeromv;
#if CONFIG_REF_MV
  CANDIDATE_MV tmp_mv;
  uint8_t idx;
  uint8_t above_count = 0, left_count = 0;
  MV_REFERENCE_FRAME rf[2] = { mi->mbmi.ref_frame[ref], NONE };
  *ref_mv_count = 0;
#endif

  assert(MAX_MV_REF_CANDIDATES == 2);

#if CONFIG_GLOBAL_MOTION
  zeromv.as_int =
      gm_get_motion_vector(&cm->global_motion[ref], cm->allow_high_precision_mv)
          .as_int;
#else
  zeromv.as_int = 0;
#endif
  find_mv_refs_idx(cm, xd, mi, mi->mbmi.ref_frame[ref], mv_list, block, mi_row,
                   mi_col, NULL, NULL, NULL, zeromv);

#if CONFIG_REF_MV
  scan_blk_mbmi(cm, xd, mi_row, mi_col, block, rf, -1, 0, ref_mv_stack,
                ref_mv_count);
  above_count = *ref_mv_count;

  scan_blk_mbmi(cm, xd, mi_row, mi_col, block, rf, 0, -1, ref_mv_stack,
                ref_mv_count);
  left_count = *ref_mv_count - above_count;

  if (above_count > 1 && left_count > 0) {
    tmp_mv = ref_mv_stack[1];
    ref_mv_stack[1] = ref_mv_stack[above_count];
    ref_mv_stack[above_count] = tmp_mv;
  }

  for (idx = 0; idx < *ref_mv_count; ++idx)
    clamp_mv_ref(&ref_mv_stack[idx].this_mv.as_mv, xd->n8_w << MI_SIZE_LOG2,
                 xd->n8_h << MI_SIZE_LOG2, xd);

  for (idx = 0; idx < AOMMIN(MAX_MV_REF_CANDIDATES, *ref_mv_count); ++idx)
    mv_list[idx].as_int = ref_mv_stack[idx].this_mv.as_int;
#endif

  near_mv->as_int = 0;
  switch (block) {
    case 0:
      nearest_mv->as_int = mv_list[0].as_int;
      near_mv->as_int = mv_list[1].as_int;
      break;
    case 1:
    case 2:
      nearest_mv->as_int = bmi[0].as_mv[ref].as_int;
      for (n = 0; n < MAX_MV_REF_CANDIDATES; ++n)
        if (nearest_mv->as_int != mv_list[n].as_int) {
          near_mv->as_int = mv_list[n].as_int;
          break;
        }
      break;
    case 3: {
      int_mv candidates[2 + MAX_MV_REF_CANDIDATES];
      candidates[0] = bmi[1].as_mv[ref];
      candidates[1] = bmi[0].as_mv[ref];
      candidates[2] = mv_list[0];
      candidates[3] = mv_list[1];

      nearest_mv->as_int = bmi[2].as_mv[ref].as_int;
      for (n = 0; n < 2 + MAX_MV_REF_CANDIDATES; ++n)
        if (nearest_mv->as_int != candidates[n].as_int) {
          near_mv->as_int = candidates[n].as_int;
          break;
        }
      break;
    }
    default: assert(0 && "Invalid block index.");
  }
}

#if CONFIG_WARPED_MOTION
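// Collect coordinate samples (pts) and their projections in the reference
// frame (pts_inref) from neighbouring blocks that use the same reference
// frame, so that a warped motion model can be fitted to them. Returns the
// number of samples gathered, or 0 if the neighbourhood provides no usable
// motion.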
int findSamples(const AV1_COMMON *cm, MACROBLOCKD *xd, int mi_row, int mi_col,
                double *pts, double *pts_inref) {
  MB_MODE_INFO *const mbmi0 = &(xd->mi[0]->mbmi);
  int ref_frame = mbmi0->ref_frame[0];
  int up_available = xd->up_available;
  int left_available = xd->left_available;
  int i, mi_step, np = 0;
  int mvasint[100];
  int mvnumber = 0;
  int global_offset_c = mi_col * 8;
  int global_offset_r = mi_row * 8;
  int samples_per_neighbor = 4;

  // scan the above row
  if (up_available) {
    for (i = 0; i < AOMMIN(xd->n8_w, cm->mi_cols - mi_col); i += mi_step) {
      int mi_row_offset = -1;
      int mi_col_offset = i;

      MODE_INFO *mi = xd->mi[mi_col_offset + mi_row_offset * xd->mi_stride];
      MB_MODE_INFO *mbmi = &mi->mbmi;

      mi_step = AOMMIN(xd->n8_w, num_8x8_blocks_wide_lookup[mbmi->sb_type]);

      if (mbmi->ref_frame[0] == ref_frame && mbmi->ref_frame[1] == NONE) {
        int bw = block_size_wide[mbmi->sb_type];
        int bh = block_size_high[mbmi->sb_type];
        int mv_row = mbmi->mv[0].as_mv.row;
        int mv_col = mbmi->mv[0].as_mv.col;
        int cr_offset = -AOMMAX(bh, 8) / 2 - 1;
        int cc_offset = i * 8 + AOMMAX(bw, 8) / 2 - 1;
        int j;
        int pixelperblock = samples_per_neighbor;

        mvasint[mvnumber] = mbmi->mv[0].as_int;
        mvnumber++;

        for (j = 0; j < pixelperblock; j++) {
          int r_offset = j / 2;
          int c_offset = j % 2;

          pts[0] = (double)(cc_offset + c_offset + global_offset_c);
          pts[1] = (double)(cr_offset + r_offset + global_offset_r);

          if (mbmi->motion_mode == WARPED_CAUSAL) {
            int ipts[2], ipts_inref[2];
            ipts[0] = cc_offset + c_offset + global_offset_c;
            ipts[1] = cr_offset + r_offset + global_offset_r;

            project_points(&mbmi->wm_params[0], ipts, ipts_inref, 1, 2, 2, 0,
                           0);
            pts_inref[0] =
                (double)ipts_inref[0] / (double)WARPEDPIXEL_PREC_SHIFTS;
            pts_inref[1] =
                (double)ipts_inref[1] / (double)WARPEDPIXEL_PREC_SHIFTS;
          } else {
            pts_inref[0] = pts[0] + (double)(mv_col)*0.125;
            pts_inref[1] = pts[1] + (double)(mv_row)*0.125;
          }

          pts += 2;
          pts_inref += 2;
        }
        np += pixelperblock;
      }
    }
  }

  // scan the left column
  if (left_available) {
    for (i = 0; i < AOMMIN(xd->n8_h, cm->mi_rows - mi_row); i += mi_step) {
      int mi_row_offset = i;
      int mi_col_offset = -1;

      MODE_INFO *mi = xd->mi[mi_col_offset + mi_row_offset * xd->mi_stride];
      MB_MODE_INFO *mbmi = &mi->mbmi;

      mi_step = AOMMIN(xd->n8_h, num_8x8_blocks_high_lookup[mbmi->sb_type]);

      if (mbmi->ref_frame[0] == ref_frame && mbmi->ref_frame[1] == NONE) {
        int bw = block_size_wide[mbmi->sb_type];
        int bh = block_size_high[mbmi->sb_type];
        int mv_row = mbmi->mv[0].as_mv.row;
        int mv_col = mbmi->mv[0].as_mv.col;
        int cr_offset = i * 8 + AOMMAX(bh, 8) / 2 - 1;
        int cc_offset = -AOMMAX(bw, 8) / 2 - 1;
        int j;
        int pixelperblock = samples_per_neighbor;

        mvasint[mvnumber] = mbmi->mv[0].as_int;
        mvnumber++;

        for (j = 0; j < pixelperblock; j++) {
          int r_offset = j / 2;
          int c_offset = j % 2;

          pts[0] = (double)(cc_offset + c_offset + global_offset_c);
          pts[1] = (double)(cr_offset + r_offset + global_offset_r);

          if (mbmi->motion_mode == WARPED_CAUSAL) {
            int ipts[2], ipts_inref[2];
            ipts[0] = cc_offset + c_offset + global_offset_c;
            ipts[1] = cr_offset + r_offset + global_offset_r;

            project_points(&mbmi->wm_params[0], ipts, ipts_inref, 1, 2, 2, 0,
                           0);
            pts_inref[0] =
                (double)ipts_inref[0] / (double)WARPEDPIXEL_PREC_SHIFTS;
            pts_inref[1] =
                (double)ipts_inref[1] / (double)WARPEDPIXEL_PREC_SHIFTS;
          } else {
            pts_inref[0] = pts[0] + (double)(mv_col)*0.125;
            pts_inref[1] = pts[1] + (double)(mv_row)*0.125;
          }

          pts += 2;
          pts_inref += 2;
        }
        np += pixelperblock;
      }
    }
  }

  if (left_available && up_available) {
    int mi_row_offset = -1;
    int mi_col_offset = -1;

    MODE_INFO *mi = xd->mi[mi_col_offset + mi_row_offset * xd->mi_stride];
    MB_MODE_INFO *mbmi = &mi->mbmi;

    if (mbmi->ref_frame[0] == ref_frame && mbmi->ref_frame[1] == NONE) {
      int bw = block_size_wide[mbmi->sb_type];
      int bh = block_size_high[mbmi->sb_type];
      int mv_row = mbmi->mv[0].as_mv.row;
      int mv_col = mbmi->mv[0].as_mv.col;
      int cr_offset = -AOMMAX(bh, 8) / 2 - 1;
      int cc_offset = -AOMMAX(bw, 8) / 2 - 1;
      int j;
      int pixelperblock = samples_per_neighbor;

      mvasint[mvnumber] = mbmi->mv[0].as_int;
      mvnumber++;

      for (j = 0; j < pixelperblock; j++) {
        int r_offset = j / 2;
        int c_offset = j % 2;

        pts[0] = (double)(cc_offset + c_offset + global_offset_c);
        pts[1] = (double)(cr_offset + r_offset + global_offset_r);

        if (mbmi->motion_mode == WARPED_CAUSAL) {
          int ipts[2], ipts_inref[2];
          ipts[0] = cc_offset + c_offset + global_offset_c;
          ipts[1] = cr_offset + r_offset + global_offset_r;

          project_points(&mbmi->wm_params[0], ipts, ipts_inref, 1, 2, 2, 0, 0);
          pts_inref[0] =
              (double)ipts_inref[0] / (double)WARPEDPIXEL_PREC_SHIFTS;
          pts_inref[1] =
              (double)ipts_inref[1] / (double)WARPEDPIXEL_PREC_SHIFTS;
        } else {
          pts_inref[0] = pts[0] + (double)(mv_col)*0.125;
          pts_inref[1] = pts[1] + (double)(mv_row)*0.125;
        }

        pts += 2;
        pts_inref += 2;
      }
      np += pixelperblock;
    }
  }

  for (i = 0; i < (mvnumber - 1); ++i) {
    if (mvasint[i] != mvasint[i + 1]) break;
  }

  if (np == 0 || i == (mvnumber - 1)) {
    return 0;
  } else {
    MODE_INFO *mi = xd->mi[0];
    MB_MODE_INFO *mbmi = &mi->mbmi;
    int bw = block_size_wide[mbmi->sb_type];
    int bh = block_size_high[mbmi->sb_type];
    int mv_row = mbmi->mv[0].as_mv.row;
    int mv_col = mbmi->mv[0].as_mv.col;
    int cr_offset = AOMMAX(bh, 8) / 2 - 1;
    int cc_offset = AOMMAX(bw, 8) / 2 - 1;
    int j;
    int pixelperblock = samples_per_neighbor;

    for (j = 0; j < pixelperblock; j++) {
      int r_offset = j / 2;
      int c_offset = j % 2;

      pts[0] = (double)(cc_offset + c_offset + global_offset_c);
      pts[1] = (double)(cr_offset + r_offset + global_offset_r);

      pts_inref[0] = pts[0] + (double)(mv_col)*0.125;
      pts_inref[1] = pts[1] + (double)(mv_row)*0.125;

      pts += 2;
      pts_inref += 2;
    }
    np += pixelperblock;
  }

  return np;
}
#endif  // CONFIG_WARPED_MOTION