/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <limits.h>
#include <math.h>
#include <stdio.h>

#include "config/aom_config.h"
#include "config/aom_dsp_rtcd.h"
#include "config/aom_scale_rtcd.h"
#include "config/av1_rtcd.h"

#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/aom_filter.h"
#if CONFIG_DENOISE
#include "aom_dsp/grain_table.h"
#include "aom_dsp/noise_util.h"
#include "aom_dsp/noise_model.h"
#endif
#include "aom_dsp/psnr.h"
#if CONFIG_INTERNAL_STATS
#include "aom_dsp/ssim.h"
#endif
#include "aom_ports/aom_timer.h"
#include "aom_ports/mem.h"
#include "aom_ports/system_state.h"
#include "aom_scale/aom_scale.h"
#if CONFIG_BITSTREAM_DEBUG || CONFIG_MISMATCH_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG || CONFIG_MISMATCH_DEBUG

#include "av1/common/alloccommon.h"
#include "av1/common/cdef.h"
#include "av1/common/filter.h"
#include "av1/common/idct.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#include "av1/common/resize.h"
#include "av1/common/tile_common.h"

#include "av1/encoder/av1_multi_thread.h"
#include "av1/encoder/aq_complexity.h"
#include "av1/encoder/aq_cyclicrefresh.h"
#include "av1/encoder/aq_variance.h"
#include "av1/encoder/bitstream.h"
#include "av1/encoder/context_tree.h"
#include "av1/encoder/encodeframe.h"
#include "av1/encoder/encodemv.h"
#include "av1/encoder/encode_strategy.h"
#include "av1/encoder/encoder.h"
#include "av1/encoder/encodetxb.h"
#include "av1/encoder/ethread.h"
#include "av1/encoder/firstpass.h"
#include "av1/encoder/grain_test_vectors.h"
#include "av1/encoder/hash_motion.h"
#include "av1/encoder/mbgraph.h"
#include "av1/encoder/picklpf.h"
#include "av1/encoder/pickrst.h"
#include "av1/encoder/random.h"
#include "av1/encoder/ratectrl.h"
#include "av1/encoder/rd.h"
#include "av1/encoder/rdopt.h"
#include "av1/encoder/segmentation.h"
#include "av1/encoder/speed_features.h"
#include "av1/encoder/temporal_filter.h"
#include "av1/encoder/reconinter_enc.h"

#define DEFAULT_EXPLICIT_ORDER_HINT_BITS 7

// AV1 uses 10,000,000 ticks/second as the time stamp resolution.
#define TICKS_PER_SEC 10000000LL
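// As an illustration, a 30 fps stream has a per-frame duration of
// TICKS_PER_SEC / 30, i.e. roughly 333,333 ticks.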

#if CONFIG_ENTROPY_STATS
FRAME_COUNTS aggregate_fc;
#endif  // CONFIG_ENTROPY_STATS

#define AM_SEGMENT_ID_INACTIVE 7
#define AM_SEGMENT_ID_ACTIVE 0

// Whether to use high precision mv for altref computation.
#define ALTREF_HIGH_PRECISION_MV 1

// Q threshold for high precision mv. Choose a very high value for now so that
// HIGH_PRECISION is always chosen.
#define HIGH_PRECISION_MV_QTHRESH 200

// #define OUTPUT_YUV_REC
#ifdef OUTPUT_YUV_SKINMAP
FILE *yuv_skinmap_file = NULL;
#endif
#ifdef OUTPUT_YUV_REC
FILE *yuv_rec_file;
#define FILE_NAME_LEN 100
#endif

// Estimate if the source frame is screen content, based on the portion of
// blocks that have no more than 4 (experimentally selected) luma colors.
static int is_screen_content(const uint8_t *src, int use_hbd, int bd,
                             int stride, int width, int height) {
  assert(src != NULL);
  int counts = 0;
  const int blk_w = 16;
  const int blk_h = 16;
  const int limit = 4;
  for (int r = 0; r + blk_h <= height; r += blk_h) {
    for (int c = 0; c + blk_w <= width; c += blk_w) {
      int count_buf[1 << 12];  // Maximum (1 << 12) color levels.
      const int n_colors =
          use_hbd ? av1_count_colors_highbd(src + r * stride + c, stride, blk_w,
                                            blk_h, bd, count_buf)
                  : av1_count_colors(src + r * stride + c, stride, blk_w, blk_h,
                                     count_buf);
      if (n_colors > 1 && n_colors <= limit) counts++;
    }
  }
  // The threshold is 10%.
  return counts * blk_h * blk_w * 10 > width * height;
}

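// Convert an AOM_SCALING mode into a scaling fraction, returned as a
// numerator (*hr) and denominator (*hs); unknown modes fall back to 1/1
// and assert.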
static INLINE void Scale2Ratio(AOM_SCALING mode, int *hr, int *hs) {
  switch (mode) {
    case NORMAL:
      *hr = 1;
      *hs = 1;
      break;
    case FOURFIVE:
      *hr = 4;
      *hs = 5;
      break;
    case THREEFIVE:
      *hr = 3;
      *hs = 5;
      break;
    case ONETWO:
      *hr = 1;
      *hs = 2;
      break;
    default:
      *hr = 1;
      *hs = 1;
      assert(0);
      break;
  }
}

// Mark all inactive blocks as active. Other segmentation features may be set
// so memset cannot be used; instead, only inactive blocks are reset.
static void suppress_active_map(AV1_COMP *cpi) {
  unsigned char *const seg_map = cpi->segmentation_map;
  int i;
  if (cpi->active_map.enabled || cpi->active_map.update)
    for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
      if (seg_map[i] == AM_SEGMENT_ID_INACTIVE)
        seg_map[i] = AM_SEGMENT_ID_ACTIVE;
}

static void apply_active_map(AV1_COMP *cpi) {
  struct segmentation *const seg = &cpi->common.seg;
  unsigned char *const seg_map = cpi->segmentation_map;
  const unsigned char *const active_map = cpi->active_map.map;
  int i;

  assert(AM_SEGMENT_ID_ACTIVE == CR_SEGMENT_ID_BASE);

  if (frame_is_intra_only(&cpi->common)) {
    cpi->active_map.enabled = 0;
    cpi->active_map.update = 1;
  }

  if (cpi->active_map.update) {
    if (cpi->active_map.enabled) {
      for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
        if (seg_map[i] == AM_SEGMENT_ID_ACTIVE) seg_map[i] = active_map[i];
      av1_enable_segmentation(seg);
      av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
      av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_H);
      av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_V);
      av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_U);
      av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_V);

      av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_H,
                      -MAX_LOOP_FILTER);
      av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_V,
                      -MAX_LOOP_FILTER);
      av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_U,
                      -MAX_LOOP_FILTER);
      av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_V,
                      -MAX_LOOP_FILTER);
    } else {
      av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
      av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_H);
      av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_V);
      av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_U);
      av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_V);
      if (seg->enabled) {
        seg->update_data = 1;
        seg->update_map = 1;
      }
    }
    cpi->active_map.update = 0;
  }
}

int av1_set_active_map(AV1_COMP *cpi, unsigned char *new_map_16x16, int rows,
                       int cols) {
  if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols) {
    unsigned char *const active_map_8x8 = cpi->active_map.map;
    const int mi_rows = cpi->common.mi_rows;
    const int mi_cols = cpi->common.mi_cols;
    const int row_scale = mi_size_high[BLOCK_16X16] == 2 ? 1 : 2;
    const int col_scale = mi_size_wide[BLOCK_16X16] == 2 ? 1 : 2;
    cpi->active_map.update = 1;
    if (new_map_16x16) {
      int r, c;
      for (r = 0; r < mi_rows; ++r) {
        for (c = 0; c < mi_cols; ++c) {
          active_map_8x8[r * mi_cols + c] =
              new_map_16x16[(r >> row_scale) * cols + (c >> col_scale)]
                  ? AM_SEGMENT_ID_ACTIVE
                  : AM_SEGMENT_ID_INACTIVE;
        }
      }
      cpi->active_map.enabled = 1;
    } else {
      cpi->active_map.enabled = 0;
    }
    return 0;
  } else {
    return -1;
  }
}

int av1_get_active_map(AV1_COMP *cpi, unsigned char *new_map_16x16, int rows,
                       int cols) {
  if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols &&
      new_map_16x16) {
    unsigned char *const seg_map_8x8 = cpi->segmentation_map;
    const int mi_rows = cpi->common.mi_rows;
    const int mi_cols = cpi->common.mi_cols;
    const int row_scale = mi_size_high[BLOCK_16X16] == 2 ? 1 : 2;
    const int col_scale = mi_size_wide[BLOCK_16X16] == 2 ? 1 : 2;

    memset(new_map_16x16, !cpi->active_map.enabled, rows * cols);
    if (cpi->active_map.enabled) {
      int r, c;
      for (r = 0; r < mi_rows; ++r) {
        for (c = 0; c < mi_cols; ++c) {
          // Cyclic refresh segments are considered active despite not having
          // AM_SEGMENT_ID_ACTIVE
          new_map_16x16[(r >> row_scale) * cols + (c >> col_scale)] |=
              seg_map_8x8[r * mi_cols + c] != AM_SEGMENT_ID_INACTIVE;
        }
      }
    }
    return 0;
  } else {
    return -1;
  }
}

// Compute the horizontal frequency components' energy in a frame
// by calculating the 16x4 horizontal DCT. This is used to
// decide the superresolution parameters.
void analyze_hor_freq(const AV1_COMP *cpi, double *energy) {
  uint64_t freq_energy[16] = { 0 };
  const YV12_BUFFER_CONFIG *buf = cpi->source;
  const int bd = cpi->td.mb.e_mbd.bd;
  const int width = buf->y_crop_width;
  const int height = buf->y_crop_height;
  DECLARE_ALIGNED(16, int32_t, coeff[16 * 4]);
  int n = 0;
  memset(freq_energy, 0, sizeof(freq_energy));
  if (buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    const int16_t *src16 = (const int16_t *)CONVERT_TO_SHORTPTR(buf->y_buffer);
    for (int i = 0; i < height - 4; i += 4) {
      for (int j = 0; j < width - 16; j += 16) {
        av1_fwd_txfm2d_16x4(src16 + i * buf->y_stride + j, coeff, buf->y_stride,
                            H_DCT, bd);
        for (int k = 1; k < 16; ++k) {
          const uint64_t this_energy =
              ((int64_t)coeff[k] * coeff[k]) +
              ((int64_t)coeff[k + 16] * coeff[k + 16]) +
              ((int64_t)coeff[k + 32] * coeff[k + 32]) +
              ((int64_t)coeff[k + 48] * coeff[k + 48]);
          freq_energy[k] += ROUND_POWER_OF_TWO(this_energy, 2 + 2 * (bd - 8));
        }
        n++;
      }
    }
  } else {
    assert(bd == 8);
    DECLARE_ALIGNED(16, int16_t, src16[16 * 4]);
    for (int i = 0; i < height - 4; i += 4) {
      for (int j = 0; j < width - 16; j += 16) {
        for (int ii = 0; ii < 4; ++ii)
          for (int jj = 0; jj < 16; ++jj)
            src16[ii * 16 + jj] =
                buf->y_buffer[(i + ii) * buf->y_stride + (j + jj)];
        av1_fwd_txfm2d_16x4(src16, coeff, 16, H_DCT, bd);
        for (int k = 1; k < 16; ++k) {
          const uint64_t this_energy =
              ((int64_t)coeff[k] * coeff[k]) +
              ((int64_t)coeff[k + 16] * coeff[k + 16]) +
              ((int64_t)coeff[k + 32] * coeff[k + 32]) +
              ((int64_t)coeff[k + 48] * coeff[k + 48]);
          freq_energy[k] += ROUND_POWER_OF_TWO(this_energy, 2);
        }
        n++;
      }
    }
  }
  if (n) {
    for (int k = 1; k < 16; ++k) energy[k] = (double)freq_energy[k] / n;
    // Convert to cumulative energy
    for (int k = 14; k > 0; --k) energy[k] += energy[k + 1];
  } else {
    for (int k = 1; k < 16; ++k) energy[k] = 1e+20;
  }
}

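// Decide whether the current frame uses high-precision MVs and point the
// MACROBLOCK's mv_cost_stack at the matching cost table (nmvcost_hp or
// nmvcost).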
static void set_high_precision_mv(AV1_COMP *cpi, int allow_high_precision_mv,
                                  int cur_frame_force_integer_mv) {
  MACROBLOCK *const mb = &cpi->td.mb;
  cpi->common.allow_high_precision_mv =
      allow_high_precision_mv && cur_frame_force_integer_mv == 0;
  const int copy_hp =
      cpi->common.allow_high_precision_mv && cur_frame_force_integer_mv == 0;
  int *(*src)[2] = copy_hp ? &mb->nmvcost_hp : &mb->nmvcost;
  mb->mv_cost_stack = *src;
}

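// Choose the sequence-level superblock size: honor an explicit 64x64 or
// 128x128 request from the encoder config, otherwise pick dynamically
// using a simple resolution/speed heuristic.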
static BLOCK_SIZE select_sb_size(const AV1_COMP *const cpi) {
  const AV1_COMMON *const cm = &cpi->common;

  if (cpi->oxcf.superblock_size == AOM_SUPERBLOCK_SIZE_64X64)
    return BLOCK_64X64;
#if CONFIG_FILEOPTIONS
  if (cm->options && cm->options->ext_partition)
#endif
    if (cpi->oxcf.superblock_size == AOM_SUPERBLOCK_SIZE_128X128)
      return BLOCK_128X128;

  assert(cpi->oxcf.superblock_size == AOM_SUPERBLOCK_SIZE_DYNAMIC);

// TODO(any): Possibly could improve this with a heuristic.
#if CONFIG_FILEOPTIONS
  if (cm->options && !cm->options->ext_partition) return BLOCK_64X64;
#endif

  // When superres / resize is on, 'cm->width / height' can change between
  // calls, so we don't apply this heuristic there. Also, this heuristic gives
  // compression gain for speed >= 2 only.
  if (cpi->oxcf.superres_mode == SUPERRES_NONE &&
      cpi->oxcf.resize_mode == RESIZE_NONE && cpi->oxcf.speed >= 2) {
    return (cm->width >= 480 && cm->height >= 360) ? BLOCK_128X128
                                                   : BLOCK_64X64;
  }

  return BLOCK_128X128;
}

static int get_current_frame_ref_type(const AV1_COMP *const cpi) {
  const AV1_COMMON *const cm = &cpi->common;
  const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
  // We choose the reference "type" of this frame from the flags which indicate
  // which reference frames will be refreshed by it. More than one of these
  // flags may be set, so the order here implies an order of precedence.

  if (frame_is_intra_only(cm) || cm->error_resilient_mode ||
      cm->force_primary_ref_none)
    return REGULAR_FRAME;
  else if (gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE)
    return EXT_ARF_FRAME;
  else if (cpi->refresh_alt_ref_frame)
    return ARF_FRAME;
  else if (cpi->rc.is_src_frame_alt_ref)
    return OVERLAY_FRAME;
  else if (cpi->refresh_golden_frame)
    return GLD_FRAME;
  else if (cpi->refresh_bwd_ref_frame)
    return BRF_FRAME;
  else
    return REGULAR_FRAME;
}

static void setup_frame(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  // Set up entropy context depending on frame type. The decoder mandates
  // the use of the default context, index 0, for keyframes and inter
  // frames where the error_resilient_mode or intra_only flag is set. For
  // other inter-frames the encoder currently uses only two contexts;
  // context 1 for ALTREF frames and context 0 for the others.

  cm->primary_ref_frame = PRIMARY_REF_NONE;
  if (frame_is_intra_only(cm) || cm->error_resilient_mode ||
      cm->force_primary_ref_none) {
    av1_setup_past_independence(cm);
    for (int i = 0; i < REF_FRAMES; i++) {
      cpi->fb_of_context_type[i] = -1;
    }
    cpi->fb_of_context_type[REGULAR_FRAME] =
        cm->show_frame ? get_ref_frame_map_idx(cm, GOLDEN_FRAME)
                       : get_ref_frame_map_idx(cm, ALTREF_FRAME);
  } else {
    int wanted_fb = cpi->fb_of_context_type[get_current_frame_ref_type(cpi)];
    for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
      int fb = get_ref_frame_map_idx(cm, ref_frame);
      if (fb == wanted_fb) {
        cm->primary_ref_frame = ref_frame - LAST_FRAME;
      }
    }
  }

  if (cm->current_frame.frame_type == KEY_FRAME && cm->show_frame) {
    cpi->refresh_golden_frame = 1;
    cpi->refresh_alt_ref_frame = 1;
    av1_zero(cpi->interp_filter_selected);
    set_sb_size(&cm->seq_params, select_sb_size(cpi));
  } else if (frame_is_sframe(cm)) {
    cpi->refresh_golden_frame = 1;
    cpi->refresh_alt_ref_frame = 1;
    av1_zero(cpi->interp_filter_selected);
    set_sb_size(&cm->seq_params, select_sb_size(cpi));
  } else {
    const RefCntBuffer *const primary_ref_buf = get_primary_ref_frame_buf(cm);
    if (primary_ref_buf == NULL) {
      av1_setup_past_independence(cm);
      cm->seg.update_map = 1;
      cm->seg.update_data = 1;
    } else {
      *cm->fc = primary_ref_buf->frame_context;
    }
    av1_zero(cpi->interp_filter_selected[0]);
  }

  cm->prev_frame = get_primary_ref_frame_buf(cm);
  cpi->vaq_refresh = 0;
}

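// Reset the mode-info (mi) buffers and their grid pointers to a zeroed
// state, including the top-row and left-column borders of the previous-frame
// mi array.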
static void enc_setup_mi(AV1_COMMON *cm) {
  int i;
  int mi_rows_sb_aligned = calc_mi_size(cm->mi_rows);
  cm->mi = cm->mip;
  memset(cm->mip, 0, cm->mi_stride * mi_rows_sb_aligned * sizeof(*cm->mip));
  cm->prev_mi = cm->prev_mip;
  // Clear top border row
  memset(cm->prev_mip, 0, sizeof(*cm->prev_mip) * cm->mi_stride);
  // Clear left border column
  for (i = 0; i < mi_rows_sb_aligned; ++i)
    memset(&cm->prev_mip[i * cm->mi_stride], 0, sizeof(*cm->prev_mip));
  cm->mi_grid_visible = cm->mi_grid_base;
  cm->prev_mi_grid_visible = cm->prev_mi_grid_base;

  memset(cm->mi_grid_base, 0,
         cm->mi_stride * mi_rows_sb_aligned * sizeof(*cm->mi_grid_base));
}

static int enc_alloc_mi(AV1_COMMON *cm, int mi_size) {
  cm->mip = aom_calloc(mi_size, sizeof(*cm->mip));
  if (!cm->mip) return 1;
  cm->prev_mip = aom_calloc(mi_size, sizeof(*cm->prev_mip));
  if (!cm->prev_mip) return 1;
  cm->mi_alloc_size = mi_size;

  cm->mi_grid_base =
      (MB_MODE_INFO **)aom_calloc(mi_size, sizeof(MB_MODE_INFO *));
  if (!cm->mi_grid_base) return 1;
  cm->prev_mi_grid_base =
      (MB_MODE_INFO **)aom_calloc(mi_size, sizeof(MB_MODE_INFO *));
  if (!cm->prev_mi_grid_base) return 1;

  return 0;
}

static void enc_free_mi(AV1_COMMON *cm) {
  aom_free(cm->mip);
  cm->mip = NULL;
  aom_free(cm->prev_mip);
  cm->prev_mip = NULL;
  aom_free(cm->mi_grid_base);
  cm->mi_grid_base = NULL;
  aom_free(cm->prev_mi_grid_base);
  cm->prev_mi_grid_base = NULL;
  cm->mi_alloc_size = 0;
}

static void swap_mi_and_prev_mi(AV1_COMMON *cm) {
  // Current mip will be the prev_mip for the next frame.
  MB_MODE_INFO **temp_base = cm->prev_mi_grid_base;
  MB_MODE_INFO *temp = cm->prev_mip;
  cm->prev_mip = cm->mip;
  cm->mip = temp;

  // Update the upper left visible macroblock ptrs.
  cm->mi = cm->mip;
  cm->prev_mi = cm->prev_mip;

  cm->prev_mi_grid_base = cm->mi_grid_base;
  cm->mi_grid_base = temp_base;
  cm->mi_grid_visible = cm->mi_grid_base;
  cm->prev_mi_grid_visible = cm->prev_mi_grid_base;
}

void av1_initialize_enc(void) {
  av1_rtcd();
  aom_dsp_rtcd();
  aom_scale_rtcd();
  av1_init_intra_predictors();
  av1_init_me_luts();
  av1_rc_init_minq_luts();
  av1_init_wedge_masks();
}

static void dealloc_context_buffers_ext(AV1_COMP *cpi) {
  if (cpi->mbmi_ext_base) {
    aom_free(cpi->mbmi_ext_base);
    cpi->mbmi_ext_base = NULL;
  }
}

static void alloc_context_buffers_ext(AV1_COMP *cpi) {
  AV1_COMMON *cm = &cpi->common;
  int mi_size = cm->mi_cols * cm->mi_rows;

  dealloc_context_buffers_ext(cpi);
  CHECK_MEM_ERROR(cm, cpi->mbmi_ext_base,
                  aom_calloc(mi_size, sizeof(*cpi->mbmi_ext_base)));
}

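// Zero out all Cb/Cr film grain parameters so that only luma grain is
// signalled; used for monochrome encodes.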
static void reset_film_grain_chroma_params(aom_film_grain_t *pars) {
  pars->num_cr_points = 0;
  pars->cr_mult = 0;
  pars->cr_luma_mult = 0;
  memset(pars->scaling_points_cr, 0, sizeof(pars->scaling_points_cr));
  memset(pars->ar_coeffs_cr, 0, sizeof(pars->ar_coeffs_cr));
  pars->num_cb_points = 0;
  pars->cb_mult = 0;
  pars->cb_luma_mult = 0;
  pars->chroma_scaling_from_luma = 0;
  memset(pars->scaling_points_cb, 0, sizeof(pars->scaling_points_cb));
  memset(pars->ar_coeffs_cb, 0, sizeof(pars->ar_coeffs_cb));
}

static void update_film_grain_parameters(struct AV1_COMP *cpi,
                                         const AV1EncoderConfig *oxcf) {
  AV1_COMMON *const cm = &cpi->common;
  cpi->oxcf = *oxcf;

  if (cpi->film_grain_table) {
    aom_film_grain_table_free(cpi->film_grain_table);
    aom_free(cpi->film_grain_table);
    cpi->film_grain_table = NULL;
  }

  if (oxcf->film_grain_test_vector) {
    cm->seq_params.film_grain_params_present = 1;
    if (cm->current_frame.frame_type == KEY_FRAME) {
      memcpy(&cm->film_grain_params,
             film_grain_test_vectors + oxcf->film_grain_test_vector - 1,
             sizeof(cm->film_grain_params));
      if (oxcf->monochrome)
        reset_film_grain_chroma_params(&cm->film_grain_params);
      cm->film_grain_params.bit_depth = cm->seq_params.bit_depth;
      if (cm->seq_params.color_range == AOM_CR_FULL_RANGE) {
        cm->film_grain_params.clip_to_restricted_range = 0;
      }
    }
  } else if (oxcf->film_grain_table_filename) {
    cpi->film_grain_table = aom_malloc(sizeof(*cpi->film_grain_table));
    memset(cpi->film_grain_table, 0, sizeof(aom_film_grain_table_t));

    aom_film_grain_table_read(cpi->film_grain_table,
                              oxcf->film_grain_table_filename, &cm->error);
  } else {
    cm->seq_params.film_grain_params_present = 0;
    memset(&cm->film_grain_params, 0, sizeof(cm->film_grain_params));
  }
}

static void dealloc_compressor_data(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const int num_planes = av1_num_planes(cm);

  dealloc_context_buffers_ext(cpi);

  aom_free(cpi->tile_data);
  cpi->tile_data = NULL;

  // Delete segmentation map
  aom_free(cpi->segmentation_map);
  cpi->segmentation_map = NULL;

  av1_cyclic_refresh_free(cpi->cyclic_refresh);
  cpi->cyclic_refresh = NULL;

  aom_free(cpi->active_map.map);
  cpi->active_map.map = NULL;

  aom_free(cpi->td.mb.above_pred_buf);
  cpi->td.mb.above_pred_buf = NULL;

  aom_free(cpi->td.mb.left_pred_buf);
  cpi->td.mb.left_pred_buf = NULL;

  aom_free(cpi->td.mb.wsrc_buf);
  cpi->td.mb.wsrc_buf = NULL;

#if CONFIG_COLLECT_INTER_MODE_RD_STATS
  aom_free(cpi->td.mb.inter_modes_info);
  cpi->td.mb.inter_modes_info = NULL;
#endif

  for (int i = 0; i < 2; i++)
    for (int j = 0; j < 2; j++) {
      aom_free(cpi->td.mb.hash_value_buffer[i][j]);
      cpi->td.mb.hash_value_buffer[i][j] = NULL;
    }
  aom_free(cpi->td.mb.mask_buf);
  cpi->td.mb.mask_buf = NULL;

  aom_free(cm->tpl_mvs);
  cm->tpl_mvs = NULL;

  av1_free_ref_frame_buffers(cm->buffer_pool);
  av1_free_txb_buf(cpi);
  av1_free_context_buffers(cm);

  aom_free_frame_buffer(&cpi->last_frame_uf);
  av1_free_restoration_buffers(cm);
  aom_free_frame_buffer(&cpi->trial_frame_rst);
  aom_free_frame_buffer(&cpi->scaled_source);
  aom_free_frame_buffer(&cpi->scaled_last_source);
  aom_free_frame_buffer(&cpi->alt_ref_buffer);
  av1_lookahead_destroy(cpi->lookahead);

  aom_free(cpi->tile_tok[0][0]);
  cpi->tile_tok[0][0] = 0;

  aom_free(cpi->tplist[0][0]);
  cpi->tplist[0][0] = NULL;

  av1_free_pc_tree(&cpi->td, num_planes);

  aom_free(cpi->td.mb.palette_buffer);

  aom_free(cpi->td.mb.tmp_conv_dst);
  for (int j = 0; j < 2; ++j) {
    aom_free(cpi->td.mb.tmp_obmc_bufs[j]);
  }

#if CONFIG_DENOISE
  if (cpi->denoise_and_model) {
    aom_denoise_and_model_free(cpi->denoise_and_model);
    cpi->denoise_and_model = NULL;
  }
#endif
  if (cpi->film_grain_table) {
    aom_film_grain_table_free(cpi->film_grain_table);
    cpi->film_grain_table = NULL;
  }
}

static void save_coding_context(AV1_COMP *cpi) {
  CODING_CONTEXT *const cc = &cpi->coding_context;
  AV1_COMMON *cm = &cpi->common;

  // Stores a snapshot of key state variables which can subsequently be
  // restored with a call to av1_restore_coding_context. These functions are
  // intended for use in a re-code loop in av1_compress_frame where the
  // quantizer value is adjusted between loop iterations.
  av1_copy(cc->nmv_vec_cost, cpi->td.mb.nmv_vec_cost);
  av1_copy(cc->nmv_costs, cpi->nmv_costs);
  av1_copy(cc->nmv_costs_hp, cpi->nmv_costs_hp);

  cc->fc = *cm->fc;
}

static void restore_coding_context(AV1_COMP *cpi) {
  CODING_CONTEXT *const cc = &cpi->coding_context;
  AV1_COMMON *cm = &cpi->common;

  // Restore key state variables to the snapshot state stored in the
  // previous call to av1_save_coding_context.
  av1_copy(cpi->td.mb.nmv_vec_cost, cc->nmv_vec_cost);
  av1_copy(cpi->nmv_costs, cc->nmv_costs);
  av1_copy(cpi->nmv_costs_hp, cc->nmv_costs_hp);

  *cm->fc = cc->fc;
}

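// Configure frame-level ("static") segmentation: cleared and disabled on key
// frames, set up with Q and loop-filter deltas on ARF frames, and updated or
// left untouched for the remaining frames of a GF/ARF group depending on the
// rate-control state.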
static void configure_static_seg_features(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  struct segmentation *const seg = &cm->seg;

  int high_q = (int)(rc->avg_q > 48.0);
  int qi_delta;

  // Disable and clear down for KF
  if (cm->current_frame.frame_type == KEY_FRAME) {
    // Clear down the global segmentation map
    memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
    seg->update_map = 0;
    seg->update_data = 0;
    cpi->static_mb_pct = 0;

    // Disable segmentation
    av1_disable_segmentation(seg);

    // Clear down the segment features.
    av1_clearall_segfeatures(seg);
  } else if (cpi->refresh_alt_ref_frame) {
    // If this is an alt ref frame
    // Clear down the global segmentation map
    memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
    seg->update_map = 0;
    seg->update_data = 0;
    cpi->static_mb_pct = 0;

    // Disable segmentation and individual segment features by default
    av1_disable_segmentation(seg);
    av1_clearall_segfeatures(seg);

    // Scan frames from current to arf frame.
    // This function re-enables segmentation if appropriate.
    av1_update_mbgraph_stats(cpi);

    // If segmentation was enabled set those features needed for the
    // arf itself.
    if (seg->enabled) {
      seg->update_map = 1;
      seg->update_data = 1;

      qi_delta = av1_compute_qdelta(rc, rc->avg_q, rc->avg_q * 0.875,
                                    cm->seq_params.bit_depth);
      av1_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta - 2);
      av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_H, -2);
      av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_V, -2);
      av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_U, -2);
      av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_V, -2);

      av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_H);
      av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_V);
      av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_U);
      av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_V);

      av1_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);
    }
  } else if (seg->enabled) {
    // All other frames if segmentation has been enabled

    // First normal frame in a valid gf or alt ref group
    if (rc->frames_since_golden == 0) {
      // Set up segment features for normal frames in an arf group
      if (rc->source_alt_ref_active) {
        seg->update_map = 0;
        seg->update_data = 1;

        qi_delta = av1_compute_qdelta(rc, rc->avg_q, rc->avg_q * 1.125,
                                      cm->seq_params.bit_depth);
        av1_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta + 2);
        av1_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);

        av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_H, -2);
        av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_V, -2);
        av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_U, -2);
        av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_V, -2);

        av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_H);
        av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_V);
        av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_U);
        av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_V);

        // Segment coding disabled for compred testing
        if (high_q || (cpi->static_mb_pct == 100)) {
          av1_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);
          av1_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);
          av1_enable_segfeature(seg, 1, SEG_LVL_SKIP);
        }
      } else {
        // Disable segmentation and clear down features if alt ref
        // is not active for this group

        av1_disable_segmentation(seg);

        memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);

        seg->update_map = 0;
        seg->update_data = 0;

        av1_clearall_segfeatures(seg);
      }
    } else if (rc->is_src_frame_alt_ref) {
      // Special case where we are coding over the top of a previous
      // alt ref frame.
      // Segment coding disabled for compred testing

      // Enable ref frame features for segment 0 as well
      av1_enable_segfeature(seg, 0, SEG_LVL_REF_FRAME);
      av1_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);

      // All mbs should use ALTREF_FRAME
      av1_clear_segdata(seg, 0, SEG_LVL_REF_FRAME);
      av1_set_segdata(seg, 0, SEG_LVL_REF_FRAME, ALTREF_FRAME);
      av1_clear_segdata(seg, 1, SEG_LVL_REF_FRAME);
      av1_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);

      // Skip all MBs if high Q (0,0 mv and skip coeffs)
      if (high_q) {
        av1_enable_segfeature(seg, 0, SEG_LVL_SKIP);
        av1_enable_segfeature(seg, 1, SEG_LVL_SKIP);
      }
      // Enable data update
      seg->update_data = 1;
    } else {
      // All other frames.

      // No updates.. leave things as they are.
      seg->update_map = 0;
      seg->update_data = 0;
    }
  }
}
static void update_reference_segmentation_map(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  MB_MODE_INFO **mi_4x4_ptr = cm->mi_grid_visible;
  uint8_t *cache_ptr = cm->cur_frame->seg_map;
  int row, col;

  for (row = 0; row < cm->mi_rows; row++) {
    MB_MODE_INFO **mi_4x4 = mi_4x4_ptr;
    uint8_t *cache = cache_ptr;
    for (col = 0; col < cm->mi_cols; col++, mi_4x4++, cache++)
      cache[0] = mi_4x4[0]->segment_id;
    mi_4x4_ptr += cm->mi_stride;
    cache_ptr += cm->mi_cols;
  }
}

static void alloc_raw_frame_buffers(AV1_COMP *cpi) {
  AV1_COMMON *cm = &cpi->common;
  const SequenceHeader *const seq_params = &cm->seq_params;
  const AV1EncoderConfig *oxcf = &cpi->oxcf;

  if (!cpi->lookahead)
    cpi->lookahead = av1_lookahead_init(
        oxcf->width, oxcf->height, seq_params->subsampling_x,
        seq_params->subsampling_y, seq_params->use_highbitdepth,
        oxcf->lag_in_frames, oxcf->border_in_pixels);
  if (!cpi->lookahead)
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate lag buffers");

  // TODO(agrange) Check if ARF is enabled and skip allocation if not.
  if (aom_realloc_frame_buffer(
          &cpi->alt_ref_buffer, oxcf->width, oxcf->height,
          seq_params->subsampling_x, seq_params->subsampling_y,
          seq_params->use_highbitdepth, oxcf->border_in_pixels,
          cm->byte_alignment, NULL, NULL, NULL))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate altref buffer");
}

static void alloc_util_frame_buffers(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const SequenceHeader *const seq_params = &cm->seq_params;
  if (aom_realloc_frame_buffer(
          &cpi->last_frame_uf, cm->width, cm->height, seq_params->subsampling_x,
          seq_params->subsampling_y, seq_params->use_highbitdepth,
          cpi->oxcf.border_in_pixels, cm->byte_alignment, NULL, NULL, NULL))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate last frame buffer");

  if (aom_realloc_frame_buffer(
          &cpi->trial_frame_rst, cm->superres_upscaled_width,
          cm->superres_upscaled_height, seq_params->subsampling_x,
          seq_params->subsampling_y, seq_params->use_highbitdepth,
          AOM_RESTORATION_FRAME_BORDER, cm->byte_alignment, NULL, NULL, NULL))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate trial restored frame buffer");

  if (aom_realloc_frame_buffer(
          &cpi->scaled_source, cm->width, cm->height, seq_params->subsampling_x,
          seq_params->subsampling_y, seq_params->use_highbitdepth,
          cpi->oxcf.border_in_pixels, cm->byte_alignment, NULL, NULL, NULL))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate scaled source buffer");

  if (aom_realloc_frame_buffer(
          &cpi->scaled_last_source, cm->width, cm->height,
          seq_params->subsampling_x, seq_params->subsampling_y,
          seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
          cm->byte_alignment, NULL, NULL, NULL))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate scaled last source buffer");
}

static void alloc_compressor_data(AV1_COMP *cpi) {
  AV1_COMMON *cm = &cpi->common;
  const int num_planes = av1_num_planes(cm);

  av1_alloc_context_buffers(cm, cm->width, cm->height);

  int mi_rows_aligned_to_sb =
      ALIGN_POWER_OF_TWO(cm->mi_rows, cm->seq_params.mib_size_log2);
  int sb_rows = mi_rows_aligned_to_sb >> cm->seq_params.mib_size_log2;

  av1_alloc_txb_buf(cpi);

  alloc_context_buffers_ext(cpi);

  aom_free(cpi->tile_tok[0][0]);

  {
    unsigned int tokens =
        get_token_alloc(cm->mb_rows, cm->mb_cols, MAX_SB_SIZE_LOG2, num_planes);
    CHECK_MEM_ERROR(cm, cpi->tile_tok[0][0],
                    aom_calloc(tokens, sizeof(*cpi->tile_tok[0][0])));
  }
  aom_free(cpi->tplist[0][0]);

  CHECK_MEM_ERROR(cm, cpi->tplist[0][0],
                  aom_calloc(sb_rows * MAX_TILE_ROWS * MAX_TILE_COLS,
                             sizeof(*cpi->tplist[0][0])));

  av1_setup_pc_tree(&cpi->common, &cpi->td);
}

void av1_new_framerate(AV1_COMP *cpi, double framerate) {
  cpi->framerate = framerate < 0.1 ? 30 : framerate;
  av1_rc_update_framerate(cpi, cpi->common.width, cpi->common.height);
}

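// Derive the frame's tile layout: uniform spacing from the requested log2
// tile counts, or explicit per-tile widths/heights from the encoder
// configuration, clamped to the maximum tile dimensions.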
static void set_tile_info(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  int i, start_sb;

  av1_get_tile_limits(cm);

  // configure tile columns
  if (cpi->oxcf.tile_width_count == 0 || cpi->oxcf.tile_height_count == 0) {
    cm->uniform_tile_spacing_flag = 1;
    cm->log2_tile_cols = AOMMAX(cpi->oxcf.tile_columns, cm->min_log2_tile_cols);
    cm->log2_tile_cols = AOMMIN(cm->log2_tile_cols, cm->max_log2_tile_cols);
  } else {
    int mi_cols = ALIGN_POWER_OF_TWO(cm->mi_cols, cm->seq_params.mib_size_log2);
    int sb_cols = mi_cols >> cm->seq_params.mib_size_log2;
    int size_sb, j = 0;
    cm->uniform_tile_spacing_flag = 0;
    for (i = 0, start_sb = 0; start_sb < sb_cols && i < MAX_TILE_COLS; i++) {
      cm->tile_col_start_sb[i] = start_sb;
      size_sb = cpi->oxcf.tile_widths[j++];
      if (j >= cpi->oxcf.tile_width_count) j = 0;
      start_sb += AOMMIN(size_sb, cm->max_tile_width_sb);
    }
    cm->tile_cols = i;
    cm->tile_col_start_sb[i] = sb_cols;
  }
  av1_calculate_tile_cols(cm);

  // configure tile rows
  if (cm->uniform_tile_spacing_flag) {
    cm->log2_tile_rows = AOMMAX(cpi->oxcf.tile_rows, cm->min_log2_tile_rows);
    cm->log2_tile_rows = AOMMIN(cm->log2_tile_rows, cm->max_log2_tile_rows);
  } else {
    int mi_rows = ALIGN_POWER_OF_TWO(cm->mi_rows, cm->seq_params.mib_size_log2);
    int sb_rows = mi_rows >> cm->seq_params.mib_size_log2;
    int size_sb, j = 0;
    for (i = 0, start_sb = 0; start_sb < sb_rows && i < MAX_TILE_ROWS; i++) {
      cm->tile_row_start_sb[i] = start_sb;
      size_sb = cpi->oxcf.tile_heights[j++];
      if (j >= cpi->oxcf.tile_height_count) j = 0;
      start_sb += AOMMIN(size_sb, cm->max_tile_height_sb);
    }
    cm->tile_rows = i;
    cm->tile_row_start_sb[i] = sb_rows;
  }
  av1_calculate_tile_rows(cm);
}

static void update_frame_size(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;

  av1_set_mb_mi(cm, cm->width, cm->height);
  av1_init_context_buffers(cm);
  av1_init_macroblockd(cm, xd, NULL);
  memset(cpi->mbmi_ext_base, 0,
         cm->mi_rows * cm->mi_cols * sizeof(*cpi->mbmi_ext_base));
  set_tile_info(cpi);
}

static void init_buffer_indices(AV1_COMP *cpi) {
  int fb_idx;
  for (fb_idx = 0; fb_idx < REF_FRAMES; ++fb_idx)
    cpi->common.remapped_ref_idx[fb_idx] = fb_idx;
  cpi->rate_index = 0;
  cpi->rate_size = 0;
}

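// Return 1 if a stream with the given width, height and frame rate fits a
// level defined by lvl_width x lvl_height at lvl_fps: the luma picture size,
// the display sample rate, and both frame dimensions (up to lvl_dim_mult
// times the level's nominal size) must all be within bounds.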
static INLINE int does_level_match(int width, int height, double fps,
                                   int lvl_width, int lvl_height,
                                   double lvl_fps, int lvl_dim_mult) {
  const int64_t lvl_luma_pels = lvl_width * lvl_height;
  const double lvl_display_sample_rate = lvl_luma_pels * lvl_fps;
  const int64_t luma_pels = width * height;
  const double display_sample_rate = luma_pels * fps;
  return luma_pels <= lvl_luma_pels &&
         display_sample_rate <= lvl_display_sample_rate &&
         width <= lvl_width * lvl_dim_mult &&
         height <= lvl_height * lvl_dim_mult;
}

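// Select the lowest level whose dimension and display-rate limits cover the
// configured stream (falling back to level 9.3 when nothing matches), then
// set main tier and derive each operating point's bitrate and buffer-size
// limits.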
Andrey Norkin26495512018-06-20 17:13:11 -07001023static void set_bitstream_level_tier(SequenceHeader *seq, AV1_COMMON *cm,
Andrey Norkinf481d982018-05-15 12:05:31 -07001024 const AV1EncoderConfig *oxcf) {
Debargha Mukherjee57498692018-05-11 13:29:31 -07001025 // TODO(any): This is a placeholder function that only addresses dimensions
1026 // and max display sample rates.
1027 // Need to add checks for max bit rate, max decoded luma sample rate, header
1028 // rate, etc. that are not covered by this function.
Debargha Mukherjeeea675402018-05-10 16:10:41 -07001029 (void)oxcf;
Debargha Mukherjee57498692018-05-11 13:29:31 -07001030 BitstreamLevel bl = { 9, 3 };
1031 if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate, 512,
1032 288, 30.0, 4)) {
1033 bl.major = 2;
1034 bl.minor = 0;
1035 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1036 704, 396, 30.0, 4)) {
1037 bl.major = 2;
1038 bl.minor = 1;
1039 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1040 1088, 612, 30.0, 4)) {
1041 bl.major = 3;
1042 bl.minor = 0;
1043 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1044 1376, 774, 30.0, 4)) {
1045 bl.major = 3;
1046 bl.minor = 1;
1047 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1048 2048, 1152, 30.0, 3)) {
1049 bl.major = 4;
1050 bl.minor = 0;
1051 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1052 2048, 1152, 60.0, 3)) {
1053 bl.major = 4;
1054 bl.minor = 1;
1055 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1056 4096, 2176, 30.0, 2)) {
1057 bl.major = 5;
1058 bl.minor = 0;
1059 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1060 4096, 2176, 60.0, 2)) {
1061 bl.major = 5;
1062 bl.minor = 1;
1063 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1064 4096, 2176, 120.0, 2)) {
1065 bl.major = 5;
1066 bl.minor = 2;
1067 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1068 8192, 4352, 30.0, 2)) {
1069 bl.major = 6;
1070 bl.minor = 0;
1071 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1072 8192, 4352, 60.0, 2)) {
1073 bl.major = 6;
1074 bl.minor = 1;
1075 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1076 8192, 4352, 120.0, 2)) {
1077 bl.major = 6;
1078 bl.minor = 2;
1079 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1080 16384, 8704, 30.0, 2)) {
1081 bl.major = 7;
1082 bl.minor = 0;
1083 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1084 16384, 8704, 60.0, 2)) {
1085 bl.major = 7;
1086 bl.minor = 1;
1087 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1088 16384, 8704, 120.0, 2)) {
1089 bl.major = 7;
1090 bl.minor = 2;
1091 }
Debargha Mukherjeeea675402018-05-10 16:10:41 -07001092 for (int i = 0; i < MAX_NUM_OPERATING_POINTS; ++i) {
Debargha Mukherjee57498692018-05-11 13:29:31 -07001093 seq->level[i] = bl;
Andrey Norkinf481d982018-05-15 12:05:31 -07001094 seq->tier[i] = 0; // setting main tier by default
Andrey Norkin26495512018-06-20 17:13:11 -07001095 // Set the maximum parameters for bitrate and buffer size for this profile,
1096 // level, and tier
1097 cm->op_params[i].bitrate = max_level_bitrate(
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001098 cm->seq_params.profile, major_minor_to_seq_level_idx(seq->level[i]),
1099 seq->tier[i]);
Andrey Norkinc7511de2018-06-22 12:31:06 -07001100 // Level with seq_level_idx = 31 returns a high "dummy" bitrate to pass the
1101 // check
Andrey Norkin26495512018-06-20 17:13:11 -07001102 if (cm->op_params[i].bitrate == 0)
1103 aom_internal_error(
1104 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
1105 "AV1 does not support this combination of profile, level, and tier.");
Andrey Norkinc7511de2018-06-22 12:31:06 -07001106    // Buffer size in bits is bitrate in bits/s * 1 s
Andrey Norkin26495512018-06-20 17:13:11 -07001107 cm->op_params[i].buffer_size = cm->op_params[i].bitrate;
Debargha Mukherjeeea675402018-05-10 16:10:41 -07001108 }
1109}
1110
Andrey Norkin26495512018-06-20 17:13:11 -07001111static void init_seq_coding_tools(SequenceHeader *seq, AV1_COMMON *cm,
1112 const AV1EncoderConfig *oxcf) {
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001113 seq->still_picture = (oxcf->limit == 1);
1114 seq->reduced_still_picture_hdr = seq->still_picture;
Debargha Mukherjee9713ccb2018-04-08 19:09:17 -07001115 seq->reduced_still_picture_hdr &= !oxcf->full_still_picture_hdr;
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001116 seq->force_screen_content_tools = 2;
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001117 seq->force_integer_mv = 2;
David Turnerebf96f42018-11-14 16:57:57 +00001118 seq->order_hint_info.enable_order_hint = oxcf->enable_order_hint;
David Turner936235c2018-11-28 13:42:01 +00001119 seq->frame_id_numbers_present_flag =
1120 !(seq->still_picture && seq->reduced_still_picture_hdr) &&
1121 !oxcf->large_scale_tile && oxcf->error_resilient_mode;
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001122 if (seq->still_picture && seq->reduced_still_picture_hdr) {
David Turnerebf96f42018-11-14 16:57:57 +00001123 seq->order_hint_info.enable_order_hint = 0;
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001124 seq->force_screen_content_tools = 2;
1125 seq->force_integer_mv = 2;
1126 }
David Turnerebf96f42018-11-14 16:57:57 +00001127 seq->order_hint_info.order_hint_bits_minus_1 =
1128 seq->order_hint_info.enable_order_hint
1129 ? DEFAULT_EXPLICIT_ORDER_HINT_BITS - 1
1130 : -1;
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001131
David Turner760a2f42018-12-07 15:25:36 +00001132 seq->max_frame_width =
1133 oxcf->forced_max_frame_width ? oxcf->forced_max_frame_width : oxcf->width;
1134 seq->max_frame_height = oxcf->forced_max_frame_height
1135 ? oxcf->forced_max_frame_height
1136 : oxcf->height;
1137 seq->num_bits_width =
1138 (seq->max_frame_width > 1) ? get_msb(seq->max_frame_width - 1) + 1 : 1;
1139 seq->num_bits_height =
1140 (seq->max_frame_height > 1) ? get_msb(seq->max_frame_height - 1) + 1 : 1;
1141 assert(seq->num_bits_width <= 16);
1142 assert(seq->num_bits_height <= 16);
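// Example: with a hypothetical 1920x1080 maximum frame size, get_msb(1919)
// is 10, so num_bits_width is 11; widths are coded as (width - 1) in that
// many bits, and the asserts above keep both fields within the 16-bit limit.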
1143
1144 seq->frame_id_length = FRAME_ID_LENGTH;
1145 seq->delta_frame_id_length = DELTA_FRAME_ID_LENGTH;
1146
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001147 seq->enable_dual_filter = oxcf->enable_dual_filter;
Debargha Mukherjee7ac3eb12018-12-12 10:26:50 -08001148 seq->order_hint_info.enable_dist_wtd_comp = oxcf->enable_dist_wtd_comp;
1149 seq->order_hint_info.enable_dist_wtd_comp &=
David Turnerebf96f42018-11-14 16:57:57 +00001150 seq->order_hint_info.enable_order_hint;
1151 seq->order_hint_info.enable_ref_frame_mvs = oxcf->enable_ref_frame_mvs;
1152 seq->order_hint_info.enable_ref_frame_mvs &=
1153 seq->order_hint_info.enable_order_hint;
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001154 seq->enable_superres = oxcf->enable_superres;
1155 seq->enable_cdef = oxcf->enable_cdef;
1156 seq->enable_restoration = oxcf->enable_restoration;
Debargha Mukherjee37df9162018-03-25 12:48:24 -07001157 seq->enable_warped_motion = oxcf->enable_warped_motion;
Debargha Mukherjee16ea6ba2018-12-10 12:01:38 -08001158 seq->enable_interintra_compound = oxcf->enable_interintra_comp;
1159 seq->enable_masked_compound = oxcf->enable_masked_comp;
Debargha Mukherjee03c43ba2018-12-14 13:08:08 -08001160 seq->enable_intra_edge_filter = oxcf->enable_intra_edge_filter;
Yue Chen8f9ca582018-12-12 15:11:47 -08001161 seq->enable_filter_intra = oxcf->enable_filter_intra;
Debargha Mukherjee57498692018-05-11 13:29:31 -07001162
Andrey Norkin26495512018-06-20 17:13:11 -07001163 set_bitstream_level_tier(seq, cm, oxcf);
Adrian Grangec56f6ec2018-05-31 14:19:32 -07001164
1165 if (seq->operating_points_cnt_minus_1 == 0) {
1166 seq->operating_point_idc[0] = 0;
1167 } else {
1168 // Set operating_point_idc[] such that for the i-th operating point the
1169 // first (operating_points_cnt-i) spatial layers and the first temporal
1170      // layer are decoded. Note that the highest quality operating point should
1171      // come first.
1172 for (int i = 0; i < seq->operating_points_cnt_minus_1 + 1; i++)
1173 seq->operating_point_idc[i] =
1174 (~(~0u << (seq->operating_points_cnt_minus_1 + 1 - i)) << 8) | 1;
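// For example, operating_points_cnt_minus_1 == 2 (three operating points)
// yields idc values 0x701, 0x301 and 0x101: bits 8 and up select the
// decodable spatial layers, bit 0 selects temporal layer 0, and each
// successive operating point drops the highest remaining spatial layer.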
1175 }
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001176}
1177
Yaowu Xuf883b422016-08-30 14:01:10 -07001178static void init_config(struct AV1_COMP *cpi, AV1EncoderConfig *oxcf) {
1179 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001180
1181 cpi->oxcf = *oxcf;
1182 cpi->framerate = oxcf->init_framerate;
1183
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001184 cm->seq_params.profile = oxcf->profile;
1185 cm->seq_params.bit_depth = oxcf->bit_depth;
1186 cm->seq_params.use_highbitdepth = oxcf->use_highbitdepth;
1187 cm->seq_params.color_primaries = oxcf->color_primaries;
1188 cm->seq_params.transfer_characteristics = oxcf->transfer_characteristics;
1189 cm->seq_params.matrix_coefficients = oxcf->matrix_coefficients;
Debargha Mukherjeef340fec2018-01-10 18:12:22 -08001190 cm->seq_params.monochrome = oxcf->monochrome;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001191 cm->seq_params.chroma_sample_position = oxcf->chroma_sample_position;
1192 cm->seq_params.color_range = oxcf->color_range;
Andrey Norkin28e9ce22018-01-08 10:11:21 -08001193 cm->timing_info_present = oxcf->timing_info_present;
Andrey Norkin795ba872018-03-06 13:24:14 -08001194 cm->timing_info.num_units_in_display_tick =
1195 oxcf->timing_info.num_units_in_display_tick;
1196 cm->timing_info.time_scale = oxcf->timing_info.time_scale;
1197 cm->timing_info.equal_picture_interval =
1198 oxcf->timing_info.equal_picture_interval;
1199 cm->timing_info.num_ticks_per_picture =
1200 oxcf->timing_info.num_ticks_per_picture;
1201
Andrey Norkin26495512018-06-20 17:13:11 -07001202 cm->seq_params.display_model_info_present_flag =
1203 oxcf->display_model_info_present_flag;
Adrian Grangec56f6ec2018-05-31 14:19:32 -07001204 cm->seq_params.decoder_model_info_present_flag =
1205 oxcf->decoder_model_info_present_flag;
Andrey Norkin795ba872018-03-06 13:24:14 -08001206 if (oxcf->decoder_model_info_present_flag) {
Andrey Norkin26495512018-06-20 17:13:11 -07001207 // set the decoder model parameters in schedule mode
Andrey Norkin795ba872018-03-06 13:24:14 -08001208 cm->buffer_model.num_units_in_decoding_tick =
1209 oxcf->buffer_model.num_units_in_decoding_tick;
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07001210 cm->buffer_removal_time_present = 1;
Andrey Norkin795ba872018-03-06 13:24:14 -08001211 set_aom_dec_model_info(&cm->buffer_model);
Andrey Norkin26495512018-06-20 17:13:11 -07001212 set_dec_model_op_parameters(&cm->op_params[0]);
1213 } else if (cm->timing_info_present &&
1214 cm->timing_info.equal_picture_interval &&
1215 !cm->seq_params.decoder_model_info_present_flag) {
1216 // set the decoder model parameters in resource availability mode
1217 set_resource_availability_parameters(&cm->op_params[0]);
Andrey Norkinc7511de2018-06-22 12:31:06 -07001218 } else {
1219 cm->op_params[0].initial_display_delay =
1220 10; // Default value (not signaled)
Andrey Norkin795ba872018-03-06 13:24:14 -08001221 }
Andrey Norkinc7511de2018-06-22 12:31:06 -07001222
Tom Fineganf8d6a162018-08-21 10:47:55 -07001223 if (cm->seq_params.monochrome) {
1224 cm->seq_params.subsampling_x = 1;
1225 cm->seq_params.subsampling_y = 1;
1226 } else if (cm->seq_params.color_primaries == AOM_CICP_CP_BT_709 &&
1227 cm->seq_params.transfer_characteristics == AOM_CICP_TC_SRGB &&
1228 cm->seq_params.matrix_coefficients == AOM_CICP_MC_IDENTITY) {
1229 cm->seq_params.subsampling_x = 0;
1230 cm->seq_params.subsampling_y = 0;
1231 } else {
1232 if (cm->seq_params.profile == 0) {
1233 cm->seq_params.subsampling_x = 1;
1234 cm->seq_params.subsampling_y = 1;
1235 } else if (cm->seq_params.profile == 1) {
1236 cm->seq_params.subsampling_x = 0;
1237 cm->seq_params.subsampling_y = 0;
1238 } else {
1239 if (cm->seq_params.bit_depth == AOM_BITS_12) {
1240 cm->seq_params.subsampling_x = oxcf->chroma_subsampling_x;
1241 cm->seq_params.subsampling_y = oxcf->chroma_subsampling_y;
1242 } else {
1243 cm->seq_params.subsampling_x = 1;
1244 cm->seq_params.subsampling_y = 0;
1245 }
1246 }
Tom Finegan02b2a842018-08-24 13:50:00 -07001247 }
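// In summary: monochrome gets 4:0:0-style (1, 1) subsampling, sRGB/identity
// content is 4:4:4 (0, 0), profile 0 is 4:2:0 (1, 1), profile 1 is 4:4:4
// (0, 0), and profile 2 is 4:2:2 (1, 0) except at 12 bits, where the
// subsampling requested in the encoder config is used directly.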
1248
Yaowu Xuc27fc142016-08-22 16:08:15 -07001249 cm->width = oxcf->width;
1250 cm->height = oxcf->height;
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001251 set_sb_size(&cm->seq_params,
1252 select_sb_size(cpi)); // set sb size before allocations
Cheng Chen46f30c72017-09-07 11:13:33 -07001253 alloc_compressor_data(cpi);
Yaowu Xuc7119a72018-03-29 09:59:37 -07001254
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08001255 update_film_grain_parameters(cpi, oxcf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001256
1257 // Single thread case: use counts in common.
Yue Chencc6a6ef2018-05-21 16:21:05 -07001258 cpi->td.counts = &cpi->counts;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001259
1260  // av1_change_config() covers all of the jointly configured functionality.
Yaowu Xuf883b422016-08-30 14:01:10 -07001261 av1_change_config(cpi, oxcf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001262
1263 cpi->static_mb_pct = 0;
1264 cpi->ref_frame_flags = 0;
1265
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07001266 // Reset resize pending flags
1267 cpi->resize_pending_width = 0;
1268 cpi->resize_pending_height = 0;
1269
Yaowu Xuc27fc142016-08-22 16:08:15 -07001270 init_buffer_indices(cpi);
1271}
1272
1273static void set_rc_buffer_sizes(RATE_CONTROL *rc,
Yaowu Xuf883b422016-08-30 14:01:10 -07001274 const AV1EncoderConfig *oxcf) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001275 const int64_t bandwidth = oxcf->target_bandwidth;
1276 const int64_t starting = oxcf->starting_buffer_level_ms;
1277 const int64_t optimal = oxcf->optimal_buffer_level_ms;
1278 const int64_t maximum = oxcf->maximum_buffer_size_ms;
1279
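// The *_ms values are durations in milliseconds, so scaling by the target
// bandwidth (bits/s) and dividing by 1000 converts them to buffer levels in
// bits. For example, a hypothetical 1,000,000 bps target with a 4000 ms
// starting level gives 4,000,000 bits; a zero optimal/maximum setting falls
// back to one eighth of a second of data (bandwidth / 8).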
1280 rc->starting_buffer_level = starting * bandwidth / 1000;
1281 rc->optimal_buffer_level =
1282 (optimal == 0) ? bandwidth / 8 : optimal * bandwidth / 1000;
1283 rc->maximum_buffer_size =
1284 (maximum == 0) ? bandwidth / 8 : maximum * bandwidth / 1000;
1285}
1286
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001287#define HIGHBD_BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX4DF, JSDAF, JSVAF) \
1288 cpi->fn_ptr[BT].sdf = SDF; \
1289 cpi->fn_ptr[BT].sdaf = SDAF; \
1290 cpi->fn_ptr[BT].vf = VF; \
1291 cpi->fn_ptr[BT].svf = SVF; \
1292 cpi->fn_ptr[BT].svaf = SVAF; \
1293 cpi->fn_ptr[BT].sdx4df = SDX4DF; \
1294 cpi->fn_ptr[BT].jsdaf = JSDAF; \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001295 cpi->fn_ptr[BT].jsvaf = JSVAF;
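// HIGHBD_BFP fills one fn_ptr[] entry per block size. The field names appear
// to map as follows: sdf = SAD, sdaf = SAD against an averaged second
// predictor, vf = variance, svf = sub-pixel variance, svaf = sub-pixel
// variance with an averaged second predictor, sdx4df = 4-way SAD against
// four references, and jsdaf / jsvaf = the distance-weighted compound
// counterparts.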
Yaowu Xuc27fc142016-08-22 16:08:15 -07001296
1297#define MAKE_BFP_SAD_WRAPPER(fnname) \
1298 static unsigned int fnname##_bits8(const uint8_t *src_ptr, \
1299 int source_stride, \
1300 const uint8_t *ref_ptr, int ref_stride) { \
1301 return fnname(src_ptr, source_stride, ref_ptr, ref_stride); \
1302 } \
1303 static unsigned int fnname##_bits10( \
1304 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1305 int ref_stride) { \
1306 return fnname(src_ptr, source_stride, ref_ptr, ref_stride) >> 2; \
1307 } \
1308 static unsigned int fnname##_bits12( \
1309 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1310 int ref_stride) { \
1311 return fnname(src_ptr, source_stride, ref_ptr, ref_stride) >> 4; \
1312 }
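// As a rough sketch, MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x8) expands to
// three thin wrappers of this shape (shown here for the 10-bit case):
//
//   static unsigned int aom_highbd_sad8x8_bits10(
//       const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr,
//       int ref_stride) {
//     return aom_highbd_sad8x8(src_ptr, source_stride, ref_ptr, ref_stride) >>
//            2;
//   }
//
// The >> 2 and >> 4 shifts scale 10- and 12-bit SADs back toward an
// 8-bit-equivalent range, presumably so the rate-distortion code can compare
// costs uniformly across bit depths. The SADAVG and SAD4D macros below apply
// the same normalization to the averaging and 4-way variants.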
1313
1314#define MAKE_BFP_SADAVG_WRAPPER(fnname) \
1315 static unsigned int fnname##_bits8( \
1316 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1317 int ref_stride, const uint8_t *second_pred) { \
1318 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred); \
1319 } \
1320 static unsigned int fnname##_bits10( \
1321 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1322 int ref_stride, const uint8_t *second_pred) { \
1323 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred) >> \
1324 2; \
1325 } \
1326 static unsigned int fnname##_bits12( \
1327 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1328 int ref_stride, const uint8_t *second_pred) { \
1329 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred) >> \
1330 4; \
1331 }
1332
Yaowu Xuc27fc142016-08-22 16:08:15 -07001333#define MAKE_BFP_SAD4D_WRAPPER(fnname) \
1334 static void fnname##_bits8(const uint8_t *src_ptr, int source_stride, \
1335 const uint8_t *const ref_ptr[], int ref_stride, \
1336 unsigned int *sad_array) { \
1337 fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
1338 } \
1339 static void fnname##_bits10(const uint8_t *src_ptr, int source_stride, \
1340 const uint8_t *const ref_ptr[], int ref_stride, \
1341 unsigned int *sad_array) { \
1342 int i; \
1343 fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
1344 for (i = 0; i < 4; i++) sad_array[i] >>= 2; \
1345 } \
1346 static void fnname##_bits12(const uint8_t *src_ptr, int source_stride, \
1347 const uint8_t *const ref_ptr[], int ref_stride, \
1348 unsigned int *sad_array) { \
1349 int i; \
1350 fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
1351 for (i = 0; i < 4; i++) sad_array[i] >>= 4; \
1352 }
1353
Cheng Chenbf3d4962017-11-01 14:48:52 -07001354#define MAKE_BFP_JSADAVG_WRAPPER(fnname) \
1355 static unsigned int fnname##_bits8( \
1356 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1357 int ref_stride, const uint8_t *second_pred, \
Debargha Mukherjeef90004a2018-12-20 13:35:06 -08001358 const DIST_WTD_COMP_PARAMS *jcp_param) { \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001359 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred, \
1360 jcp_param); \
1361 } \
1362 static unsigned int fnname##_bits10( \
1363 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1364 int ref_stride, const uint8_t *second_pred, \
Debargha Mukherjeef90004a2018-12-20 13:35:06 -08001365 const DIST_WTD_COMP_PARAMS *jcp_param) { \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001366 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred, \
1367 jcp_param) >> \
1368 2; \
1369 } \
1370 static unsigned int fnname##_bits12( \
1371 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1372 int ref_stride, const uint8_t *second_pred, \
Debargha Mukherjeef90004a2018-12-20 13:35:06 -08001373 const DIST_WTD_COMP_PARAMS *jcp_param) { \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001374 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred, \
1375 jcp_param) >> \
1376 4; \
1377 }
Cheng Chenbf3d4962017-11-01 14:48:52 -07001378
Yaowu Xuf883b422016-08-30 14:01:10 -07001379MAKE_BFP_SAD_WRAPPER(aom_highbd_sad128x128)
1380MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad128x128_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001381MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad128x128x4d)
1382MAKE_BFP_SAD_WRAPPER(aom_highbd_sad128x64)
1383MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad128x64_avg)
1384MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad128x64x4d)
1385MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x128)
1386MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x128_avg)
1387MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x128x4d)
Yaowu Xuf883b422016-08-30 14:01:10 -07001388MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x16)
1389MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x16_avg)
1390MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x16x4d)
1391MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x32)
1392MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x32_avg)
1393MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x32x4d)
1394MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x32)
1395MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x32_avg)
1396MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x32x4d)
1397MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x64)
1398MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x64_avg)
1399MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x64x4d)
1400MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x32)
1401MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x32_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001402MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x32x4d)
1403MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x64)
1404MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x64_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001405MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x64x4d)
1406MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x16)
1407MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x16_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001408MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x16x4d)
1409MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x8)
1410MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x8_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001411MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x8x4d)
1412MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x16)
1413MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x16_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001414MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x16x4d)
1415MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x8)
1416MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x8_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001417MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x8x4d)
1418MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x4)
1419MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x4_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001420MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x4x4d)
1421MAKE_BFP_SAD_WRAPPER(aom_highbd_sad4x8)
1422MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad4x8_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001423MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad4x8x4d)
1424MAKE_BFP_SAD_WRAPPER(aom_highbd_sad4x4)
1425MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad4x4_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001426MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad4x4x4d)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001427
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001428MAKE_BFP_SAD_WRAPPER(aom_highbd_sad4x16)
1429MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad4x16_avg)
1430MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad4x16x4d)
1431MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x4)
1432MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x4_avg)
1433MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x4x4d)
1434MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x32)
1435MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x32_avg)
1436MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x32x4d)
1437MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x8)
1438MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x8_avg)
1439MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x8x4d)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001440MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x64)
1441MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x64_avg)
1442MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x64x4d)
1443MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x16)
1444MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x16_avg)
1445MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x16x4d)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001446
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001447MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad128x128_avg)
1448MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad128x64_avg)
1449MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x128_avg)
1450MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x16_avg)
1451MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x32_avg)
1452MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x32_avg)
1453MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x64_avg)
1454MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x32_avg)
1455MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x64_avg)
1456MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x16_avg)
1457MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x8_avg)
1458MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x16_avg)
1459MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x8_avg)
1460MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x4_avg)
1461MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad4x8_avg)
1462MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad4x4_avg)
1463MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad4x16_avg)
1464MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x4_avg)
1465MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x32_avg)
1466MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x8_avg)
1467MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x64_avg)
1468MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x16_avg)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001469
David Barker0f3c94e2017-05-16 15:21:50 +01001470#define HIGHBD_MBFP(BT, MCSDF, MCSVF) \
David Barkerf19f35f2017-05-22 16:33:22 +01001471 cpi->fn_ptr[BT].msdf = MCSDF; \
1472 cpi->fn_ptr[BT].msvf = MCSVF;
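// HIGHBD_MBFP installs the masked-compound hooks: msdf = masked SAD and
// msvf = masked sub-pixel variance (names inferred from the
// aom_highbd_masked_* functions wired up below).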
Yaowu Xuc27fc142016-08-22 16:08:15 -07001473
David Barkerc155e012017-05-11 13:54:54 +01001474#define MAKE_MBFP_COMPOUND_SAD_WRAPPER(fnname) \
1475 static unsigned int fnname##_bits8( \
1476 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1477 int ref_stride, const uint8_t *second_pred_ptr, const uint8_t *m, \
1478 int m_stride, int invert_mask) { \
1479 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
1480 second_pred_ptr, m, m_stride, invert_mask); \
1481 } \
1482 static unsigned int fnname##_bits10( \
1483 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1484 int ref_stride, const uint8_t *second_pred_ptr, const uint8_t *m, \
1485 int m_stride, int invert_mask) { \
1486 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
1487 second_pred_ptr, m, m_stride, invert_mask) >> \
1488 2; \
1489 } \
1490 static unsigned int fnname##_bits12( \
1491 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1492 int ref_stride, const uint8_t *second_pred_ptr, const uint8_t *m, \
1493 int m_stride, int invert_mask) { \
1494 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
1495 second_pred_ptr, m, m_stride, invert_mask) >> \
1496 4; \
1497 }
1498
David Barkerf19f35f2017-05-22 16:33:22 +01001499MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad128x128)
1500MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad128x64)
1501MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01001502MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x64)
1503MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x32)
1504MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x64)
1505MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x32)
1506MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x16)
1507MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x32)
1508MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x16)
1509MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x8)
1510MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x16)
1511MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x8)
1512MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x4)
1513MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad4x8)
1514MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001515MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad4x16)
1516MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x4)
1517MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x32)
1518MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001519MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x64)
1520MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001521
Yaowu Xuc27fc142016-08-22 16:08:15 -07001522#define HIGHBD_OBFP(BT, OSDF, OVF, OSVF) \
1523 cpi->fn_ptr[BT].osdf = OSDF; \
1524 cpi->fn_ptr[BT].ovf = OVF; \
1525 cpi->fn_ptr[BT].osvf = OSVF;
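// HIGHBD_OBFP installs the overlapped-block (OBMC) hooks: osdf = OBMC SAD,
// ovf = OBMC variance and osvf = OBMC sub-pixel variance, again matching the
// aom_highbd_obmc_* functions assigned below.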
1526
1527#define MAKE_OBFP_SAD_WRAPPER(fnname) \
1528 static unsigned int fnname##_bits8(const uint8_t *ref, int ref_stride, \
1529 const int32_t *wsrc, \
1530 const int32_t *msk) { \
1531 return fnname(ref, ref_stride, wsrc, msk); \
1532 } \
1533 static unsigned int fnname##_bits10(const uint8_t *ref, int ref_stride, \
1534 const int32_t *wsrc, \
1535 const int32_t *msk) { \
1536 return fnname(ref, ref_stride, wsrc, msk) >> 2; \
1537 } \
1538 static unsigned int fnname##_bits12(const uint8_t *ref, int ref_stride, \
1539 const int32_t *wsrc, \
1540 const int32_t *msk) { \
1541 return fnname(ref, ref_stride, wsrc, msk) >> 4; \
1542 }
1543
Yaowu Xuf883b422016-08-30 14:01:10 -07001544MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad128x128)
1545MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad128x64)
1546MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07001547MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x64)
1548MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x32)
1549MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x64)
1550MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x32)
1551MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x16)
1552MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x32)
1553MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x16)
1554MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x8)
1555MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x16)
1556MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x8)
1557MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x4)
1558MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad4x8)
1559MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001560MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad4x16)
1561MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x4)
1562MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x32)
1563MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001564MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x64)
1565MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001566
Yaowu Xuf883b422016-08-30 14:01:10 -07001567static void highbd_set_var_fns(AV1_COMP *const cpi) {
1568 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001569 if (cm->seq_params.use_highbitdepth) {
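// For high-bit-depth input, re-point every per-block-size function pointer
// at the _bits8/_bits10/_bits12 wrappers defined above so SAD-style metrics
// are normalized toward a common 8-bit scale regardless of bit depth.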
1570 switch (cm->seq_params.bit_depth) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001571 case AOM_BITS_8:
Cheng Chenbf3d4962017-11-01 14:48:52 -07001572 HIGHBD_BFP(BLOCK_64X16, aom_highbd_sad64x16_bits8,
1573 aom_highbd_sad64x16_avg_bits8, aom_highbd_8_variance64x16,
1574 aom_highbd_8_sub_pixel_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001575 aom_highbd_8_sub_pixel_avg_variance64x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001576 aom_highbd_sad64x16x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001577 aom_highbd_dist_wtd_sad64x16_avg_bits8,
1578 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001579
1580 HIGHBD_BFP(BLOCK_16X64, aom_highbd_sad16x64_bits8,
1581 aom_highbd_sad16x64_avg_bits8, aom_highbd_8_variance16x64,
1582 aom_highbd_8_sub_pixel_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001583 aom_highbd_8_sub_pixel_avg_variance16x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001584 aom_highbd_sad16x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001585 aom_highbd_dist_wtd_sad16x64_avg_bits8,
1586 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001587
1588 HIGHBD_BFP(
1589 BLOCK_32X8, aom_highbd_sad32x8_bits8, aom_highbd_sad32x8_avg_bits8,
1590 aom_highbd_8_variance32x8, aom_highbd_8_sub_pixel_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001591 aom_highbd_8_sub_pixel_avg_variance32x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001592 aom_highbd_sad32x8x4d_bits8, aom_highbd_dist_wtd_sad32x8_avg_bits8,
1593 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x8)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001594
1595 HIGHBD_BFP(
1596 BLOCK_8X32, aom_highbd_sad8x32_bits8, aom_highbd_sad8x32_avg_bits8,
1597 aom_highbd_8_variance8x32, aom_highbd_8_sub_pixel_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001598 aom_highbd_8_sub_pixel_avg_variance8x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001599 aom_highbd_sad8x32x4d_bits8, aom_highbd_dist_wtd_sad8x32_avg_bits8,
1600 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001601
1602 HIGHBD_BFP(
1603 BLOCK_16X4, aom_highbd_sad16x4_bits8, aom_highbd_sad16x4_avg_bits8,
1604 aom_highbd_8_variance16x4, aom_highbd_8_sub_pixel_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001605 aom_highbd_8_sub_pixel_avg_variance16x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001606 aom_highbd_sad16x4x4d_bits8, aom_highbd_dist_wtd_sad16x4_avg_bits8,
1607 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x4)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001608
1609 HIGHBD_BFP(
1610 BLOCK_4X16, aom_highbd_sad4x16_bits8, aom_highbd_sad4x16_avg_bits8,
1611 aom_highbd_8_variance4x16, aom_highbd_8_sub_pixel_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001612 aom_highbd_8_sub_pixel_avg_variance4x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001613 aom_highbd_sad4x16x4d_bits8, aom_highbd_dist_wtd_sad4x16_avg_bits8,
1614 aom_highbd_8_dist_wtd_sub_pixel_avg_variance4x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001615
1616 HIGHBD_BFP(BLOCK_32X16, aom_highbd_sad32x16_bits8,
1617 aom_highbd_sad32x16_avg_bits8, aom_highbd_8_variance32x16,
1618 aom_highbd_8_sub_pixel_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001619 aom_highbd_8_sub_pixel_avg_variance32x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001620 aom_highbd_sad32x16x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001621 aom_highbd_dist_wtd_sad32x16_avg_bits8,
1622 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001623
1624 HIGHBD_BFP(BLOCK_16X32, aom_highbd_sad16x32_bits8,
1625 aom_highbd_sad16x32_avg_bits8, aom_highbd_8_variance16x32,
1626 aom_highbd_8_sub_pixel_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001627 aom_highbd_8_sub_pixel_avg_variance16x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001628 aom_highbd_sad16x32x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001629 aom_highbd_dist_wtd_sad16x32_avg_bits8,
1630 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001631
1632 HIGHBD_BFP(BLOCK_64X32, aom_highbd_sad64x32_bits8,
1633 aom_highbd_sad64x32_avg_bits8, aom_highbd_8_variance64x32,
1634 aom_highbd_8_sub_pixel_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001635 aom_highbd_8_sub_pixel_avg_variance64x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001636 aom_highbd_sad64x32x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001637 aom_highbd_dist_wtd_sad64x32_avg_bits8,
1638 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001639
1640 HIGHBD_BFP(BLOCK_32X64, aom_highbd_sad32x64_bits8,
1641 aom_highbd_sad32x64_avg_bits8, aom_highbd_8_variance32x64,
1642 aom_highbd_8_sub_pixel_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001643 aom_highbd_8_sub_pixel_avg_variance32x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001644 aom_highbd_sad32x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001645 aom_highbd_dist_wtd_sad32x64_avg_bits8,
1646 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001647
1648 HIGHBD_BFP(BLOCK_32X32, aom_highbd_sad32x32_bits8,
1649 aom_highbd_sad32x32_avg_bits8, aom_highbd_8_variance32x32,
1650 aom_highbd_8_sub_pixel_variance32x32,
1651 aom_highbd_8_sub_pixel_avg_variance32x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001652 aom_highbd_sad32x32x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001653 aom_highbd_dist_wtd_sad32x32_avg_bits8,
1654 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001655
1656 HIGHBD_BFP(BLOCK_64X64, aom_highbd_sad64x64_bits8,
1657 aom_highbd_sad64x64_avg_bits8, aom_highbd_8_variance64x64,
1658 aom_highbd_8_sub_pixel_variance64x64,
1659 aom_highbd_8_sub_pixel_avg_variance64x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001660 aom_highbd_sad64x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001661 aom_highbd_dist_wtd_sad64x64_avg_bits8,
1662 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001663
1664 HIGHBD_BFP(BLOCK_16X16, aom_highbd_sad16x16_bits8,
1665 aom_highbd_sad16x16_avg_bits8, aom_highbd_8_variance16x16,
1666 aom_highbd_8_sub_pixel_variance16x16,
1667 aom_highbd_8_sub_pixel_avg_variance16x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001668 aom_highbd_sad16x16x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001669 aom_highbd_dist_wtd_sad16x16_avg_bits8,
1670 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001671
1672 HIGHBD_BFP(
1673 BLOCK_16X8, aom_highbd_sad16x8_bits8, aom_highbd_sad16x8_avg_bits8,
1674 aom_highbd_8_variance16x8, aom_highbd_8_sub_pixel_variance16x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001675 aom_highbd_8_sub_pixel_avg_variance16x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001676 aom_highbd_sad16x8x4d_bits8, aom_highbd_dist_wtd_sad16x8_avg_bits8,
1677 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x8)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001678
1679 HIGHBD_BFP(
1680 BLOCK_8X16, aom_highbd_sad8x16_bits8, aom_highbd_sad8x16_avg_bits8,
1681 aom_highbd_8_variance8x16, aom_highbd_8_sub_pixel_variance8x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001682 aom_highbd_8_sub_pixel_avg_variance8x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001683 aom_highbd_sad8x16x4d_bits8, aom_highbd_dist_wtd_sad8x16_avg_bits8,
1684 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001685
Cheng Chenbf3d4962017-11-01 14:48:52 -07001686 HIGHBD_BFP(
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001687 BLOCK_8X8, aom_highbd_sad8x8_bits8, aom_highbd_sad8x8_avg_bits8,
1688 aom_highbd_8_variance8x8, aom_highbd_8_sub_pixel_variance8x8,
1689 aom_highbd_8_sub_pixel_avg_variance8x8, aom_highbd_sad8x8x4d_bits8,
1690 aom_highbd_dist_wtd_sad8x8_avg_bits8,
1691 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x8)
1692
1693 HIGHBD_BFP(
1694 BLOCK_8X4, aom_highbd_sad8x4_bits8, aom_highbd_sad8x4_avg_bits8,
1695 aom_highbd_8_variance8x4, aom_highbd_8_sub_pixel_variance8x4,
1696 aom_highbd_8_sub_pixel_avg_variance8x4, aom_highbd_sad8x4x4d_bits8,
1697 aom_highbd_dist_wtd_sad8x4_avg_bits8,
1698 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x4)
1699
1700 HIGHBD_BFP(
1701 BLOCK_4X8, aom_highbd_sad4x8_bits8, aom_highbd_sad4x8_avg_bits8,
1702 aom_highbd_8_variance4x8, aom_highbd_8_sub_pixel_variance4x8,
1703 aom_highbd_8_sub_pixel_avg_variance4x8, aom_highbd_sad4x8x4d_bits8,
1704 aom_highbd_dist_wtd_sad4x8_avg_bits8,
1705 aom_highbd_8_dist_wtd_sub_pixel_avg_variance4x8)
1706
1707 HIGHBD_BFP(
1708 BLOCK_4X4, aom_highbd_sad4x4_bits8, aom_highbd_sad4x4_avg_bits8,
1709 aom_highbd_8_variance4x4, aom_highbd_8_sub_pixel_variance4x4,
1710 aom_highbd_8_sub_pixel_avg_variance4x4, aom_highbd_sad4x4x4d_bits8,
1711 aom_highbd_dist_wtd_sad4x4_avg_bits8,
1712 aom_highbd_8_dist_wtd_sub_pixel_avg_variance4x4)
1713
1714 HIGHBD_BFP(BLOCK_128X128, aom_highbd_sad128x128_bits8,
1715 aom_highbd_sad128x128_avg_bits8,
1716 aom_highbd_8_variance128x128,
1717 aom_highbd_8_sub_pixel_variance128x128,
1718 aom_highbd_8_sub_pixel_avg_variance128x128,
1719 aom_highbd_sad128x128x4d_bits8,
1720 aom_highbd_dist_wtd_sad128x128_avg_bits8,
1721 aom_highbd_8_dist_wtd_sub_pixel_avg_variance128x128)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001722
1723 HIGHBD_BFP(BLOCK_128X64, aom_highbd_sad128x64_bits8,
1724 aom_highbd_sad128x64_avg_bits8, aom_highbd_8_variance128x64,
1725 aom_highbd_8_sub_pixel_variance128x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001726 aom_highbd_8_sub_pixel_avg_variance128x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001727 aom_highbd_sad128x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001728 aom_highbd_dist_wtd_sad128x64_avg_bits8,
1729 aom_highbd_8_dist_wtd_sub_pixel_avg_variance128x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001730
1731 HIGHBD_BFP(BLOCK_64X128, aom_highbd_sad64x128_bits8,
1732 aom_highbd_sad64x128_avg_bits8, aom_highbd_8_variance64x128,
1733 aom_highbd_8_sub_pixel_variance64x128,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001734 aom_highbd_8_sub_pixel_avg_variance64x128,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001735 aom_highbd_sad64x128x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001736 aom_highbd_dist_wtd_sad64x128_avg_bits8,
1737 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x128)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001738
David Barkerf19f35f2017-05-22 16:33:22 +01001739 HIGHBD_MBFP(BLOCK_128X128, aom_highbd_masked_sad128x128_bits8,
1740 aom_highbd_8_masked_sub_pixel_variance128x128)
1741 HIGHBD_MBFP(BLOCK_128X64, aom_highbd_masked_sad128x64_bits8,
1742 aom_highbd_8_masked_sub_pixel_variance128x64)
1743 HIGHBD_MBFP(BLOCK_64X128, aom_highbd_masked_sad64x128_bits8,
1744 aom_highbd_8_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01001745 HIGHBD_MBFP(BLOCK_64X64, aom_highbd_masked_sad64x64_bits8,
1746 aom_highbd_8_masked_sub_pixel_variance64x64)
1747 HIGHBD_MBFP(BLOCK_64X32, aom_highbd_masked_sad64x32_bits8,
1748 aom_highbd_8_masked_sub_pixel_variance64x32)
1749 HIGHBD_MBFP(BLOCK_32X64, aom_highbd_masked_sad32x64_bits8,
1750 aom_highbd_8_masked_sub_pixel_variance32x64)
1751 HIGHBD_MBFP(BLOCK_32X32, aom_highbd_masked_sad32x32_bits8,
1752 aom_highbd_8_masked_sub_pixel_variance32x32)
1753 HIGHBD_MBFP(BLOCK_32X16, aom_highbd_masked_sad32x16_bits8,
1754 aom_highbd_8_masked_sub_pixel_variance32x16)
1755 HIGHBD_MBFP(BLOCK_16X32, aom_highbd_masked_sad16x32_bits8,
1756 aom_highbd_8_masked_sub_pixel_variance16x32)
1757 HIGHBD_MBFP(BLOCK_16X16, aom_highbd_masked_sad16x16_bits8,
1758 aom_highbd_8_masked_sub_pixel_variance16x16)
1759 HIGHBD_MBFP(BLOCK_8X16, aom_highbd_masked_sad8x16_bits8,
1760 aom_highbd_8_masked_sub_pixel_variance8x16)
1761 HIGHBD_MBFP(BLOCK_16X8, aom_highbd_masked_sad16x8_bits8,
1762 aom_highbd_8_masked_sub_pixel_variance16x8)
1763 HIGHBD_MBFP(BLOCK_8X8, aom_highbd_masked_sad8x8_bits8,
1764 aom_highbd_8_masked_sub_pixel_variance8x8)
1765 HIGHBD_MBFP(BLOCK_4X8, aom_highbd_masked_sad4x8_bits8,
1766 aom_highbd_8_masked_sub_pixel_variance4x8)
1767 HIGHBD_MBFP(BLOCK_8X4, aom_highbd_masked_sad8x4_bits8,
1768 aom_highbd_8_masked_sub_pixel_variance8x4)
1769 HIGHBD_MBFP(BLOCK_4X4, aom_highbd_masked_sad4x4_bits8,
1770 aom_highbd_8_masked_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001771 HIGHBD_MBFP(BLOCK_64X16, aom_highbd_masked_sad64x16_bits8,
1772 aom_highbd_8_masked_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001773 HIGHBD_MBFP(BLOCK_16X64, aom_highbd_masked_sad16x64_bits8,
1774 aom_highbd_8_masked_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001775 HIGHBD_MBFP(BLOCK_32X8, aom_highbd_masked_sad32x8_bits8,
1776 aom_highbd_8_masked_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001777 HIGHBD_MBFP(BLOCK_8X32, aom_highbd_masked_sad8x32_bits8,
1778 aom_highbd_8_masked_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001779 HIGHBD_MBFP(BLOCK_16X4, aom_highbd_masked_sad16x4_bits8,
1780 aom_highbd_8_masked_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001781 HIGHBD_MBFP(BLOCK_4X16, aom_highbd_masked_sad4x16_bits8,
1782 aom_highbd_8_masked_sub_pixel_variance4x16)
Yaowu Xuf883b422016-08-30 14:01:10 -07001783 HIGHBD_OBFP(BLOCK_128X128, aom_highbd_obmc_sad128x128_bits8,
1784 aom_highbd_obmc_variance128x128,
1785 aom_highbd_obmc_sub_pixel_variance128x128)
1786 HIGHBD_OBFP(BLOCK_128X64, aom_highbd_obmc_sad128x64_bits8,
1787 aom_highbd_obmc_variance128x64,
1788 aom_highbd_obmc_sub_pixel_variance128x64)
1789 HIGHBD_OBFP(BLOCK_64X128, aom_highbd_obmc_sad64x128_bits8,
1790 aom_highbd_obmc_variance64x128,
1791 aom_highbd_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07001792 HIGHBD_OBFP(BLOCK_64X64, aom_highbd_obmc_sad64x64_bits8,
1793 aom_highbd_obmc_variance64x64,
1794 aom_highbd_obmc_sub_pixel_variance64x64)
1795 HIGHBD_OBFP(BLOCK_64X32, aom_highbd_obmc_sad64x32_bits8,
1796 aom_highbd_obmc_variance64x32,
1797 aom_highbd_obmc_sub_pixel_variance64x32)
1798 HIGHBD_OBFP(BLOCK_32X64, aom_highbd_obmc_sad32x64_bits8,
1799 aom_highbd_obmc_variance32x64,
1800 aom_highbd_obmc_sub_pixel_variance32x64)
1801 HIGHBD_OBFP(BLOCK_32X32, aom_highbd_obmc_sad32x32_bits8,
1802 aom_highbd_obmc_variance32x32,
1803 aom_highbd_obmc_sub_pixel_variance32x32)
1804 HIGHBD_OBFP(BLOCK_32X16, aom_highbd_obmc_sad32x16_bits8,
1805 aom_highbd_obmc_variance32x16,
1806 aom_highbd_obmc_sub_pixel_variance32x16)
1807 HIGHBD_OBFP(BLOCK_16X32, aom_highbd_obmc_sad16x32_bits8,
1808 aom_highbd_obmc_variance16x32,
1809 aom_highbd_obmc_sub_pixel_variance16x32)
1810 HIGHBD_OBFP(BLOCK_16X16, aom_highbd_obmc_sad16x16_bits8,
1811 aom_highbd_obmc_variance16x16,
1812 aom_highbd_obmc_sub_pixel_variance16x16)
1813 HIGHBD_OBFP(BLOCK_8X16, aom_highbd_obmc_sad8x16_bits8,
1814 aom_highbd_obmc_variance8x16,
1815 aom_highbd_obmc_sub_pixel_variance8x16)
1816 HIGHBD_OBFP(BLOCK_16X8, aom_highbd_obmc_sad16x8_bits8,
1817 aom_highbd_obmc_variance16x8,
1818 aom_highbd_obmc_sub_pixel_variance16x8)
1819 HIGHBD_OBFP(BLOCK_8X8, aom_highbd_obmc_sad8x8_bits8,
1820 aom_highbd_obmc_variance8x8,
1821 aom_highbd_obmc_sub_pixel_variance8x8)
1822 HIGHBD_OBFP(BLOCK_4X8, aom_highbd_obmc_sad4x8_bits8,
1823 aom_highbd_obmc_variance4x8,
1824 aom_highbd_obmc_sub_pixel_variance4x8)
1825 HIGHBD_OBFP(BLOCK_8X4, aom_highbd_obmc_sad8x4_bits8,
1826 aom_highbd_obmc_variance8x4,
1827 aom_highbd_obmc_sub_pixel_variance8x4)
1828 HIGHBD_OBFP(BLOCK_4X4, aom_highbd_obmc_sad4x4_bits8,
1829 aom_highbd_obmc_variance4x4,
1830 aom_highbd_obmc_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001831 HIGHBD_OBFP(BLOCK_64X16, aom_highbd_obmc_sad64x16_bits8,
1832 aom_highbd_obmc_variance64x16,
1833 aom_highbd_obmc_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001834 HIGHBD_OBFP(BLOCK_16X64, aom_highbd_obmc_sad16x64_bits8,
1835 aom_highbd_obmc_variance16x64,
1836 aom_highbd_obmc_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001837 HIGHBD_OBFP(BLOCK_32X8, aom_highbd_obmc_sad32x8_bits8,
1838 aom_highbd_obmc_variance32x8,
1839 aom_highbd_obmc_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001840 HIGHBD_OBFP(BLOCK_8X32, aom_highbd_obmc_sad8x32_bits8,
1841 aom_highbd_obmc_variance8x32,
1842 aom_highbd_obmc_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001843 HIGHBD_OBFP(BLOCK_16X4, aom_highbd_obmc_sad16x4_bits8,
1844 aom_highbd_obmc_variance16x4,
1845 aom_highbd_obmc_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001846 HIGHBD_OBFP(BLOCK_4X16, aom_highbd_obmc_sad4x16_bits8,
1847 aom_highbd_obmc_variance4x16,
1848 aom_highbd_obmc_sub_pixel_variance4x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001849 break;
1850
Yaowu Xuf883b422016-08-30 14:01:10 -07001851 case AOM_BITS_10:
Cheng Chenbf3d4962017-11-01 14:48:52 -07001852 HIGHBD_BFP(BLOCK_64X16, aom_highbd_sad64x16_bits10,
1853 aom_highbd_sad64x16_avg_bits10, aom_highbd_10_variance64x16,
1854 aom_highbd_10_sub_pixel_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001855 aom_highbd_10_sub_pixel_avg_variance64x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001856 aom_highbd_sad64x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001857 aom_highbd_dist_wtd_sad64x16_avg_bits10,
1858 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001859
1860 HIGHBD_BFP(BLOCK_16X64, aom_highbd_sad16x64_bits10,
1861 aom_highbd_sad16x64_avg_bits10, aom_highbd_10_variance16x64,
1862 aom_highbd_10_sub_pixel_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001863 aom_highbd_10_sub_pixel_avg_variance16x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001864 aom_highbd_sad16x64x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001865 aom_highbd_dist_wtd_sad16x64_avg_bits10,
1866 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001867
1868 HIGHBD_BFP(BLOCK_32X8, aom_highbd_sad32x8_bits10,
1869 aom_highbd_sad32x8_avg_bits10, aom_highbd_10_variance32x8,
1870 aom_highbd_10_sub_pixel_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001871 aom_highbd_10_sub_pixel_avg_variance32x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001872 aom_highbd_sad32x8x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001873 aom_highbd_dist_wtd_sad32x8_avg_bits10,
1874 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001875
1876 HIGHBD_BFP(BLOCK_8X32, aom_highbd_sad8x32_bits10,
1877 aom_highbd_sad8x32_avg_bits10, aom_highbd_10_variance8x32,
1878 aom_highbd_10_sub_pixel_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001879 aom_highbd_10_sub_pixel_avg_variance8x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001880 aom_highbd_sad8x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001881 aom_highbd_dist_wtd_sad8x32_avg_bits10,
1882 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001883
1884 HIGHBD_BFP(BLOCK_16X4, aom_highbd_sad16x4_bits10,
1885 aom_highbd_sad16x4_avg_bits10, aom_highbd_10_variance16x4,
1886 aom_highbd_10_sub_pixel_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001887 aom_highbd_10_sub_pixel_avg_variance16x4,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001888 aom_highbd_sad16x4x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001889 aom_highbd_dist_wtd_sad16x4_avg_bits10,
1890 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001891
1892 HIGHBD_BFP(BLOCK_4X16, aom_highbd_sad4x16_bits10,
1893 aom_highbd_sad4x16_avg_bits10, aom_highbd_10_variance4x16,
1894 aom_highbd_10_sub_pixel_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001895 aom_highbd_10_sub_pixel_avg_variance4x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001896 aom_highbd_sad4x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001897 aom_highbd_dist_wtd_sad4x16_avg_bits10,
1898 aom_highbd_10_dist_wtd_sub_pixel_avg_variance4x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001899
1900 HIGHBD_BFP(BLOCK_32X16, aom_highbd_sad32x16_bits10,
1901 aom_highbd_sad32x16_avg_bits10, aom_highbd_10_variance32x16,
1902 aom_highbd_10_sub_pixel_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001903 aom_highbd_10_sub_pixel_avg_variance32x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001904 aom_highbd_sad32x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001905 aom_highbd_dist_wtd_sad32x16_avg_bits10,
1906 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001907
1908 HIGHBD_BFP(BLOCK_16X32, aom_highbd_sad16x32_bits10,
1909 aom_highbd_sad16x32_avg_bits10, aom_highbd_10_variance16x32,
1910 aom_highbd_10_sub_pixel_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001911 aom_highbd_10_sub_pixel_avg_variance16x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001912 aom_highbd_sad16x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001913 aom_highbd_dist_wtd_sad16x32_avg_bits10,
1914 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001915
1916 HIGHBD_BFP(BLOCK_64X32, aom_highbd_sad64x32_bits10,
1917 aom_highbd_sad64x32_avg_bits10, aom_highbd_10_variance64x32,
1918 aom_highbd_10_sub_pixel_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001919 aom_highbd_10_sub_pixel_avg_variance64x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001920 aom_highbd_sad64x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001921 aom_highbd_dist_wtd_sad64x32_avg_bits10,
1922 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001923
1924 HIGHBD_BFP(BLOCK_32X64, aom_highbd_sad32x64_bits10,
1925 aom_highbd_sad32x64_avg_bits10, aom_highbd_10_variance32x64,
1926 aom_highbd_10_sub_pixel_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001927 aom_highbd_10_sub_pixel_avg_variance32x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001928 aom_highbd_sad32x64x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001929 aom_highbd_dist_wtd_sad32x64_avg_bits10,
1930 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001931
1932 HIGHBD_BFP(BLOCK_32X32, aom_highbd_sad32x32_bits10,
1933 aom_highbd_sad32x32_avg_bits10, aom_highbd_10_variance32x32,
1934 aom_highbd_10_sub_pixel_variance32x32,
1935 aom_highbd_10_sub_pixel_avg_variance32x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001936 aom_highbd_sad32x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001937 aom_highbd_dist_wtd_sad32x32_avg_bits10,
1938 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001939
1940 HIGHBD_BFP(BLOCK_64X64, aom_highbd_sad64x64_bits10,
1941 aom_highbd_sad64x64_avg_bits10, aom_highbd_10_variance64x64,
1942 aom_highbd_10_sub_pixel_variance64x64,
1943 aom_highbd_10_sub_pixel_avg_variance64x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001944 aom_highbd_sad64x64x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001945 aom_highbd_dist_wtd_sad64x64_avg_bits10,
1946 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001947
1948 HIGHBD_BFP(BLOCK_16X16, aom_highbd_sad16x16_bits10,
1949 aom_highbd_sad16x16_avg_bits10, aom_highbd_10_variance16x16,
1950 aom_highbd_10_sub_pixel_variance16x16,
1951 aom_highbd_10_sub_pixel_avg_variance16x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001952 aom_highbd_sad16x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001953 aom_highbd_dist_wtd_sad16x16_avg_bits10,
1954 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001955
1956 HIGHBD_BFP(BLOCK_16X8, aom_highbd_sad16x8_bits10,
1957 aom_highbd_sad16x8_avg_bits10, aom_highbd_10_variance16x8,
1958 aom_highbd_10_sub_pixel_variance16x8,
1959 aom_highbd_10_sub_pixel_avg_variance16x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001960 aom_highbd_sad16x8x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001961 aom_highbd_dist_wtd_sad16x8_avg_bits10,
1962 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001963
1964 HIGHBD_BFP(BLOCK_8X16, aom_highbd_sad8x16_bits10,
1965 aom_highbd_sad8x16_avg_bits10, aom_highbd_10_variance8x16,
1966 aom_highbd_10_sub_pixel_variance8x16,
1967 aom_highbd_10_sub_pixel_avg_variance8x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001968 aom_highbd_sad8x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001969 aom_highbd_dist_wtd_sad8x16_avg_bits10,
1970 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001971
1972 HIGHBD_BFP(
1973 BLOCK_8X8, aom_highbd_sad8x8_bits10, aom_highbd_sad8x8_avg_bits10,
1974 aom_highbd_10_variance8x8, aom_highbd_10_sub_pixel_variance8x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001975 aom_highbd_10_sub_pixel_avg_variance8x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001976 aom_highbd_sad8x8x4d_bits10, aom_highbd_dist_wtd_sad8x8_avg_bits10,
1977 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001978
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001979 HIGHBD_BFP(
1980 BLOCK_8X4, aom_highbd_sad8x4_bits10, aom_highbd_sad8x4_avg_bits10,
1981 aom_highbd_10_variance8x4, aom_highbd_10_sub_pixel_variance8x4,
1982 aom_highbd_10_sub_pixel_avg_variance8x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001983 aom_highbd_sad8x4x4d_bits10, aom_highbd_dist_wtd_sad8x4_avg_bits10,
1984 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001985
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001986 HIGHBD_BFP(
1987 BLOCK_4X8, aom_highbd_sad4x8_bits10, aom_highbd_sad4x8_avg_bits10,
1988 aom_highbd_10_variance4x8, aom_highbd_10_sub_pixel_variance4x8,
1989 aom_highbd_10_sub_pixel_avg_variance4x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001990 aom_highbd_sad4x8x4d_bits10, aom_highbd_dist_wtd_sad4x8_avg_bits10,
1991 aom_highbd_10_dist_wtd_sub_pixel_avg_variance4x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001992
1993 HIGHBD_BFP(
1994 BLOCK_4X4, aom_highbd_sad4x4_bits10, aom_highbd_sad4x4_avg_bits10,
1995 aom_highbd_10_variance4x4, aom_highbd_10_sub_pixel_variance4x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001996 aom_highbd_10_sub_pixel_avg_variance4x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001997 aom_highbd_sad4x4x4d_bits10, aom_highbd_dist_wtd_sad4x4_avg_bits10,
1998 aom_highbd_10_dist_wtd_sub_pixel_avg_variance4x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001999
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002000 HIGHBD_BFP(BLOCK_128X128, aom_highbd_sad128x128_bits10,
2001 aom_highbd_sad128x128_avg_bits10,
2002 aom_highbd_10_variance128x128,
2003 aom_highbd_10_sub_pixel_variance128x128,
2004 aom_highbd_10_sub_pixel_avg_variance128x128,
2005 aom_highbd_sad128x128x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002006 aom_highbd_dist_wtd_sad128x128_avg_bits10,
2007 aom_highbd_10_dist_wtd_sub_pixel_avg_variance128x128);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002008
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002009 HIGHBD_BFP(BLOCK_128X64, aom_highbd_sad128x64_bits10,
2010 aom_highbd_sad128x64_avg_bits10,
2011 aom_highbd_10_variance128x64,
2012 aom_highbd_10_sub_pixel_variance128x64,
2013 aom_highbd_10_sub_pixel_avg_variance128x64,
2014 aom_highbd_sad128x64x4d_bits10,
2015 aom_highbd_dist_wtd_sad128x64_avg_bits10,
2016 aom_highbd_10_dist_wtd_sub_pixel_avg_variance128x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002017
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002018 HIGHBD_BFP(BLOCK_64X128, aom_highbd_sad64x128_bits10,
2019 aom_highbd_sad64x128_avg_bits10,
2020 aom_highbd_10_variance64x128,
2021 aom_highbd_10_sub_pixel_variance64x128,
2022 aom_highbd_10_sub_pixel_avg_variance64x128,
2023 aom_highbd_sad64x128x4d_bits10,
2024 aom_highbd_dist_wtd_sad64x128_avg_bits10,
2025 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x128);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002026
David Barkerf19f35f2017-05-22 16:33:22 +01002027 HIGHBD_MBFP(BLOCK_128X128, aom_highbd_masked_sad128x128_bits10,
2028 aom_highbd_10_masked_sub_pixel_variance128x128)
2029 HIGHBD_MBFP(BLOCK_128X64, aom_highbd_masked_sad128x64_bits10,
2030 aom_highbd_10_masked_sub_pixel_variance128x64)
2031 HIGHBD_MBFP(BLOCK_64X128, aom_highbd_masked_sad64x128_bits10,
2032 aom_highbd_10_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01002033 HIGHBD_MBFP(BLOCK_64X64, aom_highbd_masked_sad64x64_bits10,
2034 aom_highbd_10_masked_sub_pixel_variance64x64)
2035 HIGHBD_MBFP(BLOCK_64X32, aom_highbd_masked_sad64x32_bits10,
2036 aom_highbd_10_masked_sub_pixel_variance64x32)
2037 HIGHBD_MBFP(BLOCK_32X64, aom_highbd_masked_sad32x64_bits10,
2038 aom_highbd_10_masked_sub_pixel_variance32x64)
2039 HIGHBD_MBFP(BLOCK_32X32, aom_highbd_masked_sad32x32_bits10,
2040 aom_highbd_10_masked_sub_pixel_variance32x32)
2041 HIGHBD_MBFP(BLOCK_32X16, aom_highbd_masked_sad32x16_bits10,
2042 aom_highbd_10_masked_sub_pixel_variance32x16)
2043 HIGHBD_MBFP(BLOCK_16X32, aom_highbd_masked_sad16x32_bits10,
2044 aom_highbd_10_masked_sub_pixel_variance16x32)
2045 HIGHBD_MBFP(BLOCK_16X16, aom_highbd_masked_sad16x16_bits10,
2046 aom_highbd_10_masked_sub_pixel_variance16x16)
2047 HIGHBD_MBFP(BLOCK_8X16, aom_highbd_masked_sad8x16_bits10,
2048 aom_highbd_10_masked_sub_pixel_variance8x16)
2049 HIGHBD_MBFP(BLOCK_16X8, aom_highbd_masked_sad16x8_bits10,
2050 aom_highbd_10_masked_sub_pixel_variance16x8)
2051 HIGHBD_MBFP(BLOCK_8X8, aom_highbd_masked_sad8x8_bits10,
2052 aom_highbd_10_masked_sub_pixel_variance8x8)
2053 HIGHBD_MBFP(BLOCK_4X8, aom_highbd_masked_sad4x8_bits10,
2054 aom_highbd_10_masked_sub_pixel_variance4x8)
2055 HIGHBD_MBFP(BLOCK_8X4, aom_highbd_masked_sad8x4_bits10,
2056 aom_highbd_10_masked_sub_pixel_variance8x4)
2057 HIGHBD_MBFP(BLOCK_4X4, aom_highbd_masked_sad4x4_bits10,
2058 aom_highbd_10_masked_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002059 HIGHBD_MBFP(BLOCK_64X16, aom_highbd_masked_sad64x16_bits10,
2060 aom_highbd_10_masked_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002061 HIGHBD_MBFP(BLOCK_16X64, aom_highbd_masked_sad16x64_bits10,
2062 aom_highbd_10_masked_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002063 HIGHBD_MBFP(BLOCK_32X8, aom_highbd_masked_sad32x8_bits10,
2064 aom_highbd_10_masked_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002065 HIGHBD_MBFP(BLOCK_8X32, aom_highbd_masked_sad8x32_bits10,
2066 aom_highbd_10_masked_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002067 HIGHBD_MBFP(BLOCK_16X4, aom_highbd_masked_sad16x4_bits10,
2068 aom_highbd_10_masked_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002069 HIGHBD_MBFP(BLOCK_4X16, aom_highbd_masked_sad4x16_bits10,
2070 aom_highbd_10_masked_sub_pixel_variance4x16)
Yaowu Xuf883b422016-08-30 14:01:10 -07002071 HIGHBD_OBFP(BLOCK_128X128, aom_highbd_obmc_sad128x128_bits10,
2072 aom_highbd_10_obmc_variance128x128,
2073 aom_highbd_10_obmc_sub_pixel_variance128x128)
2074 HIGHBD_OBFP(BLOCK_128X64, aom_highbd_obmc_sad128x64_bits10,
2075 aom_highbd_10_obmc_variance128x64,
2076 aom_highbd_10_obmc_sub_pixel_variance128x64)
2077 HIGHBD_OBFP(BLOCK_64X128, aom_highbd_obmc_sad64x128_bits10,
2078 aom_highbd_10_obmc_variance64x128,
2079 aom_highbd_10_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07002080 HIGHBD_OBFP(BLOCK_64X64, aom_highbd_obmc_sad64x64_bits10,
2081 aom_highbd_10_obmc_variance64x64,
2082 aom_highbd_10_obmc_sub_pixel_variance64x64)
2083 HIGHBD_OBFP(BLOCK_64X32, aom_highbd_obmc_sad64x32_bits10,
2084 aom_highbd_10_obmc_variance64x32,
2085 aom_highbd_10_obmc_sub_pixel_variance64x32)
2086 HIGHBD_OBFP(BLOCK_32X64, aom_highbd_obmc_sad32x64_bits10,
2087 aom_highbd_10_obmc_variance32x64,
2088 aom_highbd_10_obmc_sub_pixel_variance32x64)
2089 HIGHBD_OBFP(BLOCK_32X32, aom_highbd_obmc_sad32x32_bits10,
2090 aom_highbd_10_obmc_variance32x32,
2091 aom_highbd_10_obmc_sub_pixel_variance32x32)
2092 HIGHBD_OBFP(BLOCK_32X16, aom_highbd_obmc_sad32x16_bits10,
2093 aom_highbd_10_obmc_variance32x16,
2094 aom_highbd_10_obmc_sub_pixel_variance32x16)
2095 HIGHBD_OBFP(BLOCK_16X32, aom_highbd_obmc_sad16x32_bits10,
2096 aom_highbd_10_obmc_variance16x32,
2097 aom_highbd_10_obmc_sub_pixel_variance16x32)
2098 HIGHBD_OBFP(BLOCK_16X16, aom_highbd_obmc_sad16x16_bits10,
2099 aom_highbd_10_obmc_variance16x16,
2100 aom_highbd_10_obmc_sub_pixel_variance16x16)
2101 HIGHBD_OBFP(BLOCK_8X16, aom_highbd_obmc_sad8x16_bits10,
2102 aom_highbd_10_obmc_variance8x16,
2103 aom_highbd_10_obmc_sub_pixel_variance8x16)
2104 HIGHBD_OBFP(BLOCK_16X8, aom_highbd_obmc_sad16x8_bits10,
2105 aom_highbd_10_obmc_variance16x8,
2106 aom_highbd_10_obmc_sub_pixel_variance16x8)
2107 HIGHBD_OBFP(BLOCK_8X8, aom_highbd_obmc_sad8x8_bits10,
2108 aom_highbd_10_obmc_variance8x8,
2109 aom_highbd_10_obmc_sub_pixel_variance8x8)
2110 HIGHBD_OBFP(BLOCK_4X8, aom_highbd_obmc_sad4x8_bits10,
2111 aom_highbd_10_obmc_variance4x8,
2112 aom_highbd_10_obmc_sub_pixel_variance4x8)
2113 HIGHBD_OBFP(BLOCK_8X4, aom_highbd_obmc_sad8x4_bits10,
2114 aom_highbd_10_obmc_variance8x4,
2115 aom_highbd_10_obmc_sub_pixel_variance8x4)
2116 HIGHBD_OBFP(BLOCK_4X4, aom_highbd_obmc_sad4x4_bits10,
2117 aom_highbd_10_obmc_variance4x4,
2118 aom_highbd_10_obmc_sub_pixel_variance4x4)
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01002119
Rupert Swarbrick72678572017-08-02 12:05:26 +01002120 HIGHBD_OBFP(BLOCK_64X16, aom_highbd_obmc_sad64x16_bits10,
2121 aom_highbd_10_obmc_variance64x16,
2122 aom_highbd_10_obmc_sub_pixel_variance64x16)
2123
2124 HIGHBD_OBFP(BLOCK_16X64, aom_highbd_obmc_sad16x64_bits10,
2125 aom_highbd_10_obmc_variance16x64,
2126 aom_highbd_10_obmc_sub_pixel_variance16x64)
2127
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002128 HIGHBD_OBFP(BLOCK_32X8, aom_highbd_obmc_sad32x8_bits10,
2129 aom_highbd_10_obmc_variance32x8,
2130 aom_highbd_10_obmc_sub_pixel_variance32x8)
2131
2132 HIGHBD_OBFP(BLOCK_8X32, aom_highbd_obmc_sad8x32_bits10,
2133 aom_highbd_10_obmc_variance8x32,
2134 aom_highbd_10_obmc_sub_pixel_variance8x32)
2135
2136 HIGHBD_OBFP(BLOCK_16X4, aom_highbd_obmc_sad16x4_bits10,
2137 aom_highbd_10_obmc_variance16x4,
2138 aom_highbd_10_obmc_sub_pixel_variance16x4)
2139
2140 HIGHBD_OBFP(BLOCK_4X16, aom_highbd_obmc_sad4x16_bits10,
2141 aom_highbd_10_obmc_variance4x16,
2142 aom_highbd_10_obmc_sub_pixel_variance4x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002143 break;
2144
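    // The AOM_BITS_12 case below mirrors the 10-bit table above: the same
    // block sizes are registered, but with the *_bits12 SAD wrappers and the
    // aom_highbd_12_* variance/masked/OBMC kernels.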
Yaowu Xuf883b422016-08-30 14:01:10 -07002145 case AOM_BITS_12:
Cheng Chenbf3d4962017-11-01 14:48:52 -07002146 HIGHBD_BFP(BLOCK_64X16, aom_highbd_sad64x16_bits12,
2147 aom_highbd_sad64x16_avg_bits12, aom_highbd_12_variance64x16,
2148 aom_highbd_12_sub_pixel_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002149 aom_highbd_12_sub_pixel_avg_variance64x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002150 aom_highbd_sad64x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002151 aom_highbd_dist_wtd_sad64x16_avg_bits12,
2152 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002153
2154 HIGHBD_BFP(BLOCK_16X64, aom_highbd_sad16x64_bits12,
2155 aom_highbd_sad16x64_avg_bits12, aom_highbd_12_variance16x64,
2156 aom_highbd_12_sub_pixel_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002157 aom_highbd_12_sub_pixel_avg_variance16x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002158 aom_highbd_sad16x64x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002159 aom_highbd_dist_wtd_sad16x64_avg_bits12,
2160 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002161
2162 HIGHBD_BFP(BLOCK_32X8, aom_highbd_sad32x8_bits12,
2163 aom_highbd_sad32x8_avg_bits12, aom_highbd_12_variance32x8,
2164 aom_highbd_12_sub_pixel_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002165 aom_highbd_12_sub_pixel_avg_variance32x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002166 aom_highbd_sad32x8x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002167 aom_highbd_dist_wtd_sad32x8_avg_bits12,
2168 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002169
2170 HIGHBD_BFP(BLOCK_8X32, aom_highbd_sad8x32_bits12,
2171 aom_highbd_sad8x32_avg_bits12, aom_highbd_12_variance8x32,
2172 aom_highbd_12_sub_pixel_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002173 aom_highbd_12_sub_pixel_avg_variance8x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002174 aom_highbd_sad8x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002175 aom_highbd_dist_wtd_sad8x32_avg_bits12,
2176 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002177
2178 HIGHBD_BFP(BLOCK_16X4, aom_highbd_sad16x4_bits12,
2179 aom_highbd_sad16x4_avg_bits12, aom_highbd_12_variance16x4,
2180 aom_highbd_12_sub_pixel_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002181 aom_highbd_12_sub_pixel_avg_variance16x4,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002182 aom_highbd_sad16x4x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002183 aom_highbd_dist_wtd_sad16x4_avg_bits12,
2184 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002185
2186 HIGHBD_BFP(BLOCK_4X16, aom_highbd_sad4x16_bits12,
2187 aom_highbd_sad4x16_avg_bits12, aom_highbd_12_variance4x16,
2188 aom_highbd_12_sub_pixel_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002189 aom_highbd_12_sub_pixel_avg_variance4x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002190 aom_highbd_sad4x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002191 aom_highbd_dist_wtd_sad4x16_avg_bits12,
2192 aom_highbd_12_dist_wtd_sub_pixel_avg_variance4x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002193
2194 HIGHBD_BFP(BLOCK_32X16, aom_highbd_sad32x16_bits12,
2195 aom_highbd_sad32x16_avg_bits12, aom_highbd_12_variance32x16,
2196 aom_highbd_12_sub_pixel_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002197 aom_highbd_12_sub_pixel_avg_variance32x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002198 aom_highbd_sad32x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002199 aom_highbd_dist_wtd_sad32x16_avg_bits12,
2200 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002201
2202 HIGHBD_BFP(BLOCK_16X32, aom_highbd_sad16x32_bits12,
2203 aom_highbd_sad16x32_avg_bits12, aom_highbd_12_variance16x32,
2204 aom_highbd_12_sub_pixel_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002205 aom_highbd_12_sub_pixel_avg_variance16x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002206 aom_highbd_sad16x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002207 aom_highbd_dist_wtd_sad16x32_avg_bits12,
2208 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002209
2210 HIGHBD_BFP(BLOCK_64X32, aom_highbd_sad64x32_bits12,
2211 aom_highbd_sad64x32_avg_bits12, aom_highbd_12_variance64x32,
2212 aom_highbd_12_sub_pixel_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002213 aom_highbd_12_sub_pixel_avg_variance64x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002214 aom_highbd_sad64x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002215 aom_highbd_dist_wtd_sad64x32_avg_bits12,
2216 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002217
2218 HIGHBD_BFP(BLOCK_32X64, aom_highbd_sad32x64_bits12,
2219 aom_highbd_sad32x64_avg_bits12, aom_highbd_12_variance32x64,
2220 aom_highbd_12_sub_pixel_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002221 aom_highbd_12_sub_pixel_avg_variance32x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002222 aom_highbd_sad32x64x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002223 aom_highbd_dist_wtd_sad32x64_avg_bits12,
2224 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002225
2226 HIGHBD_BFP(BLOCK_32X32, aom_highbd_sad32x32_bits12,
2227 aom_highbd_sad32x32_avg_bits12, aom_highbd_12_variance32x32,
2228 aom_highbd_12_sub_pixel_variance32x32,
2229 aom_highbd_12_sub_pixel_avg_variance32x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002230 aom_highbd_sad32x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002231 aom_highbd_dist_wtd_sad32x32_avg_bits12,
2232 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002233
2234 HIGHBD_BFP(BLOCK_64X64, aom_highbd_sad64x64_bits12,
2235 aom_highbd_sad64x64_avg_bits12, aom_highbd_12_variance64x64,
2236 aom_highbd_12_sub_pixel_variance64x64,
2237 aom_highbd_12_sub_pixel_avg_variance64x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002238 aom_highbd_sad64x64x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002239 aom_highbd_dist_wtd_sad64x64_avg_bits12,
2240 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002241
2242 HIGHBD_BFP(BLOCK_16X16, aom_highbd_sad16x16_bits12,
2243 aom_highbd_sad16x16_avg_bits12, aom_highbd_12_variance16x16,
2244 aom_highbd_12_sub_pixel_variance16x16,
2245 aom_highbd_12_sub_pixel_avg_variance16x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002246 aom_highbd_sad16x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002247 aom_highbd_dist_wtd_sad16x16_avg_bits12,
2248 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002249
2250 HIGHBD_BFP(BLOCK_16X8, aom_highbd_sad16x8_bits12,
2251 aom_highbd_sad16x8_avg_bits12, aom_highbd_12_variance16x8,
2252 aom_highbd_12_sub_pixel_variance16x8,
2253 aom_highbd_12_sub_pixel_avg_variance16x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002254 aom_highbd_sad16x8x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002255 aom_highbd_dist_wtd_sad16x8_avg_bits12,
2256 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002257
2258 HIGHBD_BFP(BLOCK_8X16, aom_highbd_sad8x16_bits12,
2259 aom_highbd_sad8x16_avg_bits12, aom_highbd_12_variance8x16,
2260 aom_highbd_12_sub_pixel_variance8x16,
2261 aom_highbd_12_sub_pixel_avg_variance8x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002262 aom_highbd_sad8x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002263 aom_highbd_dist_wtd_sad8x16_avg_bits12,
2264 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002265
2266 HIGHBD_BFP(
2267 BLOCK_8X8, aom_highbd_sad8x8_bits12, aom_highbd_sad8x8_avg_bits12,
2268 aom_highbd_12_variance8x8, aom_highbd_12_sub_pixel_variance8x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002269 aom_highbd_12_sub_pixel_avg_variance8x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002270 aom_highbd_sad8x8x4d_bits12, aom_highbd_dist_wtd_sad8x8_avg_bits12,
2271 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002272
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002273 HIGHBD_BFP(
2274 BLOCK_8X4, aom_highbd_sad8x4_bits12, aom_highbd_sad8x4_avg_bits12,
2275 aom_highbd_12_variance8x4, aom_highbd_12_sub_pixel_variance8x4,
2276 aom_highbd_12_sub_pixel_avg_variance8x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002277 aom_highbd_sad8x4x4d_bits12, aom_highbd_dist_wtd_sad8x4_avg_bits12,
2278 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002279
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002280 HIGHBD_BFP(
2281 BLOCK_4X8, aom_highbd_sad4x8_bits12, aom_highbd_sad4x8_avg_bits12,
2282 aom_highbd_12_variance4x8, aom_highbd_12_sub_pixel_variance4x8,
2283 aom_highbd_12_sub_pixel_avg_variance4x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002284 aom_highbd_sad4x8x4d_bits12, aom_highbd_dist_wtd_sad4x8_avg_bits12,
2285 aom_highbd_12_dist_wtd_sub_pixel_avg_variance4x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002286
2287 HIGHBD_BFP(
2288 BLOCK_4X4, aom_highbd_sad4x4_bits12, aom_highbd_sad4x4_avg_bits12,
2289 aom_highbd_12_variance4x4, aom_highbd_12_sub_pixel_variance4x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002290 aom_highbd_12_sub_pixel_avg_variance4x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002291 aom_highbd_sad4x4x4d_bits12, aom_highbd_dist_wtd_sad4x4_avg_bits12,
2292 aom_highbd_12_dist_wtd_sub_pixel_avg_variance4x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002293
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002294 HIGHBD_BFP(BLOCK_128X128, aom_highbd_sad128x128_bits12,
2295 aom_highbd_sad128x128_avg_bits12,
2296 aom_highbd_12_variance128x128,
2297 aom_highbd_12_sub_pixel_variance128x128,
2298 aom_highbd_12_sub_pixel_avg_variance128x128,
2299 aom_highbd_sad128x128x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002300 aom_highbd_dist_wtd_sad128x128_avg_bits12,
2301 aom_highbd_12_dist_wtd_sub_pixel_avg_variance128x128);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002302
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002303 HIGHBD_BFP(BLOCK_128X64, aom_highbd_sad128x64_bits12,
2304 aom_highbd_sad128x64_avg_bits12,
2305 aom_highbd_12_variance128x64,
2306 aom_highbd_12_sub_pixel_variance128x64,
2307 aom_highbd_12_sub_pixel_avg_variance128x64,
2308 aom_highbd_sad128x64x4d_bits12,
2309 aom_highbd_dist_wtd_sad128x64_avg_bits12,
2310 aom_highbd_12_dist_wtd_sub_pixel_avg_variance128x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002311
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002312 HIGHBD_BFP(BLOCK_64X128, aom_highbd_sad64x128_bits12,
2313 aom_highbd_sad64x128_avg_bits12,
2314 aom_highbd_12_variance64x128,
2315 aom_highbd_12_sub_pixel_variance64x128,
2316 aom_highbd_12_sub_pixel_avg_variance64x128,
2317 aom_highbd_sad64x128x4d_bits12,
2318 aom_highbd_dist_wtd_sad64x128_avg_bits12,
2319 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x128);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002320
David Barkerf19f35f2017-05-22 16:33:22 +01002321 HIGHBD_MBFP(BLOCK_128X128, aom_highbd_masked_sad128x128_bits12,
2322 aom_highbd_12_masked_sub_pixel_variance128x128)
2323 HIGHBD_MBFP(BLOCK_128X64, aom_highbd_masked_sad128x64_bits12,
2324 aom_highbd_12_masked_sub_pixel_variance128x64)
2325 HIGHBD_MBFP(BLOCK_64X128, aom_highbd_masked_sad64x128_bits12,
2326 aom_highbd_12_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01002327 HIGHBD_MBFP(BLOCK_64X64, aom_highbd_masked_sad64x64_bits12,
2328 aom_highbd_12_masked_sub_pixel_variance64x64)
2329 HIGHBD_MBFP(BLOCK_64X32, aom_highbd_masked_sad64x32_bits12,
2330 aom_highbd_12_masked_sub_pixel_variance64x32)
2331 HIGHBD_MBFP(BLOCK_32X64, aom_highbd_masked_sad32x64_bits12,
2332 aom_highbd_12_masked_sub_pixel_variance32x64)
2333 HIGHBD_MBFP(BLOCK_32X32, aom_highbd_masked_sad32x32_bits12,
2334 aom_highbd_12_masked_sub_pixel_variance32x32)
2335 HIGHBD_MBFP(BLOCK_32X16, aom_highbd_masked_sad32x16_bits12,
2336 aom_highbd_12_masked_sub_pixel_variance32x16)
2337 HIGHBD_MBFP(BLOCK_16X32, aom_highbd_masked_sad16x32_bits12,
2338 aom_highbd_12_masked_sub_pixel_variance16x32)
2339 HIGHBD_MBFP(BLOCK_16X16, aom_highbd_masked_sad16x16_bits12,
2340 aom_highbd_12_masked_sub_pixel_variance16x16)
2341 HIGHBD_MBFP(BLOCK_8X16, aom_highbd_masked_sad8x16_bits12,
2342 aom_highbd_12_masked_sub_pixel_variance8x16)
2343 HIGHBD_MBFP(BLOCK_16X8, aom_highbd_masked_sad16x8_bits12,
2344 aom_highbd_12_masked_sub_pixel_variance16x8)
2345 HIGHBD_MBFP(BLOCK_8X8, aom_highbd_masked_sad8x8_bits12,
2346 aom_highbd_12_masked_sub_pixel_variance8x8)
2347 HIGHBD_MBFP(BLOCK_4X8, aom_highbd_masked_sad4x8_bits12,
2348 aom_highbd_12_masked_sub_pixel_variance4x8)
2349 HIGHBD_MBFP(BLOCK_8X4, aom_highbd_masked_sad8x4_bits12,
2350 aom_highbd_12_masked_sub_pixel_variance8x4)
2351 HIGHBD_MBFP(BLOCK_4X4, aom_highbd_masked_sad4x4_bits12,
2352 aom_highbd_12_masked_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002353 HIGHBD_MBFP(BLOCK_64X16, aom_highbd_masked_sad64x16_bits12,
2354 aom_highbd_12_masked_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002355 HIGHBD_MBFP(BLOCK_16X64, aom_highbd_masked_sad16x64_bits12,
2356 aom_highbd_12_masked_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002357 HIGHBD_MBFP(BLOCK_32X8, aom_highbd_masked_sad32x8_bits12,
2358 aom_highbd_12_masked_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002359 HIGHBD_MBFP(BLOCK_8X32, aom_highbd_masked_sad8x32_bits12,
2360 aom_highbd_12_masked_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002361 HIGHBD_MBFP(BLOCK_16X4, aom_highbd_masked_sad16x4_bits12,
2362 aom_highbd_12_masked_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002363 HIGHBD_MBFP(BLOCK_4X16, aom_highbd_masked_sad4x16_bits12,
2364 aom_highbd_12_masked_sub_pixel_variance4x16)
Yaowu Xuf883b422016-08-30 14:01:10 -07002365 HIGHBD_OBFP(BLOCK_128X128, aom_highbd_obmc_sad128x128_bits12,
2366 aom_highbd_12_obmc_variance128x128,
2367 aom_highbd_12_obmc_sub_pixel_variance128x128)
2368 HIGHBD_OBFP(BLOCK_128X64, aom_highbd_obmc_sad128x64_bits12,
2369 aom_highbd_12_obmc_variance128x64,
2370 aom_highbd_12_obmc_sub_pixel_variance128x64)
2371 HIGHBD_OBFP(BLOCK_64X128, aom_highbd_obmc_sad64x128_bits12,
2372 aom_highbd_12_obmc_variance64x128,
2373 aom_highbd_12_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07002374 HIGHBD_OBFP(BLOCK_64X64, aom_highbd_obmc_sad64x64_bits12,
2375 aom_highbd_12_obmc_variance64x64,
2376 aom_highbd_12_obmc_sub_pixel_variance64x64)
2377 HIGHBD_OBFP(BLOCK_64X32, aom_highbd_obmc_sad64x32_bits12,
2378 aom_highbd_12_obmc_variance64x32,
2379 aom_highbd_12_obmc_sub_pixel_variance64x32)
2380 HIGHBD_OBFP(BLOCK_32X64, aom_highbd_obmc_sad32x64_bits12,
2381 aom_highbd_12_obmc_variance32x64,
2382 aom_highbd_12_obmc_sub_pixel_variance32x64)
2383 HIGHBD_OBFP(BLOCK_32X32, aom_highbd_obmc_sad32x32_bits12,
2384 aom_highbd_12_obmc_variance32x32,
2385 aom_highbd_12_obmc_sub_pixel_variance32x32)
2386 HIGHBD_OBFP(BLOCK_32X16, aom_highbd_obmc_sad32x16_bits12,
2387 aom_highbd_12_obmc_variance32x16,
2388 aom_highbd_12_obmc_sub_pixel_variance32x16)
2389 HIGHBD_OBFP(BLOCK_16X32, aom_highbd_obmc_sad16x32_bits12,
2390 aom_highbd_12_obmc_variance16x32,
2391 aom_highbd_12_obmc_sub_pixel_variance16x32)
2392 HIGHBD_OBFP(BLOCK_16X16, aom_highbd_obmc_sad16x16_bits12,
2393 aom_highbd_12_obmc_variance16x16,
2394 aom_highbd_12_obmc_sub_pixel_variance16x16)
2395 HIGHBD_OBFP(BLOCK_8X16, aom_highbd_obmc_sad8x16_bits12,
2396 aom_highbd_12_obmc_variance8x16,
2397 aom_highbd_12_obmc_sub_pixel_variance8x16)
2398 HIGHBD_OBFP(BLOCK_16X8, aom_highbd_obmc_sad16x8_bits12,
2399 aom_highbd_12_obmc_variance16x8,
2400 aom_highbd_12_obmc_sub_pixel_variance16x8)
2401 HIGHBD_OBFP(BLOCK_8X8, aom_highbd_obmc_sad8x8_bits12,
2402 aom_highbd_12_obmc_variance8x8,
2403 aom_highbd_12_obmc_sub_pixel_variance8x8)
2404 HIGHBD_OBFP(BLOCK_4X8, aom_highbd_obmc_sad4x8_bits12,
2405 aom_highbd_12_obmc_variance4x8,
2406 aom_highbd_12_obmc_sub_pixel_variance4x8)
2407 HIGHBD_OBFP(BLOCK_8X4, aom_highbd_obmc_sad8x4_bits12,
2408 aom_highbd_12_obmc_variance8x4,
2409 aom_highbd_12_obmc_sub_pixel_variance8x4)
2410 HIGHBD_OBFP(BLOCK_4X4, aom_highbd_obmc_sad4x4_bits12,
2411 aom_highbd_12_obmc_variance4x4,
2412 aom_highbd_12_obmc_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002413 HIGHBD_OBFP(BLOCK_64X16, aom_highbd_obmc_sad64x16_bits12,
2414 aom_highbd_12_obmc_variance64x16,
2415 aom_highbd_12_obmc_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002416 HIGHBD_OBFP(BLOCK_16X64, aom_highbd_obmc_sad16x64_bits12,
2417 aom_highbd_12_obmc_variance16x64,
2418 aom_highbd_12_obmc_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002419 HIGHBD_OBFP(BLOCK_32X8, aom_highbd_obmc_sad32x8_bits12,
2420 aom_highbd_12_obmc_variance32x8,
2421 aom_highbd_12_obmc_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002422 HIGHBD_OBFP(BLOCK_8X32, aom_highbd_obmc_sad8x32_bits12,
2423 aom_highbd_12_obmc_variance8x32,
2424 aom_highbd_12_obmc_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002425 HIGHBD_OBFP(BLOCK_16X4, aom_highbd_obmc_sad16x4_bits12,
2426 aom_highbd_12_obmc_variance16x4,
2427 aom_highbd_12_obmc_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002428 HIGHBD_OBFP(BLOCK_4X16, aom_highbd_obmc_sad4x16_bits12,
2429 aom_highbd_12_obmc_variance4x16,
2430 aom_highbd_12_obmc_sub_pixel_variance4x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002431 break;
2432
2433 default:
2434 assert(0 &&
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002435 "cm->seq_params.bit_depth should be AOM_BITS_8, "
Yaowu Xuf883b422016-08-30 14:01:10 -07002436 "AOM_BITS_10 or AOM_BITS_12");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002437 }
2438 }
2439}
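/* Note on highbd_set_var_fns(): for high-bit-depth encoding each block size
 * gets a parallel set of SAD/variance, dist_wtd, masked and OBMC function
 * pointers, selected by cm->seq_params.bit_depth.  The *_bits10 / *_bits12
 * names are presumably thin wrappers, defined earlier in this file, around
 * the plain aom_highbd_* kernels.  A sketch of what such a wrapper is assumed
 * to look like, following the usual libaom/libvpx convention of shifting the
 * raw SAD so it stays comparable with the 8-bit range (the exact wrapper
 * macro names and shift amounts are not restated from this excerpt):
 *
 *   static unsigned int aom_highbd_sad8x8_bits10(const uint8_t *src,
 *                                                int src_stride,
 *                                                const uint8_t *ref,
 *                                                int ref_stride) {
 *     return aom_highbd_sad8x8(src, src_stride, ref, ref_stride) >> 2;
 *   }
 *
 * The 12-bit variant would shift by 4 instead.
 */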
Yaowu Xuc27fc142016-08-22 16:08:15 -07002440
Yaowu Xuf883b422016-08-30 14:01:10 -07002441static void realloc_segmentation_maps(AV1_COMP *cpi) {
2442 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002443
2444 // Create the encoder segmentation map and set all entries to 0
Yaowu Xuf883b422016-08-30 14:01:10 -07002445 aom_free(cpi->segmentation_map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002446 CHECK_MEM_ERROR(cm, cpi->segmentation_map,
Yaowu Xuf883b422016-08-30 14:01:10 -07002447 aom_calloc(cm->mi_rows * cm->mi_cols, 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002448
2449 // Create a map used for cyclic background refresh.
Yaowu Xuf883b422016-08-30 14:01:10 -07002450 if (cpi->cyclic_refresh) av1_cyclic_refresh_free(cpi->cyclic_refresh);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002451 CHECK_MEM_ERROR(cm, cpi->cyclic_refresh,
Yaowu Xuf883b422016-08-30 14:01:10 -07002452 av1_cyclic_refresh_alloc(cm->mi_rows, cm->mi_cols));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002453
2454 // Create a map used to mark inactive areas.
Yaowu Xuf883b422016-08-30 14:01:10 -07002455 aom_free(cpi->active_map.map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002456 CHECK_MEM_ERROR(cm, cpi->active_map.map,
Yaowu Xuf883b422016-08-30 14:01:10 -07002457 aom_calloc(cm->mi_rows * cm->mi_cols, 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002458}
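/* The buffers above are all sized in 4x4 mode-info units: the segmentation
 * map and the active map are one byte per unit (mi_rows * mi_cols, zeroed by
 * aom_calloc), and the cyclic-refresh state is allocated with the same mi
 * dimensions by av1_cyclic_refresh_alloc().  CHECK_MEM_ERROR routes any
 * allocation failure to the encoder's error handler, so no explicit NULL
 * checks are needed here. */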
2459
Yaowu Xuf883b422016-08-30 14:01:10 -07002460void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
2461 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002462 SequenceHeader *const seq_params = &cm->seq_params;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002463 const int num_planes = av1_num_planes(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002464 RATE_CONTROL *const rc = &cpi->rc;
hui sud9a812b2017-07-06 14:34:37 -07002465 MACROBLOCK *const x = &cpi->td.mb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002466
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002467 if (seq_params->profile != oxcf->profile) seq_params->profile = oxcf->profile;
2468 seq_params->bit_depth = oxcf->bit_depth;
2469 seq_params->color_primaries = oxcf->color_primaries;
2470 seq_params->transfer_characteristics = oxcf->transfer_characteristics;
2471 seq_params->matrix_coefficients = oxcf->matrix_coefficients;
2472 seq_params->monochrome = oxcf->monochrome;
2473 seq_params->chroma_sample_position = oxcf->chroma_sample_position;
2474 seq_params->color_range = oxcf->color_range;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002475
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002476 assert(IMPLIES(seq_params->profile <= PROFILE_1,
2477 seq_params->bit_depth <= AOM_BITS_10));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002478
Andrey Norkin28e9ce22018-01-08 10:11:21 -08002479 cm->timing_info_present = oxcf->timing_info_present;
Andrey Norkin795ba872018-03-06 13:24:14 -08002480 cm->timing_info.num_units_in_display_tick =
2481 oxcf->timing_info.num_units_in_display_tick;
2482 cm->timing_info.time_scale = oxcf->timing_info.time_scale;
2483 cm->timing_info.equal_picture_interval =
2484 oxcf->timing_info.equal_picture_interval;
2485 cm->timing_info.num_ticks_per_picture =
2486 oxcf->timing_info.num_ticks_per_picture;
2487
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002488 seq_params->display_model_info_present_flag =
Andrey Norkin26495512018-06-20 17:13:11 -07002489 oxcf->display_model_info_present_flag;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002490 seq_params->decoder_model_info_present_flag =
Adrian Grangec56f6ec2018-05-31 14:19:32 -07002491 oxcf->decoder_model_info_present_flag;
Andrey Norkin795ba872018-03-06 13:24:14 -08002492 if (oxcf->decoder_model_info_present_flag) {
Andrey Norkin26495512018-06-20 17:13:11 -07002493 // set the decoder model parameters in schedule mode
Andrey Norkin795ba872018-03-06 13:24:14 -08002494 cm->buffer_model.num_units_in_decoding_tick =
2495 oxcf->buffer_model.num_units_in_decoding_tick;
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07002496 cm->buffer_removal_time_present = 1;
Andrey Norkin795ba872018-03-06 13:24:14 -08002497 set_aom_dec_model_info(&cm->buffer_model);
Andrey Norkin26495512018-06-20 17:13:11 -07002498 set_dec_model_op_parameters(&cm->op_params[0]);
2499 } else if (cm->timing_info_present &&
2500 cm->timing_info.equal_picture_interval &&
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002501 !seq_params->decoder_model_info_present_flag) {
Andrey Norkin26495512018-06-20 17:13:11 -07002502 // set the decoder model parameters in resource availability mode
2503 set_resource_availability_parameters(&cm->op_params[0]);
Andrey Norkinc7511de2018-06-22 12:31:06 -07002504 } else {
2505 cm->op_params[0].initial_display_delay =
2506 10; // Default value (not signaled)
Andrey Norkin795ba872018-03-06 13:24:14 -08002507 }
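  // To summarize the branches above: with decoder model info signaled the
  // operating parameters are derived in schedule mode; otherwise, if timing
  // info with equal picture intervals is present, resource availability mode
  // is used; failing both, initial_display_delay falls back to a default of
  // 10 and is not signaled.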
Andrey Norkin28e9ce22018-01-08 10:11:21 -08002508
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002509 update_film_grain_parameters(cpi, oxcf);
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002510
Yaowu Xuc27fc142016-08-22 16:08:15 -07002511 cpi->oxcf = *oxcf;
Maxym Dmytrychenkocc6e0e12018-02-05 16:35:37 +01002512 cpi->common.options = oxcf->cfg;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002513 x->e_mbd.bd = (int)seq_params->bit_depth;
hui sud9a812b2017-07-06 14:34:37 -07002514 x->e_mbd.global_motion = cm->global_motion;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002515
Yaowu Xuf883b422016-08-30 14:01:10 -07002516 if ((oxcf->pass == 0) && (oxcf->rc_mode == AOM_Q)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002517 rc->baseline_gf_interval = FIXED_GF_INTERVAL;
2518 } else {
2519 rc->baseline_gf_interval = (MIN_GF_INTERVAL + MAX_GF_INTERVAL) / 2;
2520 }
2521
2522 cpi->refresh_last_frame = 1;
2523 cpi->refresh_golden_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002524 cpi->refresh_bwd_ref_frame = 0;
Zoe Liue9b15e22017-07-19 15:53:01 -07002525 cpi->refresh_alt2_ref_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002526
Debargha Mukherjee229fdc82018-03-10 07:45:33 -08002527 cm->refresh_frame_context = (oxcf->frame_parallel_decoding_mode)
2528 ? REFRESH_FRAME_CONTEXT_DISABLED
2529 : REFRESH_FRAME_CONTEXT_BACKWARD;
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01002530 if (oxcf->large_scale_tile)
James Zernf34dfc82018-02-23 16:53:33 -08002531 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_DISABLED;
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01002532
Alex Converse74ad0912017-07-18 10:22:58 -07002533 if (x->palette_buffer == NULL) {
hui sud9a812b2017-07-06 14:34:37 -07002534 CHECK_MEM_ERROR(cm, x->palette_buffer,
2535 aom_memalign(16, sizeof(*x->palette_buffer)));
2536 }
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002537
2538 if (x->tmp_conv_dst == NULL) {
2539 CHECK_MEM_ERROR(
2540 cm, x->tmp_conv_dst,
2541 aom_memalign(32, MAX_SB_SIZE * MAX_SB_SIZE * sizeof(*x->tmp_conv_dst)));
Urvang Joshie58f6ec2018-09-10 15:10:12 -07002542 x->e_mbd.tmp_conv_dst = x->tmp_conv_dst;
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002543 }
2544 for (int i = 0; i < 2; ++i) {
2545 if (x->tmp_obmc_bufs[i] == NULL) {
2546 CHECK_MEM_ERROR(cm, x->tmp_obmc_bufs[i],
wenyao.liu22d8ab32018-10-16 09:11:29 +08002547 aom_memalign(32, 2 * MAX_MB_PLANE * MAX_SB_SQUARE *
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002548 sizeof(*x->tmp_obmc_bufs[i])));
Urvang Joshie58f6ec2018-09-10 15:10:12 -07002549 x->e_mbd.tmp_obmc_bufs[i] = x->tmp_obmc_bufs[i];
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002550 }
2551 }
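  // The scratch buffers above are allocated lazily (only while still NULL), so
  // repeated config changes neither leak nor reallocate them; tmp_conv_dst and
  // tmp_obmc_bufs are also mirrored into x->e_mbd so shared prediction code
  // can reach them through the MACROBLOCKD.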
2552
Yaowu Xuf883b422016-08-30 14:01:10 -07002553 av1_reset_segment_features(cm);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07002554 set_high_precision_mv(cpi, 1, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002555
Yaowu Xuc27fc142016-08-22 16:08:15 -07002556 set_rc_buffer_sizes(rc, &cpi->oxcf);
2557
2558 // Under a configuration change, where maximum_buffer_size may change,
2559 // keep buffer level clipped to the maximum allowed buffer size.
Yaowu Xuf883b422016-08-30 14:01:10 -07002560 rc->bits_off_target = AOMMIN(rc->bits_off_target, rc->maximum_buffer_size);
2561 rc->buffer_level = AOMMIN(rc->buffer_level, rc->maximum_buffer_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002562
2563 // Set up frame rate and related parameters rate control values.
Yaowu Xuf883b422016-08-30 14:01:10 -07002564 av1_new_framerate(cpi, cpi->framerate);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002565
2566 // Set absolute upper and lower quality limits
2567 rc->worst_quality = cpi->oxcf.worst_allowed_q;
2568 rc->best_quality = cpi->oxcf.best_allowed_q;
2569
Urvang Joshib55cb5e2018-09-12 14:50:21 -07002570 cm->interp_filter = oxcf->large_scale_tile ? EIGHTTAP_REGULAR : SWITCHABLE;
Yue Chen5380cb52018-02-23 15:33:21 -08002571 cm->switchable_motion_mode = 1;
2572
Yaowu Xuc27fc142016-08-22 16:08:15 -07002573 if (cpi->oxcf.render_width > 0 && cpi->oxcf.render_height > 0) {
2574 cm->render_width = cpi->oxcf.render_width;
2575 cm->render_height = cpi->oxcf.render_height;
2576 } else {
2577 cm->render_width = cpi->oxcf.width;
2578 cm->render_height = cpi->oxcf.height;
2579 }
2580 cm->width = cpi->oxcf.width;
2581 cm->height = cpi->oxcf.height;
2582
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002583 int sb_size = seq_params->sb_size;
Urvang Joshie4530f82018-01-09 11:43:37 -08002584 // Superblock size should not be updated after the first key frame.
2585 if (!cpi->seq_params_locked) {
2586 set_sb_size(&cm->seq_params, select_sb_size(cpi));
2587 }
Dominic Symes917d6c02017-10-11 18:00:52 +02002588
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002589 if (cpi->initial_width || sb_size != seq_params->sb_size) {
Dominic Symes917d6c02017-10-11 18:00:52 +02002590 if (cm->width > cpi->initial_width || cm->height > cpi->initial_height ||
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002591 seq_params->sb_size != sb_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002592 av1_free_context_buffers(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002593 av1_free_pc_tree(&cpi->td, num_planes);
Cheng Chen46f30c72017-09-07 11:13:33 -07002594 alloc_compressor_data(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002595 realloc_segmentation_maps(cpi);
2596 cpi->initial_width = cpi->initial_height = 0;
2597 }
2598 }
2599 update_frame_size(cpi);
2600
2601 cpi->alt_ref_source = NULL;
2602 rc->is_src_frame_alt_ref = 0;
2603
Yaowu Xuc27fc142016-08-22 16:08:15 -07002604 rc->is_bwd_ref_frame = 0;
2605 rc->is_last_bipred_frame = 0;
2606 rc->is_bipred_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002607
Yaowu Xuc27fc142016-08-22 16:08:15 -07002608 set_tile_info(cpi);
2609
2610 cpi->ext_refresh_frame_flags_pending = 0;
2611 cpi->ext_refresh_frame_context_pending = 0;
2612
Yaowu Xuc27fc142016-08-22 16:08:15 -07002613 highbd_set_var_fns(cpi);
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00002614
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07002615 // Init sequence level coding tools
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07002616 // This should not be called after the first key frame.
2617 if (!cpi->seq_params_locked) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002618 seq_params->operating_points_cnt_minus_1 =
Adrian Grangec56f6ec2018-05-31 14:19:32 -07002619 cm->number_spatial_layers > 1 ? cm->number_spatial_layers - 1 : 0;
Andrey Norkin26495512018-06-20 17:13:11 -07002620 init_seq_coding_tools(&cm->seq_params, cm, oxcf);
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07002621 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002622}
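/* av1_change_config() applies a new AV1EncoderConfig to an existing encoder
 * instance; in libaom it is normally reached from the
 * aom_codec_enc_config_set() path in the encoder interface.  A minimal usage
 * sketch (the calling context and the doubled bitrate are purely
 * illustrative):
 *
 *   AV1EncoderConfig new_oxcf = cpi->oxcf;  // start from the current config
 *   new_oxcf.target_bandwidth *= 2;         // e.g. raise the target bitrate
 *   av1_change_config(cpi, &new_oxcf);      // re-derives RC, tiles, buffers
 */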
2623
Yaowu Xuf883b422016-08-30 14:01:10 -07002624AV1_COMP *av1_create_compressor(AV1EncoderConfig *oxcf,
2625 BufferPool *const pool) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002626 unsigned int i;
Yaowu Xuf883b422016-08-30 14:01:10 -07002627 AV1_COMP *volatile const cpi = aom_memalign(32, sizeof(AV1_COMP));
2628 AV1_COMMON *volatile const cm = cpi != NULL ? &cpi->common : NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002629
2630 if (!cm) return NULL;
2631
Yaowu Xuf883b422016-08-30 14:01:10 -07002632 av1_zero(*cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002633
Wan-Teh Changa2fad3e2018-07-19 16:55:19 -07002634 // The jmp_buf is valid only for the duration of the function that calls
2635 // setjmp(). Therefore, this function must reset the 'setjmp' field to 0
2636 // before it returns.
Yaowu Xuc27fc142016-08-22 16:08:15 -07002637 if (setjmp(cm->error.jmp)) {
2638 cm->error.setjmp = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002639 av1_remove_compressor(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002640 return 0;
2641 }
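  // Once cm->error.setjmp is set to 1 below, any aom_internal_error() or
  // CHECK_MEM_ERROR() failure in the allocations that follow longjmp()s back
  // to the setjmp() above, which tears the partially built compressor down
  // and makes av1_create_compressor() return NULL.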
2642
2643 cm->error.setjmp = 1;
Cheng Chen46f30c72017-09-07 11:13:33 -07002644 cm->alloc_mi = enc_alloc_mi;
2645 cm->free_mi = enc_free_mi;
2646 cm->setup_mi = enc_setup_mi;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002647
Angie Chianga5d96c42016-10-21 16:16:56 -07002648 CHECK_MEM_ERROR(cm, cm->fc,
2649 (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->fc)));
David Turner1bcefb32018-11-19 17:54:00 +00002650 CHECK_MEM_ERROR(
2651 cm, cm->default_frame_context,
2652 (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->default_frame_context)));
Angie Chianga5d96c42016-10-21 16:16:56 -07002653 memset(cm->fc, 0, sizeof(*cm->fc));
David Turner1bcefb32018-11-19 17:54:00 +00002654 memset(cm->default_frame_context, 0, sizeof(*cm->default_frame_context));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002655
2656 cpi->resize_state = 0;
2657 cpi->resize_avg_qp = 0;
2658 cpi->resize_buffer_underflow = 0;
Fergus Simpsonddc846e2017-04-24 18:09:13 -07002659
Yaowu Xuc27fc142016-08-22 16:08:15 -07002660 cpi->common.buffer_pool = pool;
2661
2662 init_config(cpi, oxcf);
Yaowu Xuf883b422016-08-30 14:01:10 -07002663 av1_rc_init(&cpi->oxcf, oxcf->pass, &cpi->rc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002664
David Turnerd2a592e2018-11-16 14:59:31 +00002665 cm->current_frame.frame_number = 0;
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07002666 cpi->seq_params_locked = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002667 cpi->partition_search_skippable_frame = 0;
2668 cpi->tile_data = NULL;
David Turnere7ebf902018-12-04 14:04:55 +00002669 cpi->last_show_frame_buf = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002670 realloc_segmentation_maps(cpi);
2671
Jingning Hanf050fc12018-03-09 14:53:33 -08002672 memset(cpi->nmv_costs, 0, sizeof(cpi->nmv_costs));
2673 memset(cpi->nmv_costs_hp, 0, sizeof(cpi->nmv_costs_hp));
James Zern01a9d702017-08-25 19:09:33 +00002674
Yaowu Xuc27fc142016-08-22 16:08:15 -07002675 for (i = 0; i < (sizeof(cpi->mbgraph_stats) / sizeof(cpi->mbgraph_stats[0]));
2676 i++) {
2677 CHECK_MEM_ERROR(
2678 cm, cpi->mbgraph_stats[i].mb_stats,
Yaowu Xuf883b422016-08-30 14:01:10 -07002679 aom_calloc(cm->MBs * sizeof(*cpi->mbgraph_stats[i].mb_stats), 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002680 }
2681
2682#if CONFIG_FP_MB_STATS
2683 cpi->use_fp_mb_stats = 0;
2684 if (cpi->use_fp_mb_stats) {
 2685    // A placeholder used to store the first-pass mb stats during the first pass.
2686 CHECK_MEM_ERROR(cm, cpi->twopass.frame_mb_stats_buf,
Yaowu Xuf883b422016-08-30 14:01:10 -07002687 aom_calloc(cm->MBs * sizeof(uint8_t), 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002688 } else {
2689 cpi->twopass.frame_mb_stats_buf = NULL;
2690 }
2691#endif
2692
2693 cpi->refresh_alt_ref_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002694
2695 cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
2696#if CONFIG_INTERNAL_STATS
2697 cpi->b_calculate_blockiness = 1;
2698 cpi->b_calculate_consistency = 1;
2699 cpi->total_inconsistency = 0;
2700 cpi->psnr.worst = 100.0;
2701 cpi->worst_ssim = 100.0;
2702
2703 cpi->count = 0;
2704 cpi->bytes = 0;
Debargha Mukherjee0857e662019-01-04 16:22:09 -08002705#if CONFIG_SPEED_STATS
2706 cpi->tx_search_count = 0;
2707#endif // CONFIG_SPEED_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002708
2709 if (cpi->b_calculate_psnr) {
2710 cpi->total_sq_error = 0;
2711 cpi->total_samples = 0;
2712 cpi->tot_recode_hits = 0;
2713 cpi->summed_quality = 0;
2714 cpi->summed_weights = 0;
2715 }
2716
2717 cpi->fastssim.worst = 100.0;
2718 cpi->psnrhvs.worst = 100.0;
2719
2720 if (cpi->b_calculate_blockiness) {
2721 cpi->total_blockiness = 0;
2722 cpi->worst_blockiness = 0.0;
2723 }
2724
2725 if (cpi->b_calculate_consistency) {
2726 CHECK_MEM_ERROR(cm, cpi->ssim_vars,
Yaowu Xuf883b422016-08-30 14:01:10 -07002727 aom_malloc(sizeof(*cpi->ssim_vars) * 4 *
Yaowu Xuc27fc142016-08-22 16:08:15 -07002728 cpi->common.mi_rows * cpi->common.mi_cols));
2729 cpi->worst_consistency = 100.0;
2730 }
2731#endif
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08002732#if CONFIG_ENTROPY_STATS
2733 av1_zero(aggregate_fc);
2734#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002735
2736 cpi->first_time_stamp_ever = INT64_MAX;
2737
Jingning Hanf050fc12018-03-09 14:53:33 -08002738 cpi->td.mb.nmvcost[0] = &cpi->nmv_costs[0][MV_MAX];
2739 cpi->td.mb.nmvcost[1] = &cpi->nmv_costs[1][MV_MAX];
2740 cpi->td.mb.nmvcost_hp[0] = &cpi->nmv_costs_hp[0][MV_MAX];
2741 cpi->td.mb.nmvcost_hp[1] = &cpi->nmv_costs_hp[1][MV_MAX];
James Zern01a9d702017-08-25 19:09:33 +00002742
Yaowu Xuc27fc142016-08-22 16:08:15 -07002743#ifdef OUTPUT_YUV_SKINMAP
2744 yuv_skinmap_file = fopen("skinmap.yuv", "ab");
2745#endif
2746#ifdef OUTPUT_YUV_REC
2747 yuv_rec_file = fopen("rec.yuv", "wb");
2748#endif
2749
Yaowu Xuc27fc142016-08-22 16:08:15 -07002750 if (oxcf->pass == 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002751 av1_init_first_pass(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002752 } else if (oxcf->pass == 2) {
2753 const size_t packet_sz = sizeof(FIRSTPASS_STATS);
2754 const int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
2755
2756#if CONFIG_FP_MB_STATS
2757 if (cpi->use_fp_mb_stats) {
2758 const size_t psz = cpi->common.MBs * sizeof(uint8_t);
2759 const int ps = (int)(oxcf->firstpass_mb_stats_in.sz / psz);
2760
2761 cpi->twopass.firstpass_mb_stats.mb_stats_start =
2762 oxcf->firstpass_mb_stats_in.buf;
2763 cpi->twopass.firstpass_mb_stats.mb_stats_end =
2764 cpi->twopass.firstpass_mb_stats.mb_stats_start +
2765 (ps - 1) * cpi->common.MBs * sizeof(uint8_t);
2766 }
2767#endif
2768
2769 cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
2770 cpi->twopass.stats_in = cpi->twopass.stats_in_start;
2771 cpi->twopass.stats_in_end = &cpi->twopass.stats_in[packets - 1];
2772
Yaowu Xuf883b422016-08-30 14:01:10 -07002773 av1_init_second_pass(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002774 }
2775
Jingning Hand064cf02017-06-01 10:00:39 -07002776 CHECK_MEM_ERROR(
2777 cm, cpi->td.mb.above_pred_buf,
Yue Chen1a799252018-03-01 16:47:41 -08002778 (uint8_t *)aom_memalign(16, MAX_MB_PLANE * MAX_SB_SQUARE *
Johannb0ef6ff2018-02-08 14:32:21 -08002779 sizeof(*cpi->td.mb.above_pred_buf)));
Jingning Hand064cf02017-06-01 10:00:39 -07002780 CHECK_MEM_ERROR(
2781 cm, cpi->td.mb.left_pred_buf,
Yue Chen1a799252018-03-01 16:47:41 -08002782 (uint8_t *)aom_memalign(16, MAX_MB_PLANE * MAX_SB_SQUARE *
Johannb0ef6ff2018-02-08 14:32:21 -08002783 sizeof(*cpi->td.mb.left_pred_buf)));
Jingning Hand064cf02017-06-01 10:00:39 -07002784
2785 CHECK_MEM_ERROR(cm, cpi->td.mb.wsrc_buf,
2786 (int32_t *)aom_memalign(
2787 16, MAX_SB_SQUARE * sizeof(*cpi->td.mb.wsrc_buf)));
2788
Ravi Chaudhary5d970f42018-09-25 11:25:32 +05302789#if CONFIG_COLLECT_INTER_MODE_RD_STATS
2790 CHECK_MEM_ERROR(
2791 cm, cpi->td.mb.inter_modes_info,
2792 (InterModesInfo *)aom_malloc(sizeof(*cpi->td.mb.inter_modes_info)));
2793#endif
2794
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05302795 for (int x = 0; x < 2; x++)
2796 for (int y = 0; y < 2; y++)
2797 CHECK_MEM_ERROR(
2798 cm, cpi->td.mb.hash_value_buffer[x][y],
2799 (uint32_t *)aom_malloc(AOM_BUFFER_SIZE_FOR_BLOCK_HASH *
2800 sizeof(*cpi->td.mb.hash_value_buffer[0][0])));
2801
2802 cpi->td.mb.g_crc_initialized = 0;
2803
Jingning Hand064cf02017-06-01 10:00:39 -07002804 CHECK_MEM_ERROR(cm, cpi->td.mb.mask_buf,
2805 (int32_t *)aom_memalign(
2806 16, MAX_SB_SQUARE * sizeof(*cpi->td.mb.mask_buf)));
2807
Yaowu Xuf883b422016-08-30 14:01:10 -07002808 av1_set_speed_features_framesize_independent(cpi);
2809 av1_set_speed_features_framesize_dependent(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002810
Yue Chen7cae98f2018-08-24 10:43:16 -07002811 for (int frame = 0; frame < MAX_LAG_BUFFERS; ++frame) {
2812 int mi_cols = ALIGN_POWER_OF_TWO(cm->mi_cols, MAX_MIB_SIZE_LOG2);
2813 int mi_rows = ALIGN_POWER_OF_TWO(cm->mi_rows, MAX_MIB_SIZE_LOG2);
2814
2815 CHECK_MEM_ERROR(cm, cpi->tpl_stats[frame].tpl_stats_ptr,
2816 aom_calloc(mi_rows * mi_cols,
2817 sizeof(*cpi->tpl_stats[frame].tpl_stats_ptr)));
2818 cpi->tpl_stats[frame].is_valid = 0;
2819 cpi->tpl_stats[frame].width = mi_cols;
2820 cpi->tpl_stats[frame].height = mi_rows;
2821 cpi->tpl_stats[frame].stride = mi_cols;
2822 cpi->tpl_stats[frame].mi_rows = cm->mi_rows;
2823 cpi->tpl_stats[frame].mi_cols = cm->mi_cols;
2824 }
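  // One TPL (temporal dependency model) stats buffer is kept for each
  // lag-in-frames slot; each buffer is sized in 4x4 mode-info units with the
  // dimensions rounded up to a whole max-size superblock, presumably so that
  // later passes can address complete superblocks uniformly.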
2825
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002826#define BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX4DF, JSDAF, JSVAF) \
2827 cpi->fn_ptr[BT].sdf = SDF; \
2828 cpi->fn_ptr[BT].sdaf = SDAF; \
2829 cpi->fn_ptr[BT].vf = VF; \
2830 cpi->fn_ptr[BT].svf = SVF; \
2831 cpi->fn_ptr[BT].svaf = SVAF; \
2832 cpi->fn_ptr[BT].sdx4df = SDX4DF; \
2833 cpi->fn_ptr[BT].jsdaf = JSDAF; \
Cheng Chenf78632e2017-10-20 15:30:51 -07002834 cpi->fn_ptr[BT].jsvaf = JSVAF;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002835
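  // BFP wires up the per-block-size prediction-error functions used by motion
  // search and RD: sdf/sdaf are SAD and SAD-with-second-prediction-averaging,
  // vf/svf/svaf are variance and (averaged) sub-pixel variance, sdx4df is the
  // 4-way SAD that scores four reference candidates at once, and jsdaf/jsvaf
  // are the dist_wtd (distance-weighted compound) variants.  For example, the
  // first invocation below expands to:
  //
  //   cpi->fn_ptr[BLOCK_4X16].sdf = aom_sad4x16;
  //   cpi->fn_ptr[BLOCK_4X16].sdaf = aom_sad4x16_avg;
  //   cpi->fn_ptr[BLOCK_4X16].vf = aom_variance4x16;
  //   cpi->fn_ptr[BLOCK_4X16].svf = aom_sub_pixel_variance4x16;
  //   cpi->fn_ptr[BLOCK_4X16].svaf = aom_sub_pixel_avg_variance4x16;
  //   cpi->fn_ptr[BLOCK_4X16].sdx4df = aom_sad4x16x4d;
  //   cpi->fn_ptr[BLOCK_4X16].jsdaf = aom_dist_wtd_sad4x16_avg;
  //   cpi->fn_ptr[BLOCK_4X16].jsvaf = aom_dist_wtd_sub_pixel_avg_variance4x16;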
Cheng Chenf78632e2017-10-20 15:30:51 -07002836 BFP(BLOCK_4X16, aom_sad4x16, aom_sad4x16_avg, aom_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002837 aom_sub_pixel_variance4x16, aom_sub_pixel_avg_variance4x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002838 aom_sad4x16x4d, aom_dist_wtd_sad4x16_avg,
2839 aom_dist_wtd_sub_pixel_avg_variance4x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002840
2841 BFP(BLOCK_16X4, aom_sad16x4, aom_sad16x4_avg, aom_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002842 aom_sub_pixel_variance16x4, aom_sub_pixel_avg_variance16x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002843 aom_sad16x4x4d, aom_dist_wtd_sad16x4_avg,
2844 aom_dist_wtd_sub_pixel_avg_variance16x4)
Cheng Chenf78632e2017-10-20 15:30:51 -07002845
2846 BFP(BLOCK_8X32, aom_sad8x32, aom_sad8x32_avg, aom_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002847 aom_sub_pixel_variance8x32, aom_sub_pixel_avg_variance8x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002848 aom_sad8x32x4d, aom_dist_wtd_sad8x32_avg,
2849 aom_dist_wtd_sub_pixel_avg_variance8x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002850
2851 BFP(BLOCK_32X8, aom_sad32x8, aom_sad32x8_avg, aom_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002852 aom_sub_pixel_variance32x8, aom_sub_pixel_avg_variance32x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002853 aom_sad32x8x4d, aom_dist_wtd_sad32x8_avg,
2854 aom_dist_wtd_sub_pixel_avg_variance32x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002855
2856 BFP(BLOCK_16X64, aom_sad16x64, aom_sad16x64_avg, aom_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002857 aom_sub_pixel_variance16x64, aom_sub_pixel_avg_variance16x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002858 aom_sad16x64x4d, aom_dist_wtd_sad16x64_avg,
2859 aom_dist_wtd_sub_pixel_avg_variance16x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002860
2861 BFP(BLOCK_64X16, aom_sad64x16, aom_sad64x16_avg, aom_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002862 aom_sub_pixel_variance64x16, aom_sub_pixel_avg_variance64x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002863 aom_sad64x16x4d, aom_dist_wtd_sad64x16_avg,
2864 aom_dist_wtd_sub_pixel_avg_variance64x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002865
Cheng Chenf78632e2017-10-20 15:30:51 -07002866 BFP(BLOCK_128X128, aom_sad128x128, aom_sad128x128_avg, aom_variance128x128,
2867 aom_sub_pixel_variance128x128, aom_sub_pixel_avg_variance128x128,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002868 aom_sad128x128x4d, aom_dist_wtd_sad128x128_avg,
2869 aom_dist_wtd_sub_pixel_avg_variance128x128)
Cheng Chenf78632e2017-10-20 15:30:51 -07002870
2871 BFP(BLOCK_128X64, aom_sad128x64, aom_sad128x64_avg, aom_variance128x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002872 aom_sub_pixel_variance128x64, aom_sub_pixel_avg_variance128x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002873 aom_sad128x64x4d, aom_dist_wtd_sad128x64_avg,
2874 aom_dist_wtd_sub_pixel_avg_variance128x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002875
2876 BFP(BLOCK_64X128, aom_sad64x128, aom_sad64x128_avg, aom_variance64x128,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002877 aom_sub_pixel_variance64x128, aom_sub_pixel_avg_variance64x128,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002878 aom_sad64x128x4d, aom_dist_wtd_sad64x128_avg,
2879 aom_dist_wtd_sub_pixel_avg_variance64x128)
Cheng Chenf78632e2017-10-20 15:30:51 -07002880
2881 BFP(BLOCK_32X16, aom_sad32x16, aom_sad32x16_avg, aom_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002882 aom_sub_pixel_variance32x16, aom_sub_pixel_avg_variance32x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002883 aom_sad32x16x4d, aom_dist_wtd_sad32x16_avg,
2884 aom_dist_wtd_sub_pixel_avg_variance32x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002885
2886 BFP(BLOCK_16X32, aom_sad16x32, aom_sad16x32_avg, aom_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002887 aom_sub_pixel_variance16x32, aom_sub_pixel_avg_variance16x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002888 aom_sad16x32x4d, aom_dist_wtd_sad16x32_avg,
2889 aom_dist_wtd_sub_pixel_avg_variance16x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002890
2891 BFP(BLOCK_64X32, aom_sad64x32, aom_sad64x32_avg, aom_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002892 aom_sub_pixel_variance64x32, aom_sub_pixel_avg_variance64x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002893 aom_sad64x32x4d, aom_dist_wtd_sad64x32_avg,
2894 aom_dist_wtd_sub_pixel_avg_variance64x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002895
2896 BFP(BLOCK_32X64, aom_sad32x64, aom_sad32x64_avg, aom_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002897 aom_sub_pixel_variance32x64, aom_sub_pixel_avg_variance32x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002898 aom_sad32x64x4d, aom_dist_wtd_sad32x64_avg,
2899 aom_dist_wtd_sub_pixel_avg_variance32x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002900
2901 BFP(BLOCK_32X32, aom_sad32x32, aom_sad32x32_avg, aom_variance32x32,
2902 aom_sub_pixel_variance32x32, aom_sub_pixel_avg_variance32x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002903 aom_sad32x32x4d, aom_dist_wtd_sad32x32_avg,
2904 aom_dist_wtd_sub_pixel_avg_variance32x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002905
2906 BFP(BLOCK_64X64, aom_sad64x64, aom_sad64x64_avg, aom_variance64x64,
2907 aom_sub_pixel_variance64x64, aom_sub_pixel_avg_variance64x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002908 aom_sad64x64x4d, aom_dist_wtd_sad64x64_avg,
2909 aom_dist_wtd_sub_pixel_avg_variance64x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002910
2911 BFP(BLOCK_16X16, aom_sad16x16, aom_sad16x16_avg, aom_variance16x16,
2912 aom_sub_pixel_variance16x16, aom_sub_pixel_avg_variance16x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002913 aom_sad16x16x4d, aom_dist_wtd_sad16x16_avg,
2914 aom_dist_wtd_sub_pixel_avg_variance16x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002915
2916 BFP(BLOCK_16X8, aom_sad16x8, aom_sad16x8_avg, aom_variance16x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002917 aom_sub_pixel_variance16x8, aom_sub_pixel_avg_variance16x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002918 aom_sad16x8x4d, aom_dist_wtd_sad16x8_avg,
2919 aom_dist_wtd_sub_pixel_avg_variance16x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002920
2921 BFP(BLOCK_8X16, aom_sad8x16, aom_sad8x16_avg, aom_variance8x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002922 aom_sub_pixel_variance8x16, aom_sub_pixel_avg_variance8x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002923 aom_sad8x16x4d, aom_dist_wtd_sad8x16_avg,
2924 aom_dist_wtd_sub_pixel_avg_variance8x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002925
2926 BFP(BLOCK_8X8, aom_sad8x8, aom_sad8x8_avg, aom_variance8x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002927 aom_sub_pixel_variance8x8, aom_sub_pixel_avg_variance8x8, aom_sad8x8x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002928 aom_dist_wtd_sad8x8_avg, aom_dist_wtd_sub_pixel_avg_variance8x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002929
2930 BFP(BLOCK_8X4, aom_sad8x4, aom_sad8x4_avg, aom_variance8x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002931 aom_sub_pixel_variance8x4, aom_sub_pixel_avg_variance8x4, aom_sad8x4x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002932 aom_dist_wtd_sad8x4_avg, aom_dist_wtd_sub_pixel_avg_variance8x4)
Cheng Chenf78632e2017-10-20 15:30:51 -07002933
2934 BFP(BLOCK_4X8, aom_sad4x8, aom_sad4x8_avg, aom_variance4x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002935 aom_sub_pixel_variance4x8, aom_sub_pixel_avg_variance4x8, aom_sad4x8x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002936 aom_dist_wtd_sad4x8_avg, aom_dist_wtd_sub_pixel_avg_variance4x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002937
2938 BFP(BLOCK_4X4, aom_sad4x4, aom_sad4x4_avg, aom_variance4x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002939 aom_sub_pixel_variance4x4, aom_sub_pixel_avg_variance4x4, aom_sad4x4x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002940 aom_dist_wtd_sad4x4_avg, aom_dist_wtd_sub_pixel_avg_variance4x4)
Cheng Chenf78632e2017-10-20 15:30:51 -07002941
Yaowu Xuc27fc142016-08-22 16:08:15 -07002942#define OBFP(BT, OSDF, OVF, OSVF) \
2943 cpi->fn_ptr[BT].osdf = OSDF; \
2944 cpi->fn_ptr[BT].ovf = OVF; \
2945 cpi->fn_ptr[BT].osvf = OSVF;
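  // OBFP fills in the OBMC (overlapped block motion compensation)
  // counterparts: osdf, ovf and osvf are the OBMC SAD, variance and sub-pixel
  // variance used when the OBMC motion mode is evaluated.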
2946
Yaowu Xuf883b422016-08-30 14:01:10 -07002947 OBFP(BLOCK_128X128, aom_obmc_sad128x128, aom_obmc_variance128x128,
2948 aom_obmc_sub_pixel_variance128x128)
2949 OBFP(BLOCK_128X64, aom_obmc_sad128x64, aom_obmc_variance128x64,
2950 aom_obmc_sub_pixel_variance128x64)
2951 OBFP(BLOCK_64X128, aom_obmc_sad64x128, aom_obmc_variance64x128,
2952 aom_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07002953 OBFP(BLOCK_64X64, aom_obmc_sad64x64, aom_obmc_variance64x64,
2954 aom_obmc_sub_pixel_variance64x64)
2955 OBFP(BLOCK_64X32, aom_obmc_sad64x32, aom_obmc_variance64x32,
2956 aom_obmc_sub_pixel_variance64x32)
2957 OBFP(BLOCK_32X64, aom_obmc_sad32x64, aom_obmc_variance32x64,
2958 aom_obmc_sub_pixel_variance32x64)
2959 OBFP(BLOCK_32X32, aom_obmc_sad32x32, aom_obmc_variance32x32,
2960 aom_obmc_sub_pixel_variance32x32)
2961 OBFP(BLOCK_32X16, aom_obmc_sad32x16, aom_obmc_variance32x16,
2962 aom_obmc_sub_pixel_variance32x16)
2963 OBFP(BLOCK_16X32, aom_obmc_sad16x32, aom_obmc_variance16x32,
2964 aom_obmc_sub_pixel_variance16x32)
2965 OBFP(BLOCK_16X16, aom_obmc_sad16x16, aom_obmc_variance16x16,
2966 aom_obmc_sub_pixel_variance16x16)
2967 OBFP(BLOCK_16X8, aom_obmc_sad16x8, aom_obmc_variance16x8,
2968 aom_obmc_sub_pixel_variance16x8)
2969 OBFP(BLOCK_8X16, aom_obmc_sad8x16, aom_obmc_variance8x16,
2970 aom_obmc_sub_pixel_variance8x16)
2971 OBFP(BLOCK_8X8, aom_obmc_sad8x8, aom_obmc_variance8x8,
2972 aom_obmc_sub_pixel_variance8x8)
2973 OBFP(BLOCK_4X8, aom_obmc_sad4x8, aom_obmc_variance4x8,
2974 aom_obmc_sub_pixel_variance4x8)
2975 OBFP(BLOCK_8X4, aom_obmc_sad8x4, aom_obmc_variance8x4,
2976 aom_obmc_sub_pixel_variance8x4)
2977 OBFP(BLOCK_4X4, aom_obmc_sad4x4, aom_obmc_variance4x4,
2978 aom_obmc_sub_pixel_variance4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002979 OBFP(BLOCK_4X16, aom_obmc_sad4x16, aom_obmc_variance4x16,
2980 aom_obmc_sub_pixel_variance4x16)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002981 OBFP(BLOCK_16X4, aom_obmc_sad16x4, aom_obmc_variance16x4,
2982 aom_obmc_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002983 OBFP(BLOCK_8X32, aom_obmc_sad8x32, aom_obmc_variance8x32,
2984 aom_obmc_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002985 OBFP(BLOCK_32X8, aom_obmc_sad32x8, aom_obmc_variance32x8,
2986 aom_obmc_sub_pixel_variance32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002987 OBFP(BLOCK_16X64, aom_obmc_sad16x64, aom_obmc_variance16x64,
2988 aom_obmc_sub_pixel_variance16x64)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002989 OBFP(BLOCK_64X16, aom_obmc_sad64x16, aom_obmc_variance64x16,
2990 aom_obmc_sub_pixel_variance64x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002991
David Barkerf19f35f2017-05-22 16:33:22 +01002992#define MBFP(BT, MCSDF, MCSVF) \
2993 cpi->fn_ptr[BT].msdf = MCSDF; \
2994 cpi->fn_ptr[BT].msvf = MCSVF;
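  // MBFP covers masked compound prediction (e.g. wedge): msdf and msvf are the
  // masked SAD and masked sub-pixel variance.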
Yaowu Xuc27fc142016-08-22 16:08:15 -07002995
David Barkerf19f35f2017-05-22 16:33:22 +01002996 MBFP(BLOCK_128X128, aom_masked_sad128x128,
2997 aom_masked_sub_pixel_variance128x128)
2998 MBFP(BLOCK_128X64, aom_masked_sad128x64, aom_masked_sub_pixel_variance128x64)
2999 MBFP(BLOCK_64X128, aom_masked_sad64x128, aom_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01003000 MBFP(BLOCK_64X64, aom_masked_sad64x64, aom_masked_sub_pixel_variance64x64)
3001 MBFP(BLOCK_64X32, aom_masked_sad64x32, aom_masked_sub_pixel_variance64x32)
3002 MBFP(BLOCK_32X64, aom_masked_sad32x64, aom_masked_sub_pixel_variance32x64)
3003 MBFP(BLOCK_32X32, aom_masked_sad32x32, aom_masked_sub_pixel_variance32x32)
3004 MBFP(BLOCK_32X16, aom_masked_sad32x16, aom_masked_sub_pixel_variance32x16)
3005 MBFP(BLOCK_16X32, aom_masked_sad16x32, aom_masked_sub_pixel_variance16x32)
3006 MBFP(BLOCK_16X16, aom_masked_sad16x16, aom_masked_sub_pixel_variance16x16)
3007 MBFP(BLOCK_16X8, aom_masked_sad16x8, aom_masked_sub_pixel_variance16x8)
3008 MBFP(BLOCK_8X16, aom_masked_sad8x16, aom_masked_sub_pixel_variance8x16)
3009 MBFP(BLOCK_8X8, aom_masked_sad8x8, aom_masked_sub_pixel_variance8x8)
3010 MBFP(BLOCK_4X8, aom_masked_sad4x8, aom_masked_sub_pixel_variance4x8)
3011 MBFP(BLOCK_8X4, aom_masked_sad8x4, aom_masked_sub_pixel_variance8x4)
3012 MBFP(BLOCK_4X4, aom_masked_sad4x4, aom_masked_sub_pixel_variance4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01003013
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01003014 MBFP(BLOCK_4X16, aom_masked_sad4x16, aom_masked_sub_pixel_variance4x16)
3015
3016 MBFP(BLOCK_16X4, aom_masked_sad16x4, aom_masked_sub_pixel_variance16x4)
3017
3018 MBFP(BLOCK_8X32, aom_masked_sad8x32, aom_masked_sub_pixel_variance8x32)
3019
3020 MBFP(BLOCK_32X8, aom_masked_sad32x8, aom_masked_sub_pixel_variance32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01003021
3022 MBFP(BLOCK_16X64, aom_masked_sad16x64, aom_masked_sub_pixel_variance16x64)
3023
3024 MBFP(BLOCK_64X16, aom_masked_sad64x16, aom_masked_sub_pixel_variance64x16)
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01003025
Yaowu Xuc27fc142016-08-22 16:08:15 -07003026 highbd_set_var_fns(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003027
Yaowu Xuf883b422016-08-30 14:01:10 -07003028   /* av1_init_quantizer() is first called here. A check in
 3029    * av1_frame_init_quantizer() is used so that av1_init_quantizer() is only
Yaowu Xuc27fc142016-08-22 16:08:15 -07003030    * called again later when needed, avoiding unnecessary calls to
Yaowu Xuf883b422016-08-30 14:01:10 -07003031    * av1_init_quantizer() on every frame.
Yaowu Xuc27fc142016-08-22 16:08:15 -07003032    */
Yaowu Xuf883b422016-08-30 14:01:10 -07003033 av1_init_quantizer(cpi);
Zoe Liud902b742018-02-19 17:02:41 -08003034 av1_qm_init(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003035
Yaowu Xuf883b422016-08-30 14:01:10 -07003036 av1_loop_filter_init(cm);
Urvang Joshide71d142017-10-05 12:12:15 -07003037 cm->superres_scale_denominator = SCALE_NUMERATOR;
Debargha Mukherjee29e40a62017-06-14 09:37:12 -07003038 cm->superres_upscaled_width = oxcf->width;
3039 cm->superres_upscaled_height = oxcf->height;
Yaowu Xuf883b422016-08-30 14:01:10 -07003040 av1_loop_restoration_precal();
Yaowu Xuc27fc142016-08-22 16:08:15 -07003041
3042 cm->error.setjmp = 0;
3043
3044 return cpi;
3045}
3046
Urvang Joshiee2c8112018-05-04 14:53:15 -07003047#if CONFIG_INTERNAL_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003048#define SNPRINT(H, T) snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T))
3049
3050#define SNPRINT2(H, T, V) \
3051 snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T), (V))
Urvang Joshiee2c8112018-05-04 14:53:15 -07003052#endif // CONFIG_INTERNAL_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003053
Yaowu Xuf883b422016-08-30 14:01:10 -07003054void av1_remove_compressor(AV1_COMP *cpi) {
3055 AV1_COMMON *cm;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003056 unsigned int i;
3057 int t;
3058
3059 if (!cpi) return;
3060
3061 cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003062 const int num_planes = av1_num_planes(cm);
3063
David Turnerd2a592e2018-11-16 14:59:31 +00003064 if (cm->current_frame.frame_number > 0) {
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08003065#if CONFIG_ENTROPY_STATS
3066 if (cpi->oxcf.pass != 1) {
3067 fprintf(stderr, "Writing counts.stt\n");
3068 FILE *f = fopen("counts.stt", "wb");
3069 fwrite(&aggregate_fc, sizeof(aggregate_fc), 1, f);
3070 fclose(f);
3071 }
3072#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003073#if CONFIG_INTERNAL_STATS
Yaowu Xuf883b422016-08-30 14:01:10 -07003074 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07003075
3076 if (cpi->oxcf.pass != 1) {
3077 char headings[512] = { 0 };
3078 char results[512] = { 0 };
3079 FILE *f = fopen("opsnr.stt", "a");
3080 double time_encoded =
3081 (cpi->last_end_time_stamp_seen - cpi->first_time_stamp_ever) /
3082 10000000.000;
3083 double total_encode_time =
3084 (cpi->time_receive_data + cpi->time_compress_data) / 1000.000;
3085 const double dr =
3086 (double)cpi->bytes * (double)8 / (double)1000 / time_encoded;
3087 const double peak = (double)((1 << cpi->oxcf.input_bit_depth) - 1);
3088 const double target_rate = (double)cpi->oxcf.target_bandwidth / 1000;
3089 const double rate_err = ((100.0 * (dr - target_rate)) / target_rate);
3090
3091 if (cpi->b_calculate_psnr) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003092 const double total_psnr = aom_sse_to_psnr(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003093 (double)cpi->total_samples, peak, (double)cpi->total_sq_error);
3094 const double total_ssim =
3095 100 * pow(cpi->summed_quality / cpi->summed_weights, 8.0);
3096 snprintf(headings, sizeof(headings),
Jingning Han87651b22017-11-28 20:02:26 -08003097 "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\tGLPsnrP\t"
Yaowu Xuf883b422016-08-30 14:01:10 -07003098 "AOMSSIM\tVPSSIMP\tFASTSIM\tPSNRHVS\t"
Jingning Hanbe1ae3f2017-11-27 10:27:56 -08003099 "WstPsnr\tWstSsim\tWstFast\tWstHVS\t"
Jingning Han87651b22017-11-28 20:02:26 -08003100                 "AVPsnrY\tAPsnrCb\tAPsnrCr");
Yaowu Xuc27fc142016-08-22 16:08:15 -07003101 snprintf(results, sizeof(results),
3102 "%7.2f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
3103 "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
Jingning Hanbe1ae3f2017-11-27 10:27:56 -08003104 "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
Jingning Han87651b22017-11-28 20:02:26 -08003105 "%7.3f\t%7.3f\t%7.3f",
Wan-Teh Changc25c92a2018-04-23 15:04:14 -07003106 dr, cpi->psnr.stat[STAT_ALL] / cpi->count, total_psnr,
3107 cpi->psnr.stat[STAT_ALL] / cpi->count, total_psnr, total_ssim,
3108 total_ssim, cpi->fastssim.stat[STAT_ALL] / cpi->count,
3109 cpi->psnrhvs.stat[STAT_ALL] / cpi->count, cpi->psnr.worst,
Jingning Hanbe1ae3f2017-11-27 10:27:56 -08003110 cpi->worst_ssim, cpi->fastssim.worst, cpi->psnrhvs.worst,
Wan-Teh Changc25c92a2018-04-23 15:04:14 -07003111 cpi->psnr.stat[STAT_Y] / cpi->count,
3112 cpi->psnr.stat[STAT_U] / cpi->count,
3113 cpi->psnr.stat[STAT_V] / cpi->count);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003114
3115 if (cpi->b_calculate_blockiness) {
3116 SNPRINT(headings, "\t Block\tWstBlck");
3117 SNPRINT2(results, "\t%7.3f", cpi->total_blockiness / cpi->count);
3118 SNPRINT2(results, "\t%7.3f", cpi->worst_blockiness);
3119 }
3120
3121 if (cpi->b_calculate_consistency) {
3122 double consistency =
Yaowu Xuf883b422016-08-30 14:01:10 -07003123 aom_sse_to_psnr((double)cpi->total_samples, peak,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003124 (double)cpi->total_inconsistency);
3125
3126 SNPRINT(headings, "\tConsist\tWstCons");
3127 SNPRINT2(results, "\t%7.3f", consistency);
3128 SNPRINT2(results, "\t%7.3f", cpi->worst_consistency);
3129 }
Sarah Parkerf97b7862016-08-25 17:42:57 -07003130 fprintf(f, "%s\t Time\tRcErr\tAbsErr\n", headings);
3131 fprintf(f, "%s\t%8.0f\t%7.2f\t%7.2f\n", results, total_encode_time,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003132 rate_err, fabs(rate_err));
3133 }
3134
3135 fclose(f);
3136 }
Urvang Joshiee2c8112018-05-04 14:53:15 -07003137#endif // CONFIG_INTERNAL_STATS
Debargha Mukherjee0857e662019-01-04 16:22:09 -08003138#if CONFIG_SPEED_STATS
3139 if (cpi->oxcf.pass != 1) {
3140 fprintf(stdout, "tx_search_count = %d\n", cpi->tx_search_count);
3141 }
3142#endif // CONFIG_SPEED_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003143 }
3144
Yue Chen7cae98f2018-08-24 10:43:16 -07003145 for (int frame = 0; frame < MAX_LAG_BUFFERS; ++frame) {
3146 aom_free(cpi->tpl_stats[frame].tpl_stats_ptr);
3147 cpi->tpl_stats[frame].is_valid = 0;
3148 }
3149
Ravi Chaudhary1f58dd82018-12-07 17:24:15 +05303150 for (t = cpi->num_workers - 1; t >= 0; --t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003151 AVxWorker *const worker = &cpi->workers[t];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003152 EncWorkerData *const thread_data = &cpi->tile_thr_data[t];
3153
3154 // Deallocate allocated threads.
Yaowu Xuf883b422016-08-30 14:01:10 -07003155 aom_get_worker_interface()->end(worker);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003156
3157 // Deallocate allocated thread data.
Ravi Chaudhary1cf7d162018-10-09 17:00:43 +05303158 if (cpi->row_mt == 1) aom_free(thread_data->td->tctx);
Ravi Chaudhary1f58dd82018-12-07 17:24:15 +05303159 if (t > 0) {
hui sud9a812b2017-07-06 14:34:37 -07003160 aom_free(thread_data->td->palette_buffer);
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07003161 aom_free(thread_data->td->tmp_conv_dst);
3162 for (int j = 0; j < 2; ++j) {
3163 aom_free(thread_data->td->tmp_obmc_bufs[j]);
3164 }
Jingning Hand064cf02017-06-01 10:00:39 -07003165 aom_free(thread_data->td->above_pred_buf);
3166 aom_free(thread_data->td->left_pred_buf);
3167 aom_free(thread_data->td->wsrc_buf);
wenyao.liu22d8ab32018-10-16 09:11:29 +08003168
Ravi Chaudhary5d970f42018-09-25 11:25:32 +05303169#if CONFIG_COLLECT_INTER_MODE_RD_STATS
3170 aom_free(thread_data->td->inter_modes_info);
3171#endif
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07003172 for (int x = 0; x < 2; x++) {
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05303173 for (int y = 0; y < 2; y++) {
3174 aom_free(thread_data->td->hash_value_buffer[x][y]);
3175 thread_data->td->hash_value_buffer[x][y] = NULL;
3176 }
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07003177 }
Jingning Hand064cf02017-06-01 10:00:39 -07003178 aom_free(thread_data->td->mask_buf);
Yaowu Xuf883b422016-08-30 14:01:10 -07003179 aom_free(thread_data->td->counts);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003180 av1_free_pc_tree(thread_data->td, num_planes);
Yaowu Xuf883b422016-08-30 14:01:10 -07003181 aom_free(thread_data->td);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003182 }
3183 }
Ravi Chaudhary90a15f42018-10-11 18:56:35 +05303184#if CONFIG_MULTITHREAD
3185 if (cpi->row_mt == 1) {
3186 if (cpi->row_mt_mutex_ != NULL) {
3187 pthread_mutex_destroy(cpi->row_mt_mutex_);
3188 aom_free(cpi->row_mt_mutex_);
3189 }
3190 }
3191#endif
Ravi Chaudharyc5e74692018-10-08 16:05:38 +05303192 av1_row_mt_mem_dealloc(cpi);
Yaowu Xuf883b422016-08-30 14:01:10 -07003193 aom_free(cpi->tile_thr_data);
3194 aom_free(cpi->workers);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003195
Deepa K G964e72e2018-05-16 16:56:01 +05303196 if (cpi->num_workers > 1) {
3197 av1_loop_filter_dealloc(&cpi->lf_row_sync);
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05303198 av1_loop_restoration_dealloc(&cpi->lr_row_sync, cpi->num_workers);
Deepa K G964e72e2018-05-16 16:56:01 +05303199 }
3200
Yaowu Xuc27fc142016-08-22 16:08:15 -07003201 dealloc_compressor_data(cpi);
3202
3203 for (i = 0; i < sizeof(cpi->mbgraph_stats) / sizeof(cpi->mbgraph_stats[0]);
3204 ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003205 aom_free(cpi->mbgraph_stats[i].mb_stats);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003206 }
3207
3208#if CONFIG_FP_MB_STATS
3209 if (cpi->use_fp_mb_stats) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003210 aom_free(cpi->twopass.frame_mb_stats_buf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003211 cpi->twopass.frame_mb_stats_buf = NULL;
3212 }
3213#endif
Debargha Mukherjee5d157212017-01-10 14:44:47 -08003214#if CONFIG_INTERNAL_STATS
3215 aom_free(cpi->ssim_vars);
3216 cpi->ssim_vars = NULL;
3217#endif // CONFIG_INTERNAL_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003218
Yaowu Xuf883b422016-08-30 14:01:10 -07003219 av1_remove_common(cm);
RogerZhou80d52342017-11-20 10:56:26 -08003220 for (i = 0; i < FRAME_BUFFERS; ++i) {
3221 av1_hash_table_destroy(&cm->buffer_pool->frame_bufs[i].hash_table);
3222 }
Michelle Findlay-Olynykdea531d2017-12-13 14:10:56 -08003223 if (cpi->sf.use_hash_based_trellis) hbt_destroy();
Yaowu Xuf883b422016-08-30 14:01:10 -07003224 av1_free_ref_frame_buffers(cm->buffer_pool);
3225 aom_free(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003226
3227#ifdef OUTPUT_YUV_SKINMAP
3228 fclose(yuv_skinmap_file);
3229#endif
3230#ifdef OUTPUT_YUV_REC
3231 fclose(yuv_rec_file);
3232#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003233}
3234
Yaowu Xuf883b422016-08-30 14:01:10 -07003235static void generate_psnr_packet(AV1_COMP *cpi) {
3236 struct aom_codec_cx_pkt pkt;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003237 int i;
3238 PSNR_STATS psnr;
David Turnerc29e1a92018-12-06 14:10:14 +00003239 aom_calc_highbd_psnr(cpi->source, &cpi->common.cur_frame->buf, &psnr,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003240 cpi->td.mb.e_mbd.bd, cpi->oxcf.input_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003241
3242 for (i = 0; i < 4; ++i) {
3243 pkt.data.psnr.samples[i] = psnr.samples[i];
3244 pkt.data.psnr.sse[i] = psnr.sse[i];
3245 pkt.data.psnr.psnr[i] = psnr.psnr[i];
3246 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003247 pkt.kind = AOM_CODEC_PSNR_PKT;
3248 aom_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003249}
3250
Yaowu Xuf883b422016-08-30 14:01:10 -07003251int av1_use_as_reference(AV1_COMP *cpi, int ref_frame_flags) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003252 if (ref_frame_flags > ((1 << INTER_REFS_PER_FRAME) - 1)) return -1;
3253
Yunqing Wangf2e7a392017-11-08 00:27:21 -08003254 cpi->ext_ref_frame_flags = ref_frame_flags;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003255 return 0;
3256}
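// Illustrative (hypothetical) usage, not taken from the original source: a
// caller that only wants LAST_FRAME and GOLDEN_FRAME considered as references
// could pass
//   av1_use_as_reference(cpi, AOM_LAST_FLAG | AOM_GOLD_FLAG);
// presumably using the same AOM_*_FLAG bitmask that av1_update_reference()
// checks below.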
3257
Yunqing Wang9a50fec2017-11-02 17:02:00 -07003258void av1_update_reference(AV1_COMP *cpi, int ref_frame_upd_flags) {
3259 cpi->ext_refresh_last_frame = (ref_frame_upd_flags & AOM_LAST_FLAG) != 0;
3260 cpi->ext_refresh_golden_frame = (ref_frame_upd_flags & AOM_GOLD_FLAG) != 0;
3261 cpi->ext_refresh_alt_ref_frame = (ref_frame_upd_flags & AOM_ALT_FLAG) != 0;
3262 cpi->ext_refresh_bwd_ref_frame = (ref_frame_upd_flags & AOM_BWD_FLAG) != 0;
3263 cpi->ext_refresh_alt2_ref_frame = (ref_frame_upd_flags & AOM_ALT2_FLAG) != 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003264 cpi->ext_refresh_frame_flags_pending = 1;
3265}
3266
Thomas Daede497d1952017-08-08 17:33:06 -07003267int av1_copy_reference_enc(AV1_COMP *cpi, int idx, YV12_BUFFER_CONFIG *sd) {
3268 AV1_COMMON *const cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003269 const int num_planes = av1_num_planes(cm);
Thomas Daede497d1952017-08-08 17:33:06 -07003270 YV12_BUFFER_CONFIG *cfg = get_ref_frame(cm, idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003271 if (cfg) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003272 aom_yv12_copy_frame(cfg, sd, num_planes);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003273 return 0;
3274 } else {
3275 return -1;
3276 }
3277}
3278
Thomas Daede497d1952017-08-08 17:33:06 -07003279int av1_set_reference_enc(AV1_COMP *cpi, int idx, YV12_BUFFER_CONFIG *sd) {
3280 AV1_COMMON *const cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003281 const int num_planes = av1_num_planes(cm);
Thomas Daede497d1952017-08-08 17:33:06 -07003282 YV12_BUFFER_CONFIG *cfg = get_ref_frame(cm, idx);
Yaowu Xuf883b422016-08-30 14:01:10 -07003283 if (cfg) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003284 aom_yv12_copy_frame(sd, cfg, num_planes);
Yaowu Xuf883b422016-08-30 14:01:10 -07003285 return 0;
3286 } else {
3287 return -1;
3288 }
3289}
3290
3291int av1_update_entropy(AV1_COMP *cpi, int update) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003292 cpi->ext_refresh_frame_context = update;
3293 cpi->ext_refresh_frame_context_pending = 1;
3294 return 0;
3295}
3296
3297#if defined(OUTPUT_YUV_DENOISED) || defined(OUTPUT_YUV_SKINMAP)
3298// The denoiser buffer is allocated as a YUV 440 buffer. This function writes it
3299// as YUV 420. We simply use the top-left pixels of the UV buffers, since we do
3300// not denoise the UV channels at this time. If ever we implement UV channel
3301// denoising we will have to modify this.
Yaowu Xuf883b422016-08-30 14:01:10 -07003302void aom_write_yuv_frame_420(YV12_BUFFER_CONFIG *s, FILE *f) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003303 uint8_t *src = s->y_buffer;
3304 int h = s->y_height;
3305
3306 do {
3307 fwrite(src, s->y_width, 1, f);
3308 src += s->y_stride;
3309 } while (--h);
3310
3311 src = s->u_buffer;
3312 h = s->uv_height;
3313
3314 do {
3315 fwrite(src, s->uv_width, 1, f);
3316 src += s->uv_stride;
3317 } while (--h);
3318
3319 src = s->v_buffer;
3320 h = s->uv_height;
3321
3322 do {
3323 fwrite(src, s->uv_width, 1, f);
3324 src += s->uv_stride;
3325 } while (--h);
3326}
3327#endif
3328
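// Decides whether the next output should be signalled as a
// show_existing_frame: either the BWDREF_FRAME that follows a last
// bi-predictive frame, or an unfiltered ALTREF/ALTREF2 shown in place of its
// overlay frame. Sets cm->show_existing_frame and
// cpi->existing_fb_idx_to_show accordingly.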
Yaowu Xuf883b422016-08-30 14:01:10 -07003329static void check_show_existing_frame(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003330 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
Yaowu Xuf883b422016-08-30 14:01:10 -07003331 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003332 const FRAME_UPDATE_TYPE next_frame_update_type =
3333 gf_group->update_type[gf_group->index];
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003334#if USE_SYMM_MULTI_LAYER
3335 const int which_arf = (cpi->new_bwdref_update_rule == 1)
3336 ? gf_group->arf_update_idx[gf_group->index] > 0
3337 : gf_group->arf_update_idx[gf_group->index];
3338#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003339 const int which_arf = gf_group->arf_update_idx[gf_group->index];
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003340#endif
Zoe Liu5fca7242016-10-10 17:18:57 -07003341
3342 if (cm->show_existing_frame == 1) {
3343 cm->show_existing_frame = 0;
3344 } else if (cpi->rc.is_last_bipred_frame) {
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003345#if USE_SYMM_MULTI_LAYER
3346 // NOTE: When new structure is used, every bwdref will have one overlay
3347 // frame. Therefore, there is no need to find out which frame to
3348 // show in advance.
3349 if (cpi->new_bwdref_update_rule == 0) {
3350#endif
3351      // NOTE: If the current frame is a last bi-predictive frame, the
3352      // BWDREF_FRAME needs to be shown next; it is pointed to by
3353      // remapped_ref_idx[0] after the reference frame buffer update.
3354 cpi->rc.is_last_bipred_frame = 0;
3355 cm->show_existing_frame = 1;
David Turnera21966b2018-12-05 14:48:49 +00003356 cpi->existing_fb_idx_to_show = cm->remapped_ref_idx[0];
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003357#if USE_SYMM_MULTI_LAYER
3358 }
3359#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003360 } else if (cpi->is_arf_filter_off[which_arf] &&
3361 (next_frame_update_type == OVERLAY_UPDATE ||
3362 next_frame_update_type == INTNL_OVERLAY_UPDATE)) {
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003363#if USE_SYMM_MULTI_LAYER
3364 const int bwdref_to_show =
3365 (cpi->new_bwdref_update_rule == 1) ? BWDREF_FRAME : ALTREF2_FRAME;
3366#else
3367 const int bwdref_to_show = ALTREF2_FRAME;
3368#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003369 // Other parameters related to OVERLAY_UPDATE will be taken care of
Yaowu Xuf883b422016-08-30 14:01:10 -07003370 // in av1_rc_get_second_pass_params(cpi)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003371 cm->show_existing_frame = 1;
3372 cpi->rc.is_src_frame_alt_ref = 1;
Urvang Joshi4d9f15f2018-11-05 15:26:22 -08003373 cpi->existing_fb_idx_to_show =
3374 (next_frame_update_type == OVERLAY_UPDATE)
David Turnera21966b2018-12-05 14:48:49 +00003375 ? get_ref_frame_map_idx(cm, ALTREF_FRAME)
3376 : get_ref_frame_map_idx(cm, bwdref_to_show);
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003377#if USE_SYMM_MULTI_LAYER
3378 if (cpi->new_bwdref_update_rule == 0)
3379#endif
3380 cpi->is_arf_filter_off[which_arf] = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003381 }
3382 cpi->rc.is_src_frame_ext_arf = 0;
3383}
Yaowu Xuc27fc142016-08-22 16:08:15 -07003384
3385#ifdef OUTPUT_YUV_REC
Yaowu Xuf883b422016-08-30 14:01:10 -07003386void aom_write_one_yuv_frame(AV1_COMMON *cm, YV12_BUFFER_CONFIG *s) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003387 uint8_t *src = s->y_buffer;
3388 int h = cm->height;
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07003389 if (yuv_rec_file == NULL) return;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003390 if (s->flags & YV12_FLAG_HIGHBITDEPTH) {
3391 uint16_t *src16 = CONVERT_TO_SHORTPTR(s->y_buffer);
3392
3393 do {
3394 fwrite(src16, s->y_width, 2, yuv_rec_file);
3395 src16 += s->y_stride;
3396 } while (--h);
3397
3398 src16 = CONVERT_TO_SHORTPTR(s->u_buffer);
3399 h = s->uv_height;
3400
3401 do {
3402 fwrite(src16, s->uv_width, 2, yuv_rec_file);
3403 src16 += s->uv_stride;
3404 } while (--h);
3405
3406 src16 = CONVERT_TO_SHORTPTR(s->v_buffer);
3407 h = s->uv_height;
3408
3409 do {
3410 fwrite(src16, s->uv_width, 2, yuv_rec_file);
3411 src16 += s->uv_stride;
3412 } while (--h);
3413
3414 fflush(yuv_rec_file);
3415 return;
3416 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003417
3418 do {
3419 fwrite(src, s->y_width, 1, yuv_rec_file);
3420 src += s->y_stride;
3421 } while (--h);
3422
3423 src = s->u_buffer;
3424 h = s->uv_height;
3425
3426 do {
3427 fwrite(src, s->uv_width, 1, yuv_rec_file);
3428 src += s->uv_stride;
3429 } while (--h);
3430
3431 src = s->v_buffer;
3432 h = s->uv_height;
3433
3434 do {
3435 fwrite(src, s->uv_width, 1, yuv_rec_file);
3436 src += s->uv_stride;
3437 } while (--h);
3438
3439 fflush(yuv_rec_file);
3440}
3441#endif // OUTPUT_YUV_REC
3442
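// GM_RECODE_LOOP_NUM4X4_FACTOR weighs the number of 4x4 blocks that used a
// global motion model against the cost of signalling that model: if
// rdc->global_motion_used[i] * factor is still smaller than
// cpi->gmparams_cost[i], the model is reset to IDENTITY and the frame is
// recoded (see recode_loop_test_global_motion() below).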
Debargha Mukherjee11f0e402017-03-29 07:42:40 -07003443#define GM_RECODE_LOOP_NUM4X4_FACTOR 192
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003444static int recode_loop_test_global_motion(AV1_COMP *cpi) {
3445 int i;
3446 int recode = 0;
Debargha Mukherjeea575d232017-04-28 17:46:47 -07003447 RD_COUNTS *const rdc = &cpi->td.rd_counts;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003448 AV1_COMMON *const cm = &cpi->common;
3449 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
3450 if (cm->global_motion[i].wmtype != IDENTITY &&
Debargha Mukherjeea575d232017-04-28 17:46:47 -07003451 rdc->global_motion_used[i] * GM_RECODE_LOOP_NUM4X4_FACTOR <
Debargha Mukherjee265db6d2017-03-28 11:15:27 -07003452 cpi->gmparams_cost[i]) {
David Barkerd7c8bd52017-09-25 14:47:29 +01003453 cm->global_motion[i] = default_warp_params;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07003454 assert(cm->global_motion[i].wmtype == IDENTITY);
Debargha Mukherjee265db6d2017-03-28 11:15:27 -07003455 cpi->gmparams_cost[i] = 0;
David Barker43479c62016-11-30 10:34:20 +00003456 recode = 1;
Urvang Joshi02aade82017-12-18 17:18:16 -08003457 // TODO(sarahparker): The earlier condition for recoding here was:
3458 // "recode |= (rdc->global_motion_used[i] > 0);". Can we bring something
3459 // similar to that back to speed up global motion?
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003460 }
3461 }
3462 return recode;
3463}
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003464
Yaowu Xuc27fc142016-08-22 16:08:15 -07003465// Function to test for conditions that indicate we should loop
3466// back and recode a frame.
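// Recoding is only considered when the frame overshoots max_frame_bandwidth,
// or when the recode_loop speed feature allows it (always, or for KF/GF/ARF
// frames). It is then forced if the projected size falls outside
// [low_limit, high_limit] while q still has headroom, or, in AOM_CQ mode, if
// the frame undershoots 7/8 of its target while q is above the cq level.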
Yaowu Xuf883b422016-08-30 14:01:10 -07003467static int recode_loop_test(AV1_COMP *cpi, int high_limit, int low_limit, int q,
3468 int maxq, int minq) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003469 const RATE_CONTROL *const rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -07003470 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003471 const int frame_is_kfgfarf = frame_is_kf_gf_arf(cpi);
3472 int force_recode = 0;
3473
3474 if ((rc->projected_frame_size >= rc->max_frame_bandwidth) ||
3475 (cpi->sf.recode_loop == ALLOW_RECODE) ||
3476 (frame_is_kfgfarf && (cpi->sf.recode_loop == ALLOW_RECODE_KFARFGF))) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003477 // TODO(agrange) high_limit could be greater than the scale-down threshold.
3478 if ((rc->projected_frame_size > high_limit && q < maxq) ||
3479 (rc->projected_frame_size < low_limit && q > minq)) {
3480 force_recode = 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07003481 } else if (cpi->oxcf.rc_mode == AOM_CQ) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003482 // Deal with frame undershoot and whether or not we are
3483 // below the automatically set cq level.
3484 if (q > oxcf->cq_level &&
3485 rc->projected_frame_size < ((rc->this_frame_target * 7) >> 3)) {
3486 force_recode = 1;
3487 }
3488 }
3489 }
3490 return force_recode;
3491}
3492
Yaowu Xuc27fc142016-08-22 16:08:15 -07003493#define DUMP_REF_FRAME_IMAGES 0
3494
3495#if DUMP_REF_FRAME_IMAGES == 1
Yaowu Xuf883b422016-08-30 14:01:10 -07003496static int dump_one_image(AV1_COMMON *cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003497 const YV12_BUFFER_CONFIG *const ref_buf,
3498 char *file_name) {
3499 int h;
3500 FILE *f_ref = NULL;
3501
3502 if (ref_buf == NULL) {
3503 printf("Frame data buffer is NULL.\n");
Yaowu Xuf883b422016-08-30 14:01:10 -07003504 return AOM_CODEC_MEM_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003505 }
3506
3507 if ((f_ref = fopen(file_name, "wb")) == NULL) {
3508 printf("Unable to open file %s to write.\n", file_name);
Yaowu Xuf883b422016-08-30 14:01:10 -07003509 return AOM_CODEC_MEM_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003510 }
3511
3512 // --- Y ---
3513 for (h = 0; h < cm->height; ++h) {
3514 fwrite(&ref_buf->y_buffer[h * ref_buf->y_stride], 1, cm->width, f_ref);
3515 }
3516 // --- U ---
3517 for (h = 0; h < (cm->height >> 1); ++h) {
3518 fwrite(&ref_buf->u_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
3519 f_ref);
3520 }
3521 // --- V ---
3522 for (h = 0; h < (cm->height >> 1); ++h) {
3523 fwrite(&ref_buf->v_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
3524 f_ref);
3525 }
3526
3527 fclose(f_ref);
3528
Yaowu Xuf883b422016-08-30 14:01:10 -07003529 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003530}
3531
Yaowu Xuf883b422016-08-30 14:01:10 -07003532static void dump_ref_frame_images(AV1_COMP *cpi) {
3533 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003534 MV_REFERENCE_FRAME ref_frame;
3535
3536 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
3537 char file_name[256] = "";
3538 snprintf(file_name, sizeof(file_name), "/tmp/enc_F%d_ref_%d.yuv",
David Turnerd2a592e2018-11-16 14:59:31 +00003539 cm->current_frame.frame_number, ref_frame);
David Turnera21966b2018-12-05 14:48:49 +00003540 dump_one_image(cm, get_ref_frame_yv12_buf(cpi, ref_frame), file_name);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003541 }
3542}
3543#endif // DUMP_REF_FRAME_IMAGES == 1
3544
Yaowu Xuc27fc142016-08-22 16:08:15 -07003545// This function is used to shift the virtual indices of last reference frames
3546// as follows:
3547// LAST_FRAME -> LAST2_FRAME -> LAST3_FRAME
3548// when the LAST_FRAME is updated.
Yaowu Xuf883b422016-08-30 14:01:10 -07003549static INLINE void shift_last_ref_frames(AV1_COMP *cpi) {
Imdad Sardharwalladadaba62018-02-23 12:06:56 +00003550 // TODO(isbs): shift the scaled indices as well
Urvang Joshia130dcc2018-11-06 10:27:35 -08003551 for (int ref_frame = LAST3_FRAME; ref_frame > LAST_FRAME; --ref_frame) {
3552 const int ref_idx = ref_frame - LAST_FRAME;
David Turnera21966b2018-12-05 14:48:49 +00003553 cpi->common.remapped_ref_idx[ref_idx] =
3554 cpi->common.remapped_ref_idx[ref_idx - 1];
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003555
3556 if (!cpi->rc.is_src_frame_alt_ref) {
3557 memcpy(cpi->interp_filter_selected[ref_frame],
3558 cpi->interp_filter_selected[ref_frame - 1],
3559 sizeof(cpi->interp_filter_selected[ref_frame - 1]));
3560 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003561 }
3562}
Yaowu Xuc27fc142016-08-22 16:08:15 -07003563
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003564#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003565// This function is used to shift the virtual indices of bwd reference
3566// frames as follows:
3567// BWD_REF -> ALT2_REF -> EXT_REF
3568// to clear a space to store the closest bwdref
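// Illustrative example (hypothetical index values): if remapped_ref_idx[] for
// {BWDREF, ALTREF2, EXTREF} is {5, 3, 7} on entry, it becomes {5, 5, 3}, and
// the caller then repoints the BWDREF slot at the newly coded frame.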
3569static INLINE void rshift_bwd_ref_frames(AV1_COMP *cpi) {
3570 // TODO(isbs): shift the scaled indices as well
Urvang Joshi03d8ebe2018-11-08 17:13:44 -08003571 static const int ordered_bwd[3] = { BWDREF_FRAME, ALTREF2_FRAME,
3572 EXTREF_FRAME };
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003573
3574 for (int i = 2; i > 0; --i) {
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003575 // [0] is allocated to the current coded frame, i.e. bwdref
3576 memcpy(cpi->interp_filter_selected[ordered_bwd[i]],
3577 cpi->interp_filter_selected[ordered_bwd[i - 1]],
3578 sizeof(cpi->interp_filter_selected[ordered_bwd[i - 1]]));
3579
David Turnera21966b2018-12-05 14:48:49 +00003580 cpi->common.remapped_ref_idx[ordered_bwd[i] - LAST_FRAME] =
3581 cpi->common.remapped_ref_idx[ordered_bwd[i - 1] - LAST_FRAME];
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003582 }
3583}
3584
3585// This function is used to shift the virtual indices of bwd reference
3586// frames as follows:
3587// BWD_REF <- ALT2_REF <- EXT_REF
3588// to update the bwd reference frame for coding the next frame.
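// Illustrative example (hypothetical index values): if remapped_ref_idx[] for
// {BWDREF, ALTREF2, EXTREF} is {5, 3, 7} on entry, it becomes {3, 7, 7}, so
// BWDREF now maps to what was ALTREF2, the next-nearest future reference.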
3589static INLINE void lshift_bwd_ref_frames(AV1_COMP *cpi) {
3590 // TODO(isbs): shift the scaled indices as well
Urvang Joshi03d8ebe2018-11-08 17:13:44 -08003591 static const int ordered_bwd[3] = { BWDREF_FRAME, ALTREF2_FRAME,
3592 EXTREF_FRAME };
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003593
3594 for (int i = 0; i < 2; ++i) {
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003595 // [0] is allocated to the current coded frame, i.e. bwdref
3596 memcpy(cpi->interp_filter_selected[ordered_bwd[i]],
3597 cpi->interp_filter_selected[ordered_bwd[i + 1]],
3598 sizeof(cpi->interp_filter_selected[ordered_bwd[i + 1]]));
3599
David Turnera21966b2018-12-05 14:48:49 +00003600 cpi->common.remapped_ref_idx[ordered_bwd[i] - LAST_FRAME] =
3601 cpi->common.remapped_ref_idx[ordered_bwd[i + 1] - LAST_FRAME];
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003602 }
3603}
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003604#endif // USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003605
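// Called after the current frame has been encoded: updates cm->ref_frame_map
// and the virtual index remapping (cm->remapped_ref_idx) according to the
// refresh_* flags, covering key frames / S-frames, the preserved-GF <-> ARF
// swap, show_existing internal ARFs, and the LAST/BWDREF shift rules below.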
Zoe Liu8dd1c982017-09-11 10:14:35 -07003606static void update_reference_frames(AV1_COMP *cpi) {
3607 AV1_COMMON *const cm = &cpi->common;
3608
Yaowu Xuc27fc142016-08-22 16:08:15 -07003609 // NOTE: Save the new show frame buffer index for --test-code=warn, i.e.,
3610  // to verify that there is no mismatch between encoder and decoder.
David Turnere7ebf902018-12-04 14:04:55 +00003611 if (cm->show_frame) cpi->last_show_frame_buf = cm->cur_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003612
Sarah Parker33005522018-07-27 14:46:25 -07003613  // In the case of a show_existing_frame, we will not send refresh flags
3614  // to the decoder. Any change in the reference frame buffer is done by
3615  // switching the virtual indices.
3616 if (cm->show_existing_frame) {
Sarah Parker29147cf2018-10-16 20:34:51 -07003617 // If we are not indicating to the decoder that this frame is
3618 // a show_existing_frame, which occurs in error_resilient mode,
Sarah Parkera9e19052018-10-18 17:49:26 -07003619 // we still want to refresh the LAST_FRAME when the current frame
3620 // was the source of an ext_arf.
3621 cpi->refresh_last_frame =
3622 !encode_show_existing_frame(cm) && cpi->rc.is_src_frame_ext_arf;
Sarah Parker33005522018-07-27 14:46:25 -07003623 cpi->refresh_golden_frame = 0;
3624 cpi->refresh_bwd_ref_frame = 0;
3625 cpi->refresh_alt2_ref_frame = 0;
3626 cpi->refresh_alt_ref_frame = 0;
3627
3628 cpi->rc.is_bwd_ref_frame = 0;
3629 cpi->rc.is_last_bipred_frame = 0;
3630 cpi->rc.is_bipred_frame = 0;
3631 }
3632
Yaowu Xuc27fc142016-08-22 16:08:15 -07003633 // At this point the new frame has been encoded.
3634 // If any buffer copy / swapping is signaled it should be done here.
Zoe Liubcef1e62018-04-06 20:56:11 -07003635
Sarah Parkerb9041612018-05-22 19:06:47 -07003636 // Only update all of the reference buffers if a KEY_FRAME is also a
3637 // show_frame. This ensures a fwd keyframe does not update all of the buffers
David Turnerd2a592e2018-11-16 14:59:31 +00003638 if ((cm->current_frame.frame_type == KEY_FRAME && cm->show_frame) ||
3639 frame_is_sframe(cm)) {
Zoe Liubcef1e62018-04-06 20:56:11 -07003640 for (int ref_frame = 0; ref_frame < REF_FRAMES; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00003641 assign_frame_buffer_p(&cm->ref_frame_map[cm->remapped_ref_idx[ref_frame]],
3642 cm->cur_frame);
Zoe Liubcef1e62018-04-06 20:56:11 -07003643 }
3644 return;
3645 }
3646
3647 if (av1_preserve_existing_gf(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003648 // We have decided to preserve the previously existing golden frame as our
3649 // new ARF frame. However, in the short term in function
Yaowu Xuf883b422016-08-30 14:01:10 -07003650 // av1_bitstream.c::get_refresh_mask() we left it in the GF slot and, if
Yaowu Xuc27fc142016-08-22 16:08:15 -07003651 // we're updating the GF with the current decoded frame, we save it to the
3652 // ARF slot instead.
3653    // We now have to update the ARF with the current frame and swap the GOLDEN
3654    // and ALTREF remapped indices so that, overall, we've stored the old GF in
3655    // the new ARF slot and, if we're updating the GF, the current frame becomes the new GF.
3656 int tmp;
3657
Wei-Ting Lina8c02452018-08-13 11:04:06 -07003658    // ARF in general is a better reference than the overlay. We should keep
3659    // ARF as a reference instead of replacing it with the overlay.
3660
3661 if (!cpi->preserve_arf_as_gld) {
David Turnere7ebf902018-12-04 14:04:55 +00003662 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003663 &cm->ref_frame_map[get_ref_frame_map_idx(cm, ALTREF_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003664 cm->cur_frame);
Wei-Ting Lina8c02452018-08-13 11:04:06 -07003665 }
3666
David Turnera21966b2018-12-05 14:48:49 +00003667 tmp = get_ref_frame_map_idx(cm, ALTREF_FRAME);
3668 cm->remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] =
3669 get_ref_frame_map_idx(cm, GOLDEN_FRAME);
3670 cm->remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] = tmp;
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003671
3672 // TODO(zoeliu): Do we need to copy cpi->interp_filter_selected[0] over to
3673 // cpi->interp_filter_selected[GOLDEN_FRAME]?
Sarah Parker7a9bb782018-10-11 14:52:42 -07003674 } else if (cpi->rc.is_src_frame_ext_arf && encode_show_existing_frame(cm)) {
Wei-Ting Linb72453f2018-06-26 14:05:38 -07003675#if CONFIG_DEBUG
3676 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
3677 assert(gf_group->update_type[gf_group->index] == INTNL_OVERLAY_UPDATE);
3678#endif
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003679#if USE_SYMM_MULTI_LAYER
3680 const int bwdref_to_show =
3681 (cpi->new_bwdref_update_rule == 1) ? BWDREF_FRAME : ALTREF2_FRAME;
3682#else
3683 const int bwdref_to_show = ALTREF2_FRAME;
3684#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003685 // Deal with the special case for showing existing internal ALTREF_FRAME
3686 // Refresh the LAST_FRAME with the ALTREF_FRAME and retire the LAST3_FRAME
3687 // by updating the virtual indices.
David Turnera21966b2018-12-05 14:48:49 +00003688 const int last3_remapped_idx = get_ref_frame_map_idx(cm, LAST3_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003689 shift_last_ref_frames(cpi);
Zoe Liue9b15e22017-07-19 15:53:01 -07003690
David Turnera21966b2018-12-05 14:48:49 +00003691 cm->remapped_ref_idx[LAST_FRAME - LAST_FRAME] =
3692 get_ref_frame_map_idx(cm, bwdref_to_show);
Zoe Liue9b15e22017-07-19 15:53:01 -07003693
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003694 memcpy(cpi->interp_filter_selected[LAST_FRAME],
3695 cpi->interp_filter_selected[bwdref_to_show],
3696 sizeof(cpi->interp_filter_selected[bwdref_to_show]));
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003697#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003698 if (cpi->new_bwdref_update_rule == 1) {
3699 lshift_bwd_ref_frames(cpi);
3700 // pass outdated forward reference frame (previous LAST3) to the
3701 // spared space
David Turnera21966b2018-12-05 14:48:49 +00003702 cm->remapped_ref_idx[EXTREF_FRAME - LAST_FRAME] = last3_remapped_idx;
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003703 } else {
3704#endif
David Turnera21966b2018-12-05 14:48:49 +00003705 cm->remapped_ref_idx[bwdref_to_show - LAST_FRAME] = last3_remapped_idx;
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003706#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003707 }
3708#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003709 } else { /* For non key/golden frames */
Zoe Liue9b15e22017-07-19 15:53:01 -07003710 // === ALTREF_FRAME ===
Yaowu Xuc27fc142016-08-22 16:08:15 -07003711 if (cpi->refresh_alt_ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00003712 int arf_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
David Turnere7ebf902018-12-04 14:04:55 +00003713 assign_frame_buffer_p(&cm->ref_frame_map[arf_idx], cm->cur_frame);
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003714
3715 memcpy(cpi->interp_filter_selected[ALTREF_FRAME],
3716 cpi->interp_filter_selected[0],
3717 sizeof(cpi->interp_filter_selected[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003718 }
3719
Zoe Liue9b15e22017-07-19 15:53:01 -07003720 // === GOLDEN_FRAME ===
Yaowu Xuc27fc142016-08-22 16:08:15 -07003721 if (cpi->refresh_golden_frame) {
David Turnere7ebf902018-12-04 14:04:55 +00003722 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003723 &cm->ref_frame_map[get_ref_frame_map_idx(cm, GOLDEN_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003724 cm->cur_frame);
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003725
3726 memcpy(cpi->interp_filter_selected[GOLDEN_FRAME],
3727 cpi->interp_filter_selected[0],
3728 sizeof(cpi->interp_filter_selected[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003729 }
3730
Zoe Liue9b15e22017-07-19 15:53:01 -07003731 // === BWDREF_FRAME ===
Yaowu Xuc27fc142016-08-22 16:08:15 -07003732 if (cpi->refresh_bwd_ref_frame) {
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003733#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003734 if (cpi->new_bwdref_update_rule) {
3735 // We shift the backward reference frame as follows:
3736 // BWDREF -> ALTREF2 -> EXTREF
3737 // and assign the newly coded frame to BWDREF so that it always
3738 // keeps the nearest future frame
David Turnera21966b2018-12-05 14:48:49 +00003739 const int tmp = get_ref_frame_map_idx(cm, EXTREF_FRAME);
David Turnere7ebf902018-12-04 14:04:55 +00003740 assign_frame_buffer_p(&cm->ref_frame_map[tmp], cm->cur_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003741
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003742 rshift_bwd_ref_frames(cpi);
David Turnera21966b2018-12-05 14:48:49 +00003743 cm->remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = tmp;
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003744 } else {
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003745#endif // USE_SYMM_MULTI_LAYER
David Turnere7ebf902018-12-04 14:04:55 +00003746 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003747 &cm->ref_frame_map[get_ref_frame_map_idx(cm, BWDREF_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003748 cm->cur_frame);
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003749#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003750 }
3751#endif
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003752 memcpy(cpi->interp_filter_selected[BWDREF_FRAME],
3753 cpi->interp_filter_selected[0],
3754 sizeof(cpi->interp_filter_selected[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003755 }
Zoe Liue9b15e22017-07-19 15:53:01 -07003756
Zoe Liue9b15e22017-07-19 15:53:01 -07003757 // === ALTREF2_FRAME ===
3758 if (cpi->refresh_alt2_ref_frame) {
David Turnere7ebf902018-12-04 14:04:55 +00003759 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003760 &cm->ref_frame_map[get_ref_frame_map_idx(cm, ALTREF2_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003761 cm->cur_frame);
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003762
3763 memcpy(cpi->interp_filter_selected[ALTREF2_FRAME],
3764 cpi->interp_filter_selected[0],
3765 sizeof(cpi->interp_filter_selected[0]));
Zoe Liue9b15e22017-07-19 15:53:01 -07003766 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003767 }
3768
3769 if (cpi->refresh_last_frame) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003770 // NOTE(zoeliu): We have two layers of mapping (1) from the per-frame
3771 // reference to the reference frame buffer virtual index; and then (2) from
David Turnere7ebf902018-12-04 14:04:55 +00003772 // the virtual index to the reference frame buffer (RefCntBuffer):
Yaowu Xuc27fc142016-08-22 16:08:15 -07003773 //
Urvang Joshi4d9f15f2018-11-05 15:26:22 -08003774 // LAST_FRAME, ..., EXTREF_FRAME
3775 // | |
3776 // v v
3777 // remapped_ref_idx[LAST_FRAME - 1], ..., remapped_ref_idx[EXTREF_FRAME - 1]
3778 // | |
3779 // v v
3780 // ref_frame_map[], ..., ref_frame_map[]
Yaowu Xuc27fc142016-08-22 16:08:15 -07003781 //
3782 // When refresh_last_frame is set, it is intended to retire LAST3_FRAME,
3783 // have the other 2 LAST reference frames shifted as follows:
3784 // LAST_FRAME -> LAST2_FRAME -> LAST3_FRAME
3785 // , and then have LAST_FRAME refreshed by the newly coded frame.
3786 //
3787 // To fulfill it, the decoder will be notified to execute following 2 steps:
3788 //
3789 // (a) To change ref_frame_map[] and have the virtual index of LAST3_FRAME
3790 // to point to the newly coded frame, i.e.
David Turnere7ebf902018-12-04 14:04:55 +00003791    //           ref_frame_map[remapped_ref_idx[LAST3_FRAME - 1]] => cur_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003792 //
3793 // (b) To change the 1st layer mapping to have LAST_FRAME mapped to the
3794 // original virtual index of LAST3_FRAME and have the other mappings
3795 // shifted as follows:
Urvang Joshi4d9f15f2018-11-05 15:26:22 -08003796 // LAST_FRAME, LAST2_FRAME, LAST3_FRAME
3797 // | | |
3798 // v v v
3799 // remapped_ref_idx[2], remapped_ref_idx[0], remapped_ref_idx[1]
David Turnere7ebf902018-12-04 14:04:55 +00003800 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003801 &cm->ref_frame_map[get_ref_frame_map_idx(cm, LAST3_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003802 cm->cur_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003803
David Turnera21966b2018-12-05 14:48:49 +00003804 int last3_remapped_idx = get_ref_frame_map_idx(cm, LAST3_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003805
Zoe Liubcef1e62018-04-06 20:56:11 -07003806 shift_last_ref_frames(cpi);
David Turnera21966b2018-12-05 14:48:49 +00003807 cm->remapped_ref_idx[LAST_FRAME - LAST_FRAME] = last3_remapped_idx;
Zoe Liubcef1e62018-04-06 20:56:11 -07003808
Sarah Parker5336b9c2018-10-18 11:34:20 -07003809 assert(!encode_show_existing_frame(cm));
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003810 memcpy(cpi->interp_filter_selected[LAST_FRAME],
3811 cpi->interp_filter_selected[0],
3812 sizeof(cpi->interp_filter_selected[0]));
Zoe Liubcef1e62018-04-06 20:56:11 -07003813
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003814 // If the new structure is used, we will always have overlay frames coupled
3815 // with bwdref frames. Therefore, we won't have to perform this update
3816 // in advance (we do this update when the overlay frame shows up).
3817#if USE_SYMM_MULTI_LAYER
3818 if (cpi->new_bwdref_update_rule == 0 && cpi->rc.is_last_bipred_frame) {
3819#else
Zoe Liubcef1e62018-04-06 20:56:11 -07003820 if (cpi->rc.is_last_bipred_frame) {
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003821#endif
Zoe Liubcef1e62018-04-06 20:56:11 -07003822 // Refresh the LAST_FRAME with the BWDREF_FRAME and retire the
3823 // LAST3_FRAME by updating the virtual indices.
3824 //
3825 // NOTE: The source frame for BWDREF does not have a holding position as
3826 // the OVERLAY frame for ALTREF's. Hence, to resolve the reference
3827 // virtual index reshuffling for BWDREF, the encoder always
3828 // specifies a LAST_BIPRED right before BWDREF and completes the
3829 // reshuffling job accordingly.
David Turnera21966b2018-12-05 14:48:49 +00003830 last3_remapped_idx = get_ref_frame_map_idx(cm, LAST3_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003831
3832 shift_last_ref_frames(cpi);
David Turnera21966b2018-12-05 14:48:49 +00003833 cm->remapped_ref_idx[LAST_FRAME - LAST_FRAME] =
3834 get_ref_frame_map_idx(cm, BWDREF_FRAME);
3835 cm->remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = last3_remapped_idx;
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003836
3837 memcpy(cpi->interp_filter_selected[LAST_FRAME],
3838 cpi->interp_filter_selected[BWDREF_FRAME],
3839 sizeof(cpi->interp_filter_selected[BWDREF_FRAME]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003840 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003841 }
3842
3843#if DUMP_REF_FRAME_IMAGES == 1
3844 // Dump out all reference frame images.
3845 dump_ref_frame_images(cpi);
3846#endif // DUMP_REF_FRAME_IMAGES
3847}
3848
David Turnere7ebf902018-12-04 14:04:55 +00003849static INLINE void alloc_frame_mvs(AV1_COMMON *const cm, RefCntBuffer *buf) {
3850 assert(buf != NULL);
3851 ensure_mv_buffer(buf, cm);
3852 buf->width = cm->width;
3853 buf->height = cm->height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003854}
3855
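// For every reference enabled in cpi->ref_frame_flags, make sure a version at
// the current coded frame size exists: if the reference dimensions differ, it
// is rescaled into a frame buffer tracked by cpi->scaled_ref_buf; otherwise
// the existing buffer is reused and its reference count is incremented.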
Cheng Chen46f30c72017-09-07 11:13:33 -07003856static void scale_references(AV1_COMP *cpi) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003857 AV1_COMMON *cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003858 const int num_planes = av1_num_planes(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003859 MV_REFERENCE_FRAME ref_frame;
Yaowu Xuf883b422016-08-30 14:01:10 -07003860 const AOM_REFFRAME ref_mask[INTER_REFS_PER_FRAME] = {
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02003861 AOM_LAST_FLAG, AOM_LAST2_FLAG, AOM_LAST3_FLAG, AOM_GOLD_FLAG,
3862 AOM_BWD_FLAG, AOM_ALT2_FLAG, AOM_ALT_FLAG
Yaowu Xuc27fc142016-08-22 16:08:15 -07003863 };
3864
3865 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003866 // Need to convert from AOM_REFFRAME to index into ref_mask (subtract 1).
Yaowu Xuc27fc142016-08-22 16:08:15 -07003867 if (cpi->ref_frame_flags & ref_mask[ref_frame - 1]) {
3868 BufferPool *const pool = cm->buffer_pool;
3869 const YV12_BUFFER_CONFIG *const ref =
David Turnera21966b2018-12-05 14:48:49 +00003870 get_ref_frame_yv12_buf(cm, ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003871
3872 if (ref == NULL) {
David Turnere7ebf902018-12-04 14:04:55 +00003873 cpi->scaled_ref_buf[ref_frame - 1] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003874 continue;
3875 }
3876
Yaowu Xuc27fc142016-08-22 16:08:15 -07003877 if (ref->y_crop_width != cm->width || ref->y_crop_height != cm->height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003878 int force_scaling = 0;
David Turnere7ebf902018-12-04 14:04:55 +00003879 RefCntBuffer *new_fb = cpi->scaled_ref_buf[ref_frame - 1];
3880 if (new_fb == NULL) {
3881 const int new_fb_idx = get_free_fb(cm);
3882 if (new_fb_idx == INVALID_IDX) {
Wan-Teh Chang4a8c0042018-10-05 09:41:52 -07003883 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
3884 "Unable to find free frame buffer");
David Turnere7ebf902018-12-04 14:04:55 +00003885 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003886 force_scaling = 1;
David Turnere7ebf902018-12-04 14:04:55 +00003887 new_fb = &pool->frame_bufs[new_fb_idx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003888 }
David Turnere7ebf902018-12-04 14:04:55 +00003889
3890 if (force_scaling || new_fb->buf.y_crop_width != cm->width ||
3891 new_fb->buf.y_crop_height != cm->height) {
Yaowu Xu671f2bd2016-09-30 15:07:57 -07003892 if (aom_realloc_frame_buffer(
David Turnere7ebf902018-12-04 14:04:55 +00003893 &new_fb->buf, cm->width, cm->height,
Urvang Joshi20cf30e2018-07-19 02:33:58 -07003894 cm->seq_params.subsampling_x, cm->seq_params.subsampling_y,
Satish Kumar Suman29909962019-01-09 10:31:21 +05303895 cm->seq_params.use_highbitdepth, cpi->oxcf.border_in_pixels,
Wan-Teh Chang41d286f2018-10-03 11:43:03 -07003896 cm->byte_alignment, NULL, NULL, NULL)) {
3897 if (force_scaling) {
3898 // Release the reference acquired in the get_free_fb() call above.
David Turnere7ebf902018-12-04 14:04:55 +00003899 --new_fb->ref_count;
Wan-Teh Chang41d286f2018-10-03 11:43:03 -07003900 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003901 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003902 "Failed to allocate frame buffer");
Wan-Teh Chang41d286f2018-10-03 11:43:03 -07003903 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07003904 av1_resize_and_extend_frame(
David Turnere7ebf902018-12-04 14:04:55 +00003905 ref, &new_fb->buf, (int)cm->seq_params.bit_depth, num_planes);
3906 cpi->scaled_ref_buf[ref_frame - 1] = new_fb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003907 alloc_frame_mvs(cm, new_fb);
3908 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003909 } else {
David Turnera21966b2018-12-05 14:48:49 +00003910 RefCntBuffer *buf = get_ref_frame_buf(cm, ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003911 buf->buf.y_crop_width = ref->y_crop_width;
3912 buf->buf.y_crop_height = ref->y_crop_height;
David Turnere7ebf902018-12-04 14:04:55 +00003913 cpi->scaled_ref_buf[ref_frame - 1] = buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003914 ++buf->ref_count;
3915 }
3916 } else {
David Turnere7ebf902018-12-04 14:04:55 +00003917 if (cpi->oxcf.pass != 0) cpi->scaled_ref_buf[ref_frame - 1] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003918 }
3919 }
3920}
3921
Yaowu Xuf883b422016-08-30 14:01:10 -07003922static void release_scaled_references(AV1_COMP *cpi) {
Imdad Sardharwalladadaba62018-02-23 12:06:56 +00003923 // TODO(isbs): only refresh the necessary frames, rather than all of them
David Turnere7ebf902018-12-04 14:04:55 +00003924 for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3925 RefCntBuffer *const buf = cpi->scaled_ref_buf[i];
3926 if (buf != NULL) {
Imdad Sardharwalladadaba62018-02-23 12:06:56 +00003927 --buf->ref_count;
David Turnere7ebf902018-12-04 14:04:55 +00003928 cpi->scaled_ref_buf[i] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003929 }
3930 }
3931}
3932
Yaowu Xuf883b422016-08-30 14:01:10 -07003933static void set_mv_search_params(AV1_COMP *cpi) {
3934 const AV1_COMMON *const cm = &cpi->common;
3935 const unsigned int max_mv_def = AOMMIN(cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003936
3937 // Default based on max resolution.
Yaowu Xuf883b422016-08-30 14:01:10 -07003938 cpi->mv_step_param = av1_init_search_range(max_mv_def);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003939
3940 if (cpi->sf.mv.auto_mv_step_size) {
3941 if (frame_is_intra_only(cm)) {
3942 // Initialize max_mv_magnitude for use in the first INTER frame
3943 // after a key/intra-only frame.
3944 cpi->max_mv_magnitude = max_mv_def;
3945 } else {
3946 if (cm->show_frame) {
3947 // Allow mv_steps to correspond to twice the max mv magnitude found
3948 // in the previous frame, capped by the default max_mv_magnitude based
3949 // on resolution.
Yaowu Xuf883b422016-08-30 14:01:10 -07003950 cpi->mv_step_param = av1_init_search_range(
3951 AOMMIN(max_mv_def, 2 * cpi->max_mv_magnitude));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003952 }
3953 cpi->max_mv_magnitude = 0;
3954 }
3955 }
3956}
3957
Yaowu Xuf883b422016-08-30 14:01:10 -07003958static void set_size_independent_vars(AV1_COMP *cpi) {
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003959 int i;
Debargha Mukherjeedf713102018-10-02 12:33:32 -07003960 AV1_COMMON *cm = &cpi->common;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003961 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
Debargha Mukherjeedf713102018-10-02 12:33:32 -07003962 cm->global_motion[i] = default_warp_params;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003963 }
3964 cpi->global_motion_search_done = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07003965 av1_set_speed_features_framesize_independent(cpi);
3966 av1_set_rd_speed_thresholds(cpi);
Debargha Mukherjeedf713102018-10-02 12:33:32 -07003967 cm->interp_filter = SWITCHABLE;
3968 cm->switchable_motion_mode = 1;
3969
3970 if (frame_is_intra_only(cm)) {
3971 if (cm->seq_params.force_screen_content_tools == 2) {
3972 cm->allow_screen_content_tools =
3973 cpi->oxcf.content == AOM_CONTENT_SCREEN ||
3974 is_screen_content(cpi->source->y_buffer,
3975 cpi->source->flags & YV12_FLAG_HIGHBITDEPTH,
3976 cm->seq_params.bit_depth, cpi->source->y_stride,
3977 cpi->source->y_width, cpi->source->y_height);
3978 } else {
3979 cm->allow_screen_content_tools =
3980 cm->seq_params.force_screen_content_tools;
3981 }
3982 }
Aniket Dhokf6d7ed82019-01-04 14:05:57 +05303983 cpi->is_screen_content_type = (cm->allow_screen_content_tools != 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003984}
3985
Yaowu Xuf883b422016-08-30 14:01:10 -07003986static void set_size_dependent_vars(AV1_COMP *cpi, int *q, int *bottom_index,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003987 int *top_index) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003988 AV1_COMMON *const cm = &cpi->common;
3989 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003990
3991 // Setup variables that depend on the dimensions of the frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07003992 av1_set_speed_features_framesize_dependent(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003993
Sebastien Alaiwan41cae6a2018-01-12 12:22:29 +01003994 // Decide q and q bounds.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07003995 *q = av1_rc_pick_q_and_bounds(cpi, cm->width, cm->height, bottom_index,
3996 top_index);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003997
James Zern01a9d702017-08-25 19:09:33 +00003998 if (!frame_is_intra_only(cm)) {
RogerZhou3b635242017-09-19 10:06:46 -07003999 set_high_precision_mv(cpi, (*q) < HIGH_PRECISION_MV_QTHRESH,
RogerZhou10a03802017-10-26 11:49:48 -07004000 cpi->common.cur_frame_force_integer_mv);
James Zern01a9d702017-08-25 19:09:33 +00004001 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004002
4003 // Configure experimental use of segmentation for enhanced coding of
4004 // static regions if indicated.
4005 // Only allowed in the second pass of a two pass encode, as it requires
4006 // lagged coding, and if the relevant speed feature flag is set.
4007 if (oxcf->pass == 2 && cpi->sf.static_segmentation)
4008 configure_static_seg_features(cpi);
4009}
4010
Yaowu Xuf883b422016-08-30 14:01:10 -07004011static void init_motion_estimation(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004012 int y_stride = cpi->scaled_source.y_stride;
4013
4014 if (cpi->sf.mv.search_method == NSTEP) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004015 av1_init3smotion_compensation(&cpi->ss_cfg, y_stride);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004016 } else if (cpi->sf.mv.search_method == DIAMOND) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004017 av1_init_dsmotion_compensation(&cpi->ss_cfg, y_stride);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004018 }
4019}
4020
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07004021#define COUPLED_CHROMA_FROM_LUMA_RESTORATION 0
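// Chooses the loop-restoration unit size: luma uses RESTORATION_UNITSIZE_MAX
// for frames larger than 352x288 and half of it otherwise; chroma reuses the
// luma size, shifted by the subsampling factor only when
// COUPLED_CHROMA_FROM_LUMA_RESTORATION is enabled.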
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01004022static void set_restoration_unit_size(int width, int height, int sx, int sy,
4023 RestorationInfo *rst) {
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08004024 (void)width;
4025 (void)height;
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07004026 (void)sx;
4027 (void)sy;
4028#if COUPLED_CHROMA_FROM_LUMA_RESTORATION
4029 int s = AOMMIN(sx, sy);
4030#else
4031 int s = 0;
4032#endif // !COUPLED_CHROMA_FROM_LUMA_RESTORATION
4033
Debargha Mukherjee5f7f3672017-08-12 10:22:49 -07004034 if (width * height > 352 * 288)
Urvang Joshi813186b2018-03-08 15:38:46 -08004035 rst[0].restoration_unit_size = RESTORATION_UNITSIZE_MAX;
Debargha Mukherjee5f7f3672017-08-12 10:22:49 -07004036 else
Urvang Joshi813186b2018-03-08 15:38:46 -08004037 rst[0].restoration_unit_size = (RESTORATION_UNITSIZE_MAX >> 1);
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01004038 rst[1].restoration_unit_size = rst[0].restoration_unit_size >> s;
4039 rst[2].restoration_unit_size = rst[1].restoration_unit_size;
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08004040}
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08004041
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05304042static void init_ref_frame_bufs(AV1_COMP *cpi) {
4043 AV1_COMMON *const cm = &cpi->common;
Cheng Chen46f30c72017-09-07 11:13:33 -07004044 int i;
4045 BufferPool *const pool = cm->buffer_pool;
Jack Haughtonddb80602018-11-21 16:41:49 +00004046 cm->cur_frame = NULL;
Cheng Chen46f30c72017-09-07 11:13:33 -07004047 for (i = 0; i < REF_FRAMES; ++i) {
David Turnere7ebf902018-12-04 14:04:55 +00004048 cm->ref_frame_map[i] = NULL;
Wan-Teh Changd05e0332018-10-03 12:00:43 -07004049 }
4050 for (i = 0; i < FRAME_BUFFERS; ++i) {
Cheng Chen46f30c72017-09-07 11:13:33 -07004051 pool->frame_bufs[i].ref_count = 0;
4052 }
RogerZhou86902d02018-02-28 15:29:16 -08004053 if (cm->seq_params.force_screen_content_tools) {
Hui Su2d5fd742018-02-21 18:10:37 -08004054 for (i = 0; i < FRAME_BUFFERS; ++i) {
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05304055 av1_hash_table_init(&pool->frame_bufs[i].hash_table, &cpi->td.mb);
Hui Su2d5fd742018-02-21 18:10:37 -08004056 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004057 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004058}
4059
Yaowu Xud3e7c682017-12-21 14:08:25 -08004060static void check_initial_width(AV1_COMP *cpi, int use_highbitdepth,
Cheng Chen46f30c72017-09-07 11:13:33 -07004061 int subsampling_x, int subsampling_y) {
4062 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004063 SequenceHeader *const seq_params = &cm->seq_params;
Cheng Chen46f30c72017-09-07 11:13:33 -07004064
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004065 if (!cpi->initial_width || seq_params->use_highbitdepth != use_highbitdepth ||
4066 seq_params->subsampling_x != subsampling_x ||
4067 seq_params->subsampling_y != subsampling_y) {
4068 seq_params->subsampling_x = subsampling_x;
4069 seq_params->subsampling_y = subsampling_y;
4070 seq_params->use_highbitdepth = use_highbitdepth;
Cheng Chen46f30c72017-09-07 11:13:33 -07004071
4072 alloc_raw_frame_buffers(cpi);
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05304073 init_ref_frame_bufs(cpi);
Cheng Chen46f30c72017-09-07 11:13:33 -07004074 alloc_util_frame_buffers(cpi);
4075
4076 init_motion_estimation(cpi); // TODO(agrange) This can be removed.
4077
4078 cpi->initial_width = cm->width;
4079 cpi->initial_height = cm->height;
4080 cpi->initial_mbs = cm->MBs;
4081 }
4082}
4083
4084// Returns 1 if the assigned width or height was <= 0.
4085static int set_size_literal(AV1_COMP *cpi, int width, int height) {
4086 AV1_COMMON *cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004087 const int num_planes = av1_num_planes(cm);
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004088 check_initial_width(cpi, cm->seq_params.use_highbitdepth,
4089 cm->seq_params.subsampling_x,
4090 cm->seq_params.subsampling_y);
Cheng Chen46f30c72017-09-07 11:13:33 -07004091
4092 if (width <= 0 || height <= 0) return 1;
4093
4094 cm->width = width;
Cheng Chen46f30c72017-09-07 11:13:33 -07004095 cm->height = height;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004096
4097 if (cpi->initial_width && cpi->initial_height &&
4098 (cm->width > cpi->initial_width || cm->height > cpi->initial_height)) {
4099 av1_free_context_buffers(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004100 av1_free_pc_tree(&cpi->td, num_planes);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004101 alloc_compressor_data(cpi);
4102 realloc_segmentation_maps(cpi);
4103 cpi->initial_width = cpi->initial_height = 0;
Cheng Chen46f30c72017-09-07 11:13:33 -07004104 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004105 update_frame_size(cpi);
4106
4107 return 0;
4108}
4109
Fergus Simpsonbc189932017-05-16 17:02:39 -07004110static void set_frame_size(AV1_COMP *cpi, int width, int height) {
Fergus Simpsonbc189932017-05-16 17:02:39 -07004111 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004112 const SequenceHeader *const seq_params = &cm->seq_params;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004113 const int num_planes = av1_num_planes(cm);
Fergus Simpsonbc189932017-05-16 17:02:39 -07004114 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004115 int ref_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004116
Fergus Simpsonbc189932017-05-16 17:02:39 -07004117 if (width != cm->width || height != cm->height) {
Fergus Simpson3502d082017-04-10 12:25:07 -07004118 // There has been a change in the encoded frame size
Cheng Chen46f30c72017-09-07 11:13:33 -07004119 set_size_literal(cpi, width, height);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004120 set_mv_search_params(cpi);
Urvang Joshic8b52d52018-03-23 13:16:51 -07004121 // Recalculate 'all_lossless' in case super-resolution was (un)selected.
Cheng Chen09c83a52018-06-05 12:27:36 -07004122 cm->all_lossless = cm->coded_lossless && !av1_superres_scaled(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004123 }
4124
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004125 if (cpi->oxcf.pass == 2) {
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004126 av1_set_target_rate(cpi, cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004127 }
4128
David Turnere7ebf902018-12-04 14:04:55 +00004129 alloc_frame_mvs(cm, cm->cur_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004130
Cherma Rajan A71d20db2018-04-27 11:15:32 +05304131 // Allocate above context buffers
Cherma Rajan Af1479082018-05-09 14:26:34 +05304132 if (cm->num_allocated_above_context_planes < av1_num_planes(cm) ||
4133 cm->num_allocated_above_context_mi_col < cm->mi_cols ||
Cherma Rajan A71d20db2018-04-27 11:15:32 +05304134 cm->num_allocated_above_contexts < cm->tile_rows) {
4135 av1_free_above_context_buffers(cm, cm->num_allocated_above_contexts);
4136 if (av1_alloc_above_context_buffers(cm, cm->tile_rows))
4137 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
4138 "Failed to allocate context buffers");
4139 }
4140
Yaowu Xuc27fc142016-08-22 16:08:15 -07004141 // Reset the frame pointers to the current frame size.
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004142 if (aom_realloc_frame_buffer(
Jack Haughtonddb80602018-11-21 16:41:49 +00004143 &cm->cur_frame->buf, cm->width, cm->height, seq_params->subsampling_x,
4144 seq_params->subsampling_y, seq_params->use_highbitdepth,
Satish Kumar Suman29909962019-01-09 10:31:21 +05304145 cpi->oxcf.border_in_pixels, cm->byte_alignment, NULL, NULL, NULL))
Yaowu Xuf883b422016-08-30 14:01:10 -07004146 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004147 "Failed to allocate frame buffer");
4148
Rupert Swarbrickf88bc042017-10-18 10:45:51 +01004149 const int frame_width = cm->superres_upscaled_width;
4150 const int frame_height = cm->superres_upscaled_height;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004151 set_restoration_unit_size(frame_width, frame_height,
4152 seq_params->subsampling_x,
4153 seq_params->subsampling_y, cm->rst_info);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004154 for (int i = 0; i < num_planes; ++i)
Rupert Swarbrick1a96c3f2017-10-24 11:55:00 +01004155 cm->rst_info[i].frame_restoration_type = RESTORE_NONE;
Rupert Swarbrickf88bc042017-10-18 10:45:51 +01004156
4157 av1_alloc_restoration_buffers(cm);
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004158 alloc_util_frame_buffers(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004159 init_motion_estimation(cpi);
4160
4161 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00004162 RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
David Turnere7ebf902018-12-04 14:04:55 +00004163 if (buf != NULL) {
David Turnera21966b2018-12-05 14:48:49 +00004164 struct scale_factors *sf = get_ref_scale_factors(cm, ref_frame);
4165 av1_setup_scale_factors_for_frame(sf, buf->buf.y_crop_width,
David Turner1bcefb32018-11-19 17:54:00 +00004166 buf->buf.y_crop_height, cm->width,
Debargha Mukherjeee242a812018-03-07 21:43:09 -08004167 cm->height);
David Turnera21966b2018-12-05 14:48:49 +00004168 if (av1_is_scaled(sf)) aom_extend_frame_borders(&buf->buf, num_planes);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004169 }
4170 }
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004171
Hui Su5ebd8702018-01-08 18:09:20 -08004172 av1_setup_scale_factors_for_frame(&cm->sf_identity, cm->width, cm->height,
Debargha Mukherjeee242a812018-03-07 21:43:09 -08004173 cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004174
4175 set_ref_ptrs(cm, xd, LAST_FRAME, LAST_FRAME);
4176}
4177
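// Pick the resize denominator for the next frame from oxcf->resize_mode.
// SCALE_NUMERATOR (i.e. no resizing) is returned for first-pass encoding and
// for reduced still-picture headers.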
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004178static uint8_t calculate_next_resize_scale(const AV1_COMP *cpi) {
  // Arbitrary fixed seed for the pseudo-random resize pattern.
4180 static unsigned int seed = 56789;
4181 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Urvang Joshide71d142017-10-05 12:12:15 -07004182 if (oxcf->pass == 1) return SCALE_NUMERATOR;
4183 uint8_t new_denom = SCALE_NUMERATOR;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004184
Debargha Mukherjee2b7c2b32018-04-10 07:35:28 -07004185 if (cpi->common.seq_params.reduced_still_picture_hdr) return SCALE_NUMERATOR;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004186 switch (oxcf->resize_mode) {
Urvang Joshide71d142017-10-05 12:12:15 -07004187 case RESIZE_NONE: new_denom = SCALE_NUMERATOR; break;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004188 case RESIZE_FIXED:
David Turnerd2a592e2018-11-16 14:59:31 +00004189 if (cpi->common.current_frame.frame_type == KEY_FRAME)
Urvang Joshide71d142017-10-05 12:12:15 -07004190 new_denom = oxcf->resize_kf_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004191 else
Urvang Joshide71d142017-10-05 12:12:15 -07004192 new_denom = oxcf->resize_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004193 break;
Urvang Joshide71d142017-10-05 12:12:15 -07004194 case RESIZE_RANDOM: new_denom = lcg_rand16(&seed) % 9 + 8; break;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004195 default: assert(0);
4196 }
Urvang Joshide71d142017-10-05 12:12:15 -07004197 return new_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004198}
4199
Debargha Mukherjeee3cd5a52018-11-29 11:05:22 -08004200#define ENERGY_BY_Q2_THRESH 0.01
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004201#define ENERGY_BY_AC_THRESH 0.2
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004202
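// Map the measured horizontal-frequency energy profile to a superres
// denominator. Starting from the highest band, every band whose energy stays
// below min(threshq * q^2, threshp * energy[1]) allows one more step of
// downscaling, so the result ranges from SCALE_NUMERATOR (no scaling) up to
// 2 * SCALE_NUMERATOR (2:1).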
4203static uint8_t get_superres_denom_from_qindex_energy(int qindex, double *energy,
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004204 double threshq,
4205 double threshp) {
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004206 const double q = av1_convert_qindex_to_q(qindex, AOM_BITS_8);
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004207 const double tq = threshq * q * q;
4208 const double tp = threshp * energy[1];
4209 const double thresh = AOMMIN(tq, tp);
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004210 int k;
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004211 for (k = 16; k > 8; --k) {
4212 if (energy[k - 1] > thresh) break;
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004213 }
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004214 return 3 * SCALE_NUMERATOR - k;
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004215}
4216
4217static uint8_t get_superres_denom_for_qindex(const AV1_COMP *cpi, int qindex) {
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004218 double energy[16];
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004219 analyze_hor_freq(cpi, energy);
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004220 /*
4221 printf("\nenergy = [");
4222 for (int k = 1; k < 16; ++k) printf("%f, ", energy[k]);
4223 printf("]\n");
4224 */
4225 return get_superres_denom_from_qindex_energy(
4226 qindex, energy, ENERGY_BY_Q2_THRESH, ENERGY_BY_AC_THRESH);
Debargha Mukherjeef48b0d22018-11-20 12:23:43 -08004227}
4228
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004229static uint8_t calculate_next_superres_scale(AV1_COMP *cpi) {
  // Arbitrary fixed seed for the pseudo-random superres pattern.
4231 static unsigned int seed = 34567;
4232 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Urvang Joshide71d142017-10-05 12:12:15 -07004233 if (oxcf->pass == 1) return SCALE_NUMERATOR;
4234 uint8_t new_denom = SCALE_NUMERATOR;
Urvang Joshi2c92b072018-03-19 17:23:31 -07004235
4236 // Make sure that superres mode of the frame is consistent with the
4237 // sequence-level flag.
4238 assert(IMPLIES(oxcf->superres_mode != SUPERRES_NONE,
4239 cpi->common.seq_params.enable_superres));
4240 assert(IMPLIES(!cpi->common.seq_params.enable_superres,
4241 oxcf->superres_mode == SUPERRES_NONE));
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004242
4243 switch (oxcf->superres_mode) {
Urvang Joshide71d142017-10-05 12:12:15 -07004244 case SUPERRES_NONE: new_denom = SCALE_NUMERATOR; break;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004245 case SUPERRES_FIXED:
David Turnerd2a592e2018-11-16 14:59:31 +00004246 if (cpi->common.current_frame.frame_type == KEY_FRAME)
Urvang Joshide71d142017-10-05 12:12:15 -07004247 new_denom = oxcf->superres_kf_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004248 else
Urvang Joshide71d142017-10-05 12:12:15 -07004249 new_denom = oxcf->superres_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004250 break;
Urvang Joshide71d142017-10-05 12:12:15 -07004251 case SUPERRES_RANDOM: new_denom = lcg_rand16(&seed) % 9 + 8; break;
Urvang Joshif1fa6862018-01-08 16:39:33 -08004252 case SUPERRES_QTHRESH: {
Debargha Mukherjeedf713102018-10-02 12:33:32 -07004253 // Do not use superres when screen content tools are used.
4254 if (cpi->common.allow_screen_content_tools) break;
Debargha Mukherjee2b2c5fd2018-11-14 13:21:24 -08004255 if (oxcf->rc_mode == AOM_VBR || oxcf->rc_mode == AOM_CQ)
4256 av1_set_target_rate(cpi, cpi->oxcf.width, cpi->oxcf.height);
Urvang Joshi2c92b072018-03-19 17:23:31 -07004257 int bottom_index, top_index;
4258 const int q = av1_rc_pick_q_and_bounds(
4259 cpi, cpi->oxcf.width, cpi->oxcf.height, &bottom_index, &top_index);
Debargha Mukherjeef48b0d22018-11-20 12:23:43 -08004260
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004261 const int qthresh = (frame_is_intra_only(&cpi->common))
4262 ? oxcf->superres_kf_qthresh
4263 : oxcf->superres_qthresh;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004264 if (q < qthresh) {
Urvang Joshide71d142017-10-05 12:12:15 -07004265 new_denom = SCALE_NUMERATOR;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004266 } else {
Debargha Mukherjeee3cd5a52018-11-29 11:05:22 -08004267 new_denom = get_superres_denom_for_qindex(cpi, q);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004268 }
4269 break;
Urvang Joshif1fa6862018-01-08 16:39:33 -08004270 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004271 default: assert(0);
4272 }
Urvang Joshide71d142017-10-05 12:12:15 -07004273 return new_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004274}
4275
Urvang Joshide71d142017-10-05 12:12:15 -07004276static int dimension_is_ok(int orig_dim, int resized_dim, int denom) {
4277 return (resized_dim * SCALE_NUMERATOR >= orig_dim * denom / 2);
4278}
4279
4280static int dimensions_are_ok(int owidth, int oheight, size_params_type *rsz) {
Urvang Joshi94ad3702017-12-06 11:38:08 -08004281 // Only need to check the width, as scaling is horizontal only.
4282 (void)oheight;
4283 return dimension_is_ok(owidth, rsz->resize_width, rsz->superres_denom);
Urvang Joshide71d142017-10-05 12:12:15 -07004284}
4285
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004286static int validate_size_scales(RESIZE_MODE resize_mode,
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004287 SUPERRES_MODE superres_mode, int owidth,
4288 int oheight, size_params_type *rsz) {
Urvang Joshide71d142017-10-05 12:12:15 -07004289 if (dimensions_are_ok(owidth, oheight, rsz)) { // Nothing to do.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004290 return 1;
Urvang Joshide71d142017-10-05 12:12:15 -07004291 }
4292
Urvang Joshi69fde2e2017-10-09 15:34:18 -07004293 // Calculate current resize scale.
Urvang Joshide71d142017-10-05 12:12:15 -07004294 int resize_denom =
4295 AOMMAX(DIVIDE_AND_ROUND(owidth * SCALE_NUMERATOR, rsz->resize_width),
4296 DIVIDE_AND_ROUND(oheight * SCALE_NUMERATOR, rsz->resize_height));
4297
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004298 if (resize_mode != RESIZE_RANDOM && superres_mode == SUPERRES_RANDOM) {
Urvang Joshide71d142017-10-05 12:12:15 -07004299 // Alter superres scale as needed to enforce conformity.
4300 rsz->superres_denom =
4301 (2 * SCALE_NUMERATOR * SCALE_NUMERATOR) / resize_denom;
4302 if (!dimensions_are_ok(owidth, oheight, rsz)) {
4303 if (rsz->superres_denom > SCALE_NUMERATOR) --rsz->superres_denom;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004304 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004305 } else if (resize_mode == RESIZE_RANDOM && superres_mode != SUPERRES_RANDOM) {
Urvang Joshide71d142017-10-05 12:12:15 -07004306 // Alter resize scale as needed to enforce conformity.
4307 resize_denom =
4308 (2 * SCALE_NUMERATOR * SCALE_NUMERATOR) / rsz->superres_denom;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004309 rsz->resize_width = owidth;
4310 rsz->resize_height = oheight;
4311 av1_calculate_scaled_size(&rsz->resize_width, &rsz->resize_height,
Urvang Joshide71d142017-10-05 12:12:15 -07004312 resize_denom);
4313 if (!dimensions_are_ok(owidth, oheight, rsz)) {
4314 if (resize_denom > SCALE_NUMERATOR) {
4315 --resize_denom;
4316 rsz->resize_width = owidth;
4317 rsz->resize_height = oheight;
4318 av1_calculate_scaled_size(&rsz->resize_width, &rsz->resize_height,
4319 resize_denom);
4320 }
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004321 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004322 } else if (resize_mode == RESIZE_RANDOM && superres_mode == SUPERRES_RANDOM) {
Urvang Joshide71d142017-10-05 12:12:15 -07004323 // Alter both resize and superres scales as needed to enforce conformity.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004324 do {
Urvang Joshide71d142017-10-05 12:12:15 -07004325 if (resize_denom > rsz->superres_denom)
4326 --resize_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004327 else
Urvang Joshide71d142017-10-05 12:12:15 -07004328 --rsz->superres_denom;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004329 rsz->resize_width = owidth;
4330 rsz->resize_height = oheight;
4331 av1_calculate_scaled_size(&rsz->resize_width, &rsz->resize_height,
Urvang Joshide71d142017-10-05 12:12:15 -07004332 resize_denom);
4333 } while (!dimensions_are_ok(owidth, oheight, rsz) &&
4334 (resize_denom > SCALE_NUMERATOR ||
4335 rsz->superres_denom > SCALE_NUMERATOR));
  } else {
    // We are allowed to alter neither the resize scale nor the superres scale.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004338 return 0;
4339 }
Urvang Joshide71d142017-10-05 12:12:15 -07004340 return dimensions_are_ok(owidth, oheight, rsz);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004341}
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004342
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004343// Calculates resize and superres params for next frame
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004344size_params_type av1_calculate_next_size_params(AV1_COMP *cpi) {
4345 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Debargha Mukherjee3a4959f2018-02-26 15:34:03 -08004346 size_params_type rsz = { oxcf->width, oxcf->height, SCALE_NUMERATOR };
Urvang Joshide71d142017-10-05 12:12:15 -07004347 int resize_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004348 if (oxcf->pass == 1) return rsz;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004349 if (cpi->resize_pending_width && cpi->resize_pending_height) {
4350 rsz.resize_width = cpi->resize_pending_width;
4351 rsz.resize_height = cpi->resize_pending_height;
4352 cpi->resize_pending_width = cpi->resize_pending_height = 0;
4353 } else {
Urvang Joshide71d142017-10-05 12:12:15 -07004354 resize_denom = calculate_next_resize_scale(cpi);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004355 rsz.resize_width = cpi->oxcf.width;
4356 rsz.resize_height = cpi->oxcf.height;
4357 av1_calculate_scaled_size(&rsz.resize_width, &rsz.resize_height,
Urvang Joshide71d142017-10-05 12:12:15 -07004358 resize_denom);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004359 }
Urvang Joshide71d142017-10-05 12:12:15 -07004360 rsz.superres_denom = calculate_next_superres_scale(cpi);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004361 if (!validate_size_scales(oxcf->resize_mode, oxcf->superres_mode, oxcf->width,
4362 oxcf->height, &rsz))
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004363 assert(0 && "Invalid scale parameters");
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004364 return rsz;
4365}
4366
Urvang Joshi22b150b2019-01-10 14:32:32 -08004367static void setup_frame_size_from_params(AV1_COMP *cpi,
4368 const size_params_type *rsz) {
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004369 int encode_width = rsz->resize_width;
4370 int encode_height = rsz->resize_height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004371
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004372 AV1_COMMON *cm = &cpi->common;
4373 cm->superres_upscaled_width = encode_width;
4374 cm->superres_upscaled_height = encode_height;
Urvang Joshide71d142017-10-05 12:12:15 -07004375 cm->superres_scale_denominator = rsz->superres_denom;
Urvang Joshi69fde2e2017-10-09 15:34:18 -07004376 av1_calculate_scaled_superres_size(&encode_width, &encode_height,
4377 rsz->superres_denom);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004378 set_frame_size(cpi, encode_width, encode_height);
4379}
4380
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004381static void setup_frame_size(AV1_COMP *cpi) {
Urvang Joshi22b150b2019-01-10 14:32:32 -08004382 // Reset superres params from previous frame.
4383 cpi->common.superres_scale_denominator = SCALE_NUMERATOR;
4384 const size_params_type rsz = av1_calculate_next_size_params(cpi);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004385 setup_frame_size_from_params(cpi, &rsz);
4386}
4387
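// Upscale the reconstructed frame back to the superres-upscaled resolution
// and, when regular resizing is also active, downscale the source so that it
// matches that resolution.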
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004388static void superres_post_encode(AV1_COMP *cpi) {
4389 AV1_COMMON *cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004390 const int num_planes = av1_num_planes(cm);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004391
Cheng Chen09c83a52018-06-05 12:27:36 -07004392 if (!av1_superres_scaled(cm)) return;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004393
Urvang Joshid6b5d512018-03-20 13:34:38 -07004394 assert(cpi->oxcf.enable_superres);
4395 assert(!is_lossless_requested(&cpi->oxcf));
Urvang Joshic8b52d52018-03-23 13:16:51 -07004396 assert(!cm->all_lossless);
Urvang Joshid6b5d512018-03-20 13:34:38 -07004397
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004398 av1_superres_upscale(cm, NULL);
4399
4400 // If regular resizing is occurring the source will need to be downscaled to
4401 // match the upscaled superres resolution. Otherwise the original source is
4402 // used.
Cheng Chen09c83a52018-06-05 12:27:36 -07004403 if (!av1_resize_scaled(cm)) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004404 cpi->source = cpi->unscaled_source;
4405 if (cpi->last_source != NULL) cpi->last_source = cpi->unscaled_last_source;
4406 } else {
Fergus Simpsonabd43432017-06-12 15:54:43 -07004407 assert(cpi->unscaled_source->y_crop_width != cm->superres_upscaled_width);
4408 assert(cpi->unscaled_source->y_crop_height != cm->superres_upscaled_height);
Urvang Joshif1fa6862018-01-08 16:39:33 -08004409 // Do downscale. cm->(width|height) has been updated by
4410 // av1_superres_upscale
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004411 if (aom_realloc_frame_buffer(
4412 &cpi->scaled_source, cm->superres_upscaled_width,
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004413 cm->superres_upscaled_height, cm->seq_params.subsampling_x,
4414 cm->seq_params.subsampling_y, cm->seq_params.use_highbitdepth,
4415 AOM_BORDER_IN_PIXELS, cm->byte_alignment, NULL, NULL, NULL))
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004416 aom_internal_error(
4417 &cm->error, AOM_CODEC_MEM_ERROR,
4418 "Failed to reallocate scaled source buffer for superres");
4419 assert(cpi->scaled_source.y_crop_width == cm->superres_upscaled_width);
4420 assert(cpi->scaled_source.y_crop_height == cm->superres_upscaled_height);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004421 av1_resize_and_extend_frame(cpi->unscaled_source, &cpi->scaled_source,
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004422 (int)cm->seq_params.bit_depth, num_planes);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004423 cpi->source = &cpi->scaled_source;
4424 }
4425}
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004426
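// Pick and apply the in-loop filters on the reconstructed frame: deblocking,
// CDEF, the superres upscale and loop restoration, skipping any stage that is
// disabled in the sequence header or made redundant by lossless or
// large-scale-tile coding.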
4427static void loopfilter_frame(AV1_COMP *cpi, AV1_COMMON *cm) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004428 const int num_planes = av1_num_planes(cm);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004429 MACROBLOCKD *xd = &cpi->td.mb.e_mbd;
Urvang Joshi14072aa2018-03-21 17:43:36 -07004430
Urvang Joshic8b52d52018-03-23 13:16:51 -07004431 assert(IMPLIES(is_lossless_requested(&cpi->oxcf),
4432 cm->coded_lossless && cm->all_lossless));
4433
4434 const int no_loopfilter = cm->coded_lossless || cm->large_scale_tile;
4435 const int no_cdef =
Debargha Mukherjee98a311c2018-03-25 16:33:11 -07004436 !cm->seq_params.enable_cdef || cm->coded_lossless || cm->large_scale_tile;
4437 const int no_restoration = !cm->seq_params.enable_restoration ||
4438 cm->all_lossless || cm->large_scale_tile;
Urvang Joshi14072aa2018-03-21 17:43:36 -07004439
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004440 struct loopfilter *lf = &cm->lf;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004441
4442 if (no_loopfilter) {
Cheng Chen179479f2017-08-04 10:56:39 -07004443 lf->filter_level[0] = 0;
4444 lf->filter_level[1] = 0;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004445 } else {
4446 struct aom_usec_timer timer;
4447
4448 aom_clear_system_state();
4449
4450 aom_usec_timer_start(&timer);
4451
4452 av1_pick_filter_level(cpi->source, cpi, cpi->sf.lpf_pick);
4453
4454 aom_usec_timer_mark(&timer);
4455 cpi->time_pick_lpf += aom_usec_timer_elapsed(&timer);
4456 }
4457
Debargha Mukherjee2382b142018-02-26 14:31:32 -08004458 if (lf->filter_level[0] || lf->filter_level[1]) {
Deepa K G964e72e2018-05-16 16:56:01 +05304459 if (cpi->num_workers > 1)
David Turnerc29e1a92018-12-06 14:10:14 +00004460 av1_loop_filter_frame_mt(&cm->cur_frame->buf, cm, xd, 0, num_planes, 0,
Cheng Chene3600cd2018-09-21 18:45:42 -07004461#if LOOP_FILTER_BITMASK
4462 0,
4463#endif
Deepa K G964e72e2018-05-16 16:56:01 +05304464 cpi->workers, cpi->num_workers,
4465 &cpi->lf_row_sync);
4466 else
David Turnerc29e1a92018-12-06 14:10:14 +00004467 av1_loop_filter_frame(&cm->cur_frame->buf, cm, xd,
Cheng Chen84b09932018-08-12 17:35:13 -07004468#if LOOP_FILTER_BITMASK
4469 0,
Cheng Chen8ab1f442018-04-27 18:01:52 -07004470#endif
Cheng Chen84b09932018-08-12 17:35:13 -07004471 0, num_planes, 0);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004472 }
Debargha Mukherjeee168a782017-08-31 12:30:10 -07004473
Yaowu Xu35ee2342017-11-08 11:50:46 -08004474 if (!no_restoration)
David Turnerc29e1a92018-12-06 14:10:14 +00004475 av1_loop_restoration_save_boundary_lines(&cm->cur_frame->buf, cm, 0);
Ola Hugosson1e7f2d02017-09-22 21:36:26 +02004476
Yaowu Xu35ee2342017-11-08 11:50:46 -08004477 if (no_cdef) {
David Turnerebf96f42018-11-14 16:57:57 +00004478 cm->cdef_info.cdef_bits = 0;
4479 cm->cdef_info.cdef_strengths[0] = 0;
4480 cm->cdef_info.nb_cdef_strengths = 1;
4481 cm->cdef_info.cdef_uv_strengths[0] = 0;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004482 } else {
Steinar Midtskogen59782122017-07-20 08:49:43 +02004483 // Find CDEF parameters
David Turnerc29e1a92018-12-06 14:10:14 +00004484 av1_cdef_search(&cm->cur_frame->buf, cpi->source, cm, xd,
Debargha Mukherjeed7338aa2017-11-04 07:34:50 -07004485 cpi->sf.fast_cdef_search);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004486
4487 // Apply the filter
David Turnerc29e1a92018-12-06 14:10:14 +00004488 av1_cdef_frame(&cm->cur_frame->buf, cm, xd);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004489 }
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004490
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004491 superres_post_encode(cpi);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004492
Yaowu Xu35ee2342017-11-08 11:50:46 -08004493 if (no_restoration) {
4494 cm->rst_info[0].frame_restoration_type = RESTORE_NONE;
4495 cm->rst_info[1].frame_restoration_type = RESTORE_NONE;
4496 cm->rst_info[2].frame_restoration_type = RESTORE_NONE;
4497 } else {
David Turnerc29e1a92018-12-06 14:10:14 +00004498 av1_loop_restoration_save_boundary_lines(&cm->cur_frame->buf, cm, 1);
Yaowu Xu35ee2342017-11-08 11:50:46 -08004499 av1_pick_filter_restoration(cpi->source, cpi);
4500 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
4501 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
4502 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05304503 if (cpi->num_workers > 1)
David Turnerc29e1a92018-12-06 14:10:14 +00004504 av1_loop_restoration_filter_frame_mt(&cm->cur_frame->buf, cm, 0,
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05304505 cpi->workers, cpi->num_workers,
4506 &cpi->lr_row_sync, &cpi->lr_ctxt);
4507 else
David Turnerc29e1a92018-12-06 14:10:14 +00004508 av1_loop_restoration_filter_frame(&cm->cur_frame->buf, cm, 0,
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05304509 &cpi->lr_ctxt);
Yaowu Xu35ee2342017-11-08 11:50:46 -08004510 }
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004511 }
Fergus Simpsonbc189932017-05-16 17:02:39 -07004512}
4513
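// Returns the bitmask of reference slots (bit i <-> slot i) that the current
// frame will overwrite; switch frames and shown key frames refresh all slots.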
David Turner996b2c12018-12-07 15:52:30 +00004514static int get_refresh_frame_flags(const AV1_COMP *const cpi) {
4515 const AV1_COMMON *const cm = &cpi->common;
4516
4517 // Switch frames and shown key-frames overwrite all reference slots
4518 if ((cm->current_frame.frame_type == KEY_FRAME && cm->show_frame) ||
4519 frame_is_sframe(cm))
4520 return 0xFF;
4521
4522 int refresh_mask = 0;
4523
4524 // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
4525 // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
4526 // the 3 LAST reference frames will be updated accordingly, i.e.:
4527 // (1) The original virtual index for LAST3_FRAME will become the new virtual
4528 // index for LAST_FRAME; and
4529 // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
4530 // shifted and become the new virtual indexes for LAST2_FRAME and
4531 // LAST3_FRAME.
4532 refresh_mask |=
4533 (cpi->refresh_last_frame << get_ref_frame_map_idx(cm, LAST3_FRAME));
4534
4535#if USE_SYMM_MULTI_LAYER
4536 const int bwd_ref_frame =
4537 (cpi->new_bwdref_update_rule == 1) ? EXTREF_FRAME : BWDREF_FRAME;
4538#else
4539 const int bwd_ref_frame = BWDREF_FRAME;
4540#endif
4541 refresh_mask |=
4542 (cpi->refresh_bwd_ref_frame << get_ref_frame_map_idx(cm, bwd_ref_frame));
4543
4544 refresh_mask |=
4545 (cpi->refresh_alt2_ref_frame << get_ref_frame_map_idx(cm, ALTREF2_FRAME));
4546
4547 if (av1_preserve_existing_gf(cpi)) {
4548 // We have decided to preserve the previously existing golden frame as our
4549 // new ARF frame. However, in the short term we leave it in the GF slot and,
4550 // if we're updating the GF with the current decoded frame, we save it
4551 // instead to the ARF slot.
4552 // Later, in the function av1_encoder.c:av1_update_reference_frames() we
4553 // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
4554 // there so that it can be done outside of the recode loop.
4555 // Note: This is highly specific to the use of ARF as a forward reference,
4556 // and this needs to be generalized as other uses are implemented
4557 // (like RTC/temporal scalability).
4558
4559 if (!cpi->preserve_arf_as_gld) {
4560 refresh_mask |= (cpi->refresh_golden_frame
4561 << get_ref_frame_map_idx(cm, ALTREF_FRAME));
4562 }
4563 } else {
4564 refresh_mask |=
4565 (cpi->refresh_golden_frame << get_ref_frame_map_idx(cm, GOLDEN_FRAME));
4566 refresh_mask |=
4567 (cpi->refresh_alt_ref_frame << get_ref_frame_map_idx(cm, ALTREF_FRAME));
4568 }
4569 return refresh_mask;
4570}
4571
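// If the frame-level filter is SWITCHABLE but the counts show that a single
// interpolation filter was used everywhere, signal that filter at the frame
// level instead (only done when that filter is EIGHTTAP_REGULAR).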
David Turnerf2b334c2018-12-13 13:00:55 +00004572static void fix_interp_filter(InterpFilter *const interp_filter,
4573 const FRAME_COUNTS *const counts) {
4574 if (*interp_filter == SWITCHABLE) {
4575 // Check to see if only one of the filters is actually used
4576 int count[SWITCHABLE_FILTERS] = { 0 };
4577 int num_filters_used = 0;
4578 for (int i = 0; i < SWITCHABLE_FILTERS; ++i) {
4579 for (int j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
4580 count[i] += counts->switchable_interp[j][i];
4581 num_filters_used += (count[i] > 0);
4582 }
4583 if (num_filters_used == 1) {
4584 // Only one filter is used. So set the filter at frame level
4585 for (int i = 0; i < SWITCHABLE_FILTERS; ++i) {
4586 if (count[i]) {
4587 if (i == EIGHTTAP_REGULAR) *interp_filter = i;
4588 break;
4589 }
4590 }
4591 }
4592 }
4593}
4594
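// Frame-level bookkeeping performed just before packing the bitstream:
// refresh flags, frame-context-type tracking, show_existing_frame checks,
// film grain parameter updates, per-tile context initialization and the
// frame-level interpolation filter fix-up.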
David Turner996b2c12018-12-07 15:52:30 +00004595static void finalize_encoded_frame(AV1_COMP *const cpi) {
4596 AV1_COMMON *const cm = &cpi->common;
David Turner99e990e2018-12-10 12:54:26 +00004597 CurrentFrame *const current_frame = &cm->current_frame;
David Turner996b2c12018-12-07 15:52:30 +00004598
4599 // This bitfield indicates which reference frame slots will be overwritten by
4600 // the current frame
David Turner99e990e2018-12-10 12:54:26 +00004601 current_frame->refresh_frame_flags = get_refresh_frame_flags(cpi);
4602
4603 if (!encode_show_existing_frame(cm)) {
4604 // Refresh fb_of_context_type[]: see encoder.h for explanation
4605 if (current_frame->frame_type == KEY_FRAME) {
4606 // All ref frames are refreshed, pick one that will live long enough
4607 cpi->fb_of_context_type[REGULAR_FRAME] = 0;
4608 } else {
4609 // If more than one frame is refreshed, it doesn't matter which one we
4610 // pick so pick the first. LST sometimes doesn't refresh any: this is ok
4611 const int current_frame_ref_type = get_current_frame_ref_type(cpi);
4612 for (int i = 0; i < REF_FRAMES; i++) {
4613 if (current_frame->refresh_frame_flags & (1 << i)) {
4614 cpi->fb_of_context_type[current_frame_ref_type] = i;
4615 break;
4616 }
4617 }
4618 }
4619 }
4620
4621 if (!cm->seq_params.reduced_still_picture_hdr &&
4622 encode_show_existing_frame(cm)) {
4623 RefCntBuffer *const frame_to_show =
4624 cm->ref_frame_map[cpi->existing_fb_idx_to_show];
4625
Wan-Teh Chang88cd1662019-01-14 12:38:41 -08004626 if (frame_to_show == NULL) {
David Turner99e990e2018-12-10 12:54:26 +00004627 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
4628 "Buffer does not contain a reconstructed frame");
4629 }
Wan-Teh Chang88cd1662019-01-14 12:38:41 -08004630 assert(frame_to_show->ref_count > 0);
David Turner99e990e2018-12-10 12:54:26 +00004631 assign_frame_buffer_p(&cm->cur_frame, frame_to_show);
4632 if (cm->reset_decoder_state && frame_to_show->frame_type != KEY_FRAME) {
4633 aom_internal_error(
4634 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
4635 "show_existing_frame to reset state on KEY_FRAME only");
4636 }
4637 }
David Turner08f909c2018-12-18 13:29:14 +00004638
4639 if (!encode_show_existing_frame(cm) &&
4640 cm->seq_params.film_grain_params_present &&
4641 (cm->show_frame || cm->showable_frame)) {
    // Copy the current frame's film grain params to its corresponding
    // RefCntBuffer slot.
4644 cm->cur_frame->film_grain_params = cm->film_grain_params;
4645
4646 // We must update the parameters if this is not an INTER_FRAME
4647 if (current_frame->frame_type != INTER_FRAME)
4648 cm->cur_frame->film_grain_params.update_parameters = 1;
4649
4650 // Iterate the random seed for the next frame.
4651 cm->film_grain_params.random_seed += 3381;
4652 if (cm->film_grain_params.random_seed == 0)
4653 cm->film_grain_params.random_seed = 7391;
4654 }
David Turnerf2b334c2018-12-13 13:00:55 +00004655
4656 // Initialise all tiles' contexts from the global frame context
4657 for (int tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
4658 for (int tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
4659 const int tile_idx = tile_row * cm->tile_cols + tile_col;
4660 cpi->tile_data[tile_idx].tctx = *cm->fc;
4661 }
4662 }
4663
4664 fix_interp_filter(&cm->interp_filter, cpi->td.counts);
David Turner996b2c12018-12-07 15:52:30 +00004665}
4666
David Turner2f3b5df2019-01-02 14:30:50 +00004667// Called after encode_with_recode_loop() has just encoded a frame and packed
4668// its bitstream. This function works out whether we under- or over-shot
4669// our bitrate target and adjusts q as appropriate. Also decides whether
4670// or not we should do another recode loop, indicated by *loop
4671static void recode_loop_update_q(AV1_COMP *const cpi, int *const loop,
4672 int *const q, int *const q_low,
4673 int *const q_high, const int top_index,
4674 const int bottom_index,
4675 int *const undershoot_seen,
4676 int *const overshoot_seen,
4677 const int loop_at_this_size) {
4678 AV1_COMMON *const cm = &cpi->common;
4679 RATE_CONTROL *const rc = &cpi->rc;
4680
4681 int frame_over_shoot_limit = 0, frame_under_shoot_limit = 0;
4682 av1_rc_compute_frame_size_bounds(cpi, rc->this_frame_target,
4683 &frame_under_shoot_limit,
4684 &frame_over_shoot_limit);
4685 if (frame_over_shoot_limit == 0) frame_over_shoot_limit = 1;
4686
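  // A forced key frame is recoded based on reconstruction quality rather than
  // rate alone: aim for a distortion between ambient_err / 2 and ambient_err.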
4687 if ((cm->current_frame.frame_type == KEY_FRAME) &&
4688 rc->this_key_frame_forced &&
4689 (rc->projected_frame_size < rc->max_frame_bandwidth)) {
4690 int last_q = *q;
4691 int64_t kf_err;
4692
4693 int64_t high_err_target = cpi->ambient_err;
4694 int64_t low_err_target = cpi->ambient_err >> 1;
4695
4696 if (cm->seq_params.use_highbitdepth) {
4697 kf_err = aom_highbd_get_y_sse(cpi->source, &cm->cur_frame->buf);
4698 } else {
4699 kf_err = aom_get_y_sse(cpi->source, &cm->cur_frame->buf);
4700 }
4701 // Prevent possible divide by zero error below for perfect KF
4702 kf_err += !kf_err;
4703
4704 // The key frame is not good enough or we can afford
4705 // to make it better without undue risk of popping.
4706 if ((kf_err > high_err_target &&
4707 rc->projected_frame_size <= frame_over_shoot_limit) ||
4708 (kf_err > low_err_target &&
4709 rc->projected_frame_size <= frame_under_shoot_limit)) {
4710 // Lower q_high
4711 *q_high = *q > *q_low ? *q - 1 : *q_low;
4712
4713 // Adjust Q
4714 *q = (int)((*q * high_err_target) / kf_err);
4715 *q = AOMMIN(*q, (*q_high + *q_low) >> 1);
4716 } else if (kf_err < low_err_target &&
4717 rc->projected_frame_size >= frame_under_shoot_limit) {
4718 // The key frame is much better than the previous frame
4719 // Raise q_low
4720 *q_low = *q < *q_high ? *q + 1 : *q_high;
4721
4722 // Adjust Q
4723 *q = (int)((*q * low_err_target) / kf_err);
4724 *q = AOMMIN(*q, (*q_high + *q_low + 1) >> 1);
4725 }
4726
4727 // Clamp Q to upper and lower limits:
4728 *q = clamp(*q, *q_low, *q_high);
4729
4730 *loop = *q != last_q;
4731 } else if (recode_loop_test(cpi, frame_over_shoot_limit,
4732 frame_under_shoot_limit, *q,
4733 AOMMAX(*q_high, top_index), bottom_index)) {
4734 // Is the projected frame size out of range and are we allowed
4735 // to attempt to recode.
4736 int last_q = *q;
4737 int retries = 0;
4738
4739 // Frame size out of permitted range:
4740 // Update correction factor & compute new Q to try...
4741 // Frame is too large
4742 if (rc->projected_frame_size > rc->this_frame_target) {
4743 // Special case if the projected size is > the max allowed.
4744 if (rc->projected_frame_size >= rc->max_frame_bandwidth)
4745 *q_high = rc->worst_quality;
4746
      // Raise Qlow to at least the current q value.
4748 *q_low = *q < *q_high ? *q + 1 : *q_high;
4749
4750 if (*undershoot_seen || loop_at_this_size > 1) {
        // Update rate_correction_factor and set q to the midpoint of the
        // current [q_low, q_high] bounds.
4752 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4753
4754 *q = (*q_high + *q_low + 1) / 2;
4755 } else {
        // Update rate_correction_factor and ask the rate-control model for a
        // new q within the current bounds.
4757 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4758
4759 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4760 AOMMAX(*q_high, top_index), cm->width,
4761 cm->height);
4762
4763 while (*q < *q_low && retries < 10) {
4764 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4765 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4766 AOMMAX(*q_high, top_index), cm->width,
4767 cm->height);
4768 retries++;
4769 }
4770 }
4771
4772 *overshoot_seen = 1;
4773 } else {
4774 // Frame is too small
4775 *q_high = *q > *q_low ? *q - 1 : *q_low;
4776
4777 if (*overshoot_seen || loop_at_this_size > 1) {
4778 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4779 *q = (*q_high + *q_low) / 2;
4780 } else {
4781 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4782 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4783 top_index, cm->width, cm->height);
4784 // Special case reset for qlow for constrained quality.
4785 // This should only trigger where there is very substantial
4786 // undershoot on a frame and the auto cq level is above
        // the user passed in value.
4788 if (cpi->oxcf.rc_mode == AOM_CQ && *q < *q_low) {
4789 *q_low = *q;
4790 }
4791
4792 while (*q > *q_high && retries < 10) {
4793 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4794 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4795 top_index, cm->width, cm->height);
4796 retries++;
4797 }
4798 }
4799
4800 *undershoot_seen = 1;
4801 }
4802
4803 // Clamp Q to upper and lower limits:
4804 *q = clamp(*q, *q_low, *q_high);
4805
4806 *loop = (*q != last_q);
4807 } else {
4808 *loop = 0;
4809 }
4810}
4811
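// Encode the frame, re-running the encode with an adjusted q whenever the
// dummy bitstream pack projects a frame size outside the rate-control bounds
// (or the global motion recode test requests another pass).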
Tom Finegane4099e32018-01-23 12:01:51 -08004812static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004813 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004814 RATE_CONTROL *const rc = &cpi->rc;
David Turner2f3b5df2019-01-02 14:30:50 +00004815 const int allow_recode = cpi->sf.recode_loop != DISALLOW_RECODE;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004816
4817 set_size_independent_vars(cpi);
4818
Yaowu Xu9b0f7032017-07-31 11:01:19 -07004819 cpi->source->buf_8bit_valid = 0;
Yaowu Xu9b0f7032017-07-31 11:01:19 -07004820
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004821 setup_frame_size(cpi);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004822
David Turner2f3b5df2019-01-02 14:30:50 +00004823 int top_index = 0, bottom_index = 0;
4824 int q = 0, q_low = 0, q_high = 0;
4825 set_size_dependent_vars(cpi, &q, &bottom_index, &top_index);
4826 q_low = bottom_index;
4827 q_high = top_index;
4828
4829 // Loop variables
4830 int loop_count = 0;
4831 int loop_at_this_size = 0;
4832 int loop = 0;
4833 int overshoot_seen = 0;
4834 int undershoot_seen = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004835 do {
Yaowu Xuf883b422016-08-30 14:01:10 -07004836 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07004837
    // If the frame has been scaled, the previous global motion search results
    // are no longer valid, so force the search to run again.
David Turner2f3b5df2019-01-02 14:30:50 +00004840 if (loop_count > 0 && cpi->source && cpi->global_motion_search_done) {
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004841 if (cpi->source->y_crop_width != cm->width ||
David Turner2f3b5df2019-01-02 14:30:50 +00004842 cpi->source->y_crop_height != cm->height) {
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004843 cpi->global_motion_search_done = 0;
David Turner2f3b5df2019-01-02 14:30:50 +00004844 }
4845 }
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004846 cpi->source =
4847 av1_scale_if_required(cm, cpi->unscaled_source, &cpi->scaled_source);
David Turner2f3b5df2019-01-02 14:30:50 +00004848 if (cpi->unscaled_last_source != NULL) {
Debargha Mukherjee17e7b082017-08-13 09:33:03 -07004849 cpi->last_source = av1_scale_if_required(cm, cpi->unscaled_last_source,
4850 &cpi->scaled_last_source);
David Turner2f3b5df2019-01-02 14:30:50 +00004851 }
Debargha Mukherjee17e7b082017-08-13 09:33:03 -07004852
David Turner2f3b5df2019-01-02 14:30:50 +00004853 if (!frame_is_intra_only(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004854 if (loop_count > 0) {
4855 release_scaled_references(cpi);
4856 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004857 scale_references(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004858 }
Yaowu Xuf883b422016-08-30 14:01:10 -07004859 av1_set_quantizer(cm, q);
Debargha Mukherjeef48b0d22018-11-20 12:23:43 -08004860 // printf("Frame %d/%d: q = %d, frame_type = %d superres_denom = %d\n",
4861 // cm->current_frame.frame_number, cm->show_frame, q,
4862 // cm->current_frame.frame_type, cm->superres_scale_denominator);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004863
David Turner2f3b5df2019-01-02 14:30:50 +00004864 if (loop_count == 0) {
4865 setup_frame(cpi);
4866 } else if (get_primary_ref_frame_buf(cm) == NULL) {
4867 // Base q-index may have changed, so we need to assign proper default coef
4868 // probs before every iteration.
Yaowu Xuf883b422016-08-30 14:01:10 -07004869 av1_default_coef_probs(cm);
Hui Su3694c832017-11-10 14:15:58 -08004870 av1_setup_frame_contexts(cm);
David Barkerfc91b392018-03-09 15:32:03 +00004871 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004872
Yaowu Xuc27fc142016-08-22 16:08:15 -07004873 if (cpi->oxcf.aq_mode == VARIANCE_AQ) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004874 av1_vaq_frame_setup(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004875 } else if (cpi->oxcf.aq_mode == COMPLEXITY_AQ) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004876 av1_setup_in_frame_q_adj(cpi);
David Turner2f3b5df2019-01-02 14:30:50 +00004877 } else if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && !allow_recode) {
4878 suppress_active_map(cpi);
4879 av1_cyclic_refresh_setup(cpi);
4880 apply_active_map(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004881 }
David Turner2f3b5df2019-01-02 14:30:50 +00004882
Rostislav Pehlivanov3a964622018-03-14 18:00:32 +00004883 if (cm->seg.enabled) {
David Barkercab37552018-03-21 11:56:24 +00004884 if (!cm->seg.update_data && cm->prev_frame) {
Rostislav Pehlivanov3a964622018-03-14 18:00:32 +00004885 segfeatures_copy(&cm->seg, &cm->prev_frame->seg);
David Barker11c93562018-06-05 12:00:07 +01004886 } else {
4887 calculate_segdata(&cm->seg);
Yue Chend90d3432018-03-16 11:28:42 -07004888 }
David Barkercab37552018-03-21 11:56:24 +00004889 } else {
4890 memset(&cm->seg, 0, sizeof(cm->seg));
Rostislav Pehlivanov3a964622018-03-14 18:00:32 +00004891 }
David Barkercab37552018-03-21 11:56:24 +00004892 segfeatures_copy(&cm->cur_frame->seg, &cm->seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004893
David Turner2f3b5df2019-01-02 14:30:50 +00004894 if (allow_recode) save_coding_context(cpi);
4895
Yaowu Xuc27fc142016-08-22 16:08:15 -07004896 // transform / motion compensation build reconstruction frame
Yaowu Xuf883b422016-08-30 14:01:10 -07004897 av1_encode_frame(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004898
David Turner2f3b5df2019-01-02 14:30:50 +00004899 // Update some stats from cyclic refresh, and check if we should not update
4900 // golden reference, for 1 pass CBR.
4901 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ &&
4902 cm->current_frame.frame_type != KEY_FRAME &&
4903 (cpi->oxcf.pass == 0 && cpi->oxcf.rc_mode == AOM_CBR)) {
4904 av1_cyclic_refresh_check_golden_update(cpi);
4905 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004906
Yaowu Xuf883b422016-08-30 14:01:10 -07004907 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07004908
4909 // Dummy pack of the bitstream using up to date stats to get an
4910 // accurate estimate of output frame size to determine if we need
4911 // to recode.
4912 if (cpi->sf.recode_loop >= ALLOW_RECODE_KFARFGF) {
Jingning Han8f661602017-08-19 08:16:50 -07004913 restore_coding_context(cpi);
Tom Finegane4099e32018-01-23 12:01:51 -08004914
David Turner996b2c12018-12-07 15:52:30 +00004915 finalize_encoded_frame(cpi);
David Turner35cba132018-12-10 15:48:15 +00004916 int largest_tile_id = 0; // Output from bitstream: unused here
4917 if (av1_pack_bitstream(cpi, dest, size, &largest_tile_id) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08004918 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004919
4920 rc->projected_frame_size = (int)(*size) << 3;
4921 restore_coding_context(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004922 }
4923
David Turner2f3b5df2019-01-02 14:30:50 +00004924 if (allow_recode && cpi->oxcf.rc_mode != AOM_Q) {
4925 // Update q and decide whether to do a recode loop
4926 recode_loop_update_q(cpi, &loop, &q, &q_low, &q_high, top_index,
4927 bottom_index, &undershoot_seen, &overshoot_seen,
4928 loop_at_this_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004929 }
4930
4931 // Special case for overlay frame.
4932 if (rc->is_src_frame_alt_ref &&
4933 rc->projected_frame_size < rc->max_frame_bandwidth)
4934 loop = 0;
4935
David Turner2f3b5df2019-01-02 14:30:50 +00004936 if (allow_recode && !cpi->sf.gm_disable_recode &&
4937 recode_loop_test_global_motion(cpi)) {
4938 loop = 1;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08004939 }
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08004940
Yaowu Xuc27fc142016-08-22 16:08:15 -07004941 if (loop) {
4942 ++loop_count;
4943 ++loop_at_this_size;
4944
4945#if CONFIG_INTERNAL_STATS
4946 ++cpi->tot_recode_hits;
4947#endif
4948 }
4949 } while (loop);
Tom Finegane4099e32018-01-23 12:01:51 -08004950
4951 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004952}
4953
Yaowu Xuc27fc142016-08-22 16:08:15 -07004954#define DUMP_RECON_FRAMES 0
4955
4956#if DUMP_RECON_FRAMES == 1
4957// NOTE(zoeliu): For debug - Output the filtered reconstructed video.
Yaowu Xuf883b422016-08-30 14:01:10 -07004958static void dump_filtered_recon_frames(AV1_COMP *cpi) {
4959 AV1_COMMON *const cm = &cpi->common;
David Turnerd2a592e2018-11-16 14:59:31 +00004960 const CurrentFrame *const current_frame = &cm->current_frame;
David Turnerc29e1a92018-12-06 14:10:14 +00004961 const YV12_BUFFER_CONFIG *recon_buf = &cm->cur_frame->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004962
Zoe Liub4f31032017-11-03 23:48:35 -07004963 if (recon_buf == NULL) {
David Turnerd2a592e2018-11-16 14:59:31 +00004964 printf("Frame %d is not ready.\n", current_frame->frame_number);
Zoe Liub4f31032017-11-03 23:48:35 -07004965 return;
4966 }
4967
Zoe Liu27deb382018-03-27 15:13:56 -07004968 static const int flag_list[REF_FRAMES] = { 0,
4969 AOM_LAST_FLAG,
4970 AOM_LAST2_FLAG,
4971 AOM_LAST3_FLAG,
4972 AOM_GOLD_FLAG,
4973 AOM_BWD_FLAG,
4974 AOM_ALT2_FLAG,
4975 AOM_ALT_FLAG };
Zoe Liub4f31032017-11-03 23:48:35 -07004976 printf(
4977 "\n***Frame=%d (frame_offset=%d, show_frame=%d, "
4978 "show_existing_frame=%d) "
4979 "[LAST LAST2 LAST3 GOLDEN BWD ALT2 ALT]=[",
David Turnerd2a592e2018-11-16 14:59:31 +00004980 current_frame->frame_number, current_frame->order_hint, cm->show_frame,
Zoe Liub4f31032017-11-03 23:48:35 -07004981 cm->show_existing_frame);
4982 for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00004983 const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
4984 const int ref_offset = buf != NULL ? (int)buf->order_hint : -1;
David Turner1bcefb32018-11-19 17:54:00 +00004985 printf(" %d(%c-%d-%4.2f)", ref_offset,
4986 (cpi->ref_frame_flags & flag_list[ref_frame]) ? 'Y' : 'N',
David Turnera21966b2018-12-05 14:48:49 +00004987 buf ? (int)buf->frame_rf_level : -1,
4988 buf ? rate_factor_deltas[buf->frame_rf_level] : -1);
Zoe Liub4f31032017-11-03 23:48:35 -07004989 }
4990 printf(" ]\n");
Zoe Liub4f31032017-11-03 23:48:35 -07004991
4992 if (!cm->show_frame) {
4993 printf("Frame %d is a no show frame, so no image dump.\n",
David Turnerd2a592e2018-11-16 14:59:31 +00004994 current_frame->frame_number);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004995 return;
4996 }
4997
Zoe Liub4f31032017-11-03 23:48:35 -07004998 int h;
4999 char file_name[256] = "/tmp/enc_filtered_recon.yuv";
5000 FILE *f_recon = NULL;
5001
David Turnerd2a592e2018-11-16 14:59:31 +00005002 if (current_frame->frame_number == 0) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005003 if ((f_recon = fopen(file_name, "wb")) == NULL) {
5004 printf("Unable to open file %s to write.\n", file_name);
5005 return;
5006 }
5007 } else {
5008 if ((f_recon = fopen(file_name, "ab")) == NULL) {
5009 printf("Unable to open file %s to append.\n", file_name);
5010 return;
5011 }
5012 }
5013 printf(
Zoe Liuf40a9572017-10-13 12:37:19 -07005014 "\nFrame=%5d, encode_update_type[%5d]=%1d, frame_offset=%d, "
5015 "show_frame=%d, show_existing_frame=%d, source_alt_ref_active=%d, "
5016 "refresh_alt_ref_frame=%d, rf_level=%d, "
5017 "y_stride=%4d, uv_stride=%4d, cm->width=%4d, cm->height=%4d\n\n",
David Turnerd2a592e2018-11-16 14:59:31 +00005018 current_frame->frame_number, cpi->twopass.gf_group.index,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005019 cpi->twopass.gf_group.update_type[cpi->twopass.gf_group.index],
David Turnerd2a592e2018-11-16 14:59:31 +00005020 current_frame->order_hint, cm->show_frame, cm->show_existing_frame,
Zoe Liuf40a9572017-10-13 12:37:19 -07005021 cpi->rc.source_alt_ref_active, cpi->refresh_alt_ref_frame,
5022 cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index],
5023 recon_buf->y_stride, recon_buf->uv_stride, cm->width, cm->height);
Zoe Liue9b15e22017-07-19 15:53:01 -07005024#if 0
5025 int ref_frame;
5026 printf("get_ref_frame_map_idx: [");
5027 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame)
David Turnera21966b2018-12-05 14:48:49 +00005028 printf(" %d", get_ref_frame_map_idx(cm, ref_frame));
Zoe Liue9b15e22017-07-19 15:53:01 -07005029 printf(" ]\n");
Zoe Liue9b15e22017-07-19 15:53:01 -07005030#endif // 0
Yaowu Xuc27fc142016-08-22 16:08:15 -07005031
5032 // --- Y ---
5033 for (h = 0; h < cm->height; ++h) {
5034 fwrite(&recon_buf->y_buffer[h * recon_buf->y_stride], 1, cm->width,
5035 f_recon);
5036 }
5037 // --- U ---
5038 for (h = 0; h < (cm->height >> 1); ++h) {
5039 fwrite(&recon_buf->u_buffer[h * recon_buf->uv_stride], 1, (cm->width >> 1),
5040 f_recon);
5041 }
5042 // --- V ---
5043 for (h = 0; h < (cm->height >> 1); ++h) {
5044 fwrite(&recon_buf->v_buffer[h * recon_buf->uv_stride], 1, (cm->width >> 1),
5045 f_recon);
5046 }
5047
5048 fclose(f_recon);
5049}
5050#endif // DUMP_RECON_FRAMES
5051
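// A frame may be dropped only if it does not refresh any reference slot.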
Wei-Ting Linfb7dc062018-06-28 18:26:13 -07005052static INLINE int is_frame_droppable(AV1_COMP *cpi) {
5053 return !(cpi->refresh_alt_ref_frame || cpi->refresh_alt2_ref_frame ||
5054 cpi->refresh_bwd_ref_frame || cpi->refresh_golden_frame ||
5055 cpi->refresh_last_frame);
5056}
5057
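// Build a mask of interpolation filters that previous frames selected only
// rarely (based on the per-reference selection counts), so that the filter
// search for this frame can be restricted.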
Sachin Kumar Gargfd39b232019-01-03 17:41:09 +05305058static int setup_interp_filter_search_mask(AV1_COMP *cpi) {
5059 InterpFilters ifilter;
5060 int ref_total[REF_FRAMES] = { 0 };
5061 MV_REFERENCE_FRAME ref;
5062 int mask = 0;
5063 int arf_idx = ALTREF_FRAME;
5064 if (cpi->common.last_frame_type == KEY_FRAME || cpi->refresh_alt_ref_frame)
5065 return mask;
5066 for (ref = LAST_FRAME; ref <= ALTREF_FRAME; ++ref)
5067 for (ifilter = EIGHTTAP_REGULAR; ifilter <= MULTITAP_SHARP; ++ifilter)
5068 ref_total[ref] += cpi->interp_filter_selected[ref][ifilter];
5069
5070 for (ifilter = EIGHTTAP_REGULAR; ifilter <= MULTITAP_SHARP; ++ifilter) {
5071 if ((ref_total[LAST_FRAME] &&
5072 cpi->interp_filter_selected[LAST_FRAME][ifilter] * 30 <=
5073 ref_total[LAST_FRAME]) &&
5074 (((cpi->interp_filter_selected[LAST2_FRAME][ifilter] * 20) +
5075 (cpi->interp_filter_selected[LAST3_FRAME][ifilter] * 20) +
5076 (cpi->interp_filter_selected[GOLDEN_FRAME][ifilter] * 20) +
5077 (cpi->interp_filter_selected[BWDREF_FRAME][ifilter] * 10) +
5078 (cpi->interp_filter_selected[ALTREF2_FRAME][ifilter] * 10) +
5079 (cpi->interp_filter_selected[arf_idx][ifilter] * 10)) <
5080 (ref_total[LAST2_FRAME] + ref_total[LAST3_FRAME] +
5081 ref_total[GOLDEN_FRAME] + ref_total[BWDREF_FRAME] +
5082 ref_total[ALTREF2_FRAME] + ref_total[ALTREF_FRAME])))
5083 mask |= 1 << ifilter;
5084 }
5085 return mask;
5086}
5087
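// Top-level single-frame encode: set frame-level flags, handle the
// show_existing_frame short-circuit (pack the bitstream and return early),
// and otherwise prepare segmentation, tile-group and rate-control state
// before encoding.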
Tom Finegane4099e32018-01-23 12:01:51 -08005088static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size, uint8_t *dest,
Tom Finegane4099e32018-01-23 12:01:51 -08005089 unsigned int *frame_flags) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005090 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005091 SequenceHeader *const seq_params = &cm->seq_params;
David Turnerd2a592e2018-11-16 14:59:31 +00005092 CurrentFrame *const current_frame = &cm->current_frame;
Yaowu Xuf883b422016-08-30 14:01:10 -07005093 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005094 struct segmentation *const seg = &cm->seg;
Thomas Davies4822e142017-10-10 11:30:36 +01005095
Yaowu Xuf883b422016-08-30 14:01:10 -07005096 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07005097
Fangwen Fu8d164de2016-12-14 13:40:54 -08005098 // frame type has been decided outside of this function call
David Turnerd2a592e2018-11-16 14:59:31 +00005099 cm->cur_frame->frame_type = current_frame->frame_type;
Debargha Mukherjee07a7c1f2018-03-21 17:39:13 -07005100
Yunqing Wang9612d552018-05-15 14:58:30 -07005101 cm->large_scale_tile = cpi->oxcf.large_scale_tile;
5102 cm->single_tile_decoding = cpi->oxcf.single_tile_decoding;
Yunqing Wang9612d552018-05-15 14:58:30 -07005103
sarahparker21dbca42018-03-30 17:43:44 -07005104 cm->allow_ref_frame_mvs &= frame_might_allow_ref_frame_mvs(cm);
Yunqing Wangd48fb162018-06-15 10:55:28 -07005105 // cm->allow_ref_frame_mvs needs to be written into the frame header while
5106 // cm->large_scale_tile is 1, therefore, "cm->large_scale_tile=1" case is
5107 // separated from frame_might_allow_ref_frame_mvs().
5108 cm->allow_ref_frame_mvs &= !cm->large_scale_tile;
5109
Debargha Mukherjee1d7217e2018-03-26 13:32:13 -07005110 cm->allow_warped_motion =
Debargha Mukherjeea5b810a2018-03-26 19:19:55 -07005111 cpi->oxcf.allow_warped_motion && frame_might_allow_warped_motion(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005112
Jingning Hand8a15a62017-10-30 10:53:42 -07005113 // Reset the frame packet stamp index.
David Turnerd2a592e2018-11-16 14:59:31 +00005114 if (current_frame->frame_type == KEY_FRAME && cm->show_frame)
5115 current_frame->frame_number = 0;
Jingning Hand8a15a62017-10-30 10:53:42 -07005116
Sachin Kumar Gargfd39b232019-01-03 17:41:09 +05305117 cm->last_frame_type = current_frame->frame_type;
5118 if (cpi->oxcf.pass == 2 && cpi->sf.adaptive_interp_filter_search)
5119 cpi->sf.interp_filter_search_mask = setup_interp_filter_search_mask(cpi);
5120
Sarah Parker33005522018-07-27 14:46:25 -07005121 if (encode_show_existing_frame(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005122 // NOTE(zoeliu): In BIDIR_PRED, the existing frame to show is the current
5123 // BWDREF_FRAME in the reference frame buffer.
David Turnerd2a592e2018-11-16 14:59:31 +00005124 if (current_frame->frame_type == KEY_FRAME) {
Sarah Parkerb9041612018-05-22 19:06:47 -07005125 cm->reset_decoder_state = 1;
5126 } else {
David Turnerd2a592e2018-11-16 14:59:31 +00005127 current_frame->frame_type = INTER_FRAME;
Sarah Parkerb9041612018-05-22 19:06:47 -07005128 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005129 cm->show_frame = 1;
5130 cpi->frame_flags = *frame_flags;
5131
Jingning Han8f661602017-08-19 08:16:50 -07005132 restore_coding_context(cpi);
Zoe Liub4f31032017-11-03 23:48:35 -07005133
David Turner996b2c12018-12-07 15:52:30 +00005134 finalize_encoded_frame(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005135 // Build the bitstream
David Turner35cba132018-12-10 15:48:15 +00005136 int largest_tile_id = 0; // Output from bitstream: unused here
5137 if (av1_pack_bitstream(cpi, dest, size, &largest_tile_id) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08005138 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005139
David Turner90311862018-11-29 13:34:36 +00005140 if (seq_params->frame_id_numbers_present_flag &&
5141 current_frame->frame_type == KEY_FRAME) {
5142 // Displaying a forward key-frame, so reset the ref buffer IDs
5143 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
5144 for (int i = 0; i < REF_FRAMES; i++)
5145 cm->ref_frame_id[i] = display_frame_id;
5146 }
5147
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07005148 cpi->seq_params_locked = 1;
5149
Zoe Liub4f31032017-11-03 23:48:35 -07005150 // Update current frame offset.
Jack Haughtonddb80602018-11-21 16:41:49 +00005151 current_frame->order_hint = cm->cur_frame->order_hint;
Zoe Liub4f31032017-11-03 23:48:35 -07005152
Yaowu Xuc27fc142016-08-22 16:08:15 -07005153#if DUMP_RECON_FRAMES == 1
5154 // NOTE(zoeliu): For debug - Output the filtered reconstructed video.
5155 dump_filtered_recon_frames(cpi);
5156#endif // DUMP_RECON_FRAMES
5157
5158 // Update the LAST_FRAME in the reference frame buffer.
Zoe Liue9b15e22017-07-19 15:53:01 -07005159 // NOTE:
5160 // (1) For BWDREF_FRAME as the show_existing_frame, the reference frame
5161 // update has been done previously when handling the LAST_BIPRED_FRAME
5162 // right before BWDREF_FRAME (in the display order);
5163 // (2) For INTNL_OVERLAY as the show_existing_frame, the reference frame
Urvang Joshif1fa6862018-01-08 16:39:33 -08005164    //           update will be done when the following is called, which will
5165    //           exchange the virtual indexes between LAST_FRAME and ALTREF2_FRAME,
Zoe Liue9b15e22017-07-19 15:53:01 -07005166    //           so that LAST3 will get retired, LAST2 becomes LAST3, LAST becomes
Urvang Joshif1fa6862018-01-08 16:39:33 -08005167    //           LAST2, and ALTREF2_FRAME will serve as the new LAST_FRAME.
Cheng Chen46f30c72017-09-07 11:13:33 -07005170 update_reference_frames(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005171
5172 // Update frame flags
5173 cpi->frame_flags &= ~FRAMEFLAGS_GOLDEN;
5174 cpi->frame_flags &= ~FRAMEFLAGS_BWDREF;
5175 cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
5176
5177 *frame_flags = cpi->frame_flags & ~FRAMEFLAGS_KEY;
5178
Yaowu Xuc27fc142016-08-22 16:08:15 -07005179 // Since we allocate a spot for the OVERLAY frame in the gf group, we need
5180 // to do post-encoding update accordingly.
5181 if (cpi->rc.is_src_frame_alt_ref) {
Debargha Mukherjee7166f222017-09-05 21:32:42 -07005182 av1_set_target_rate(cpi, cm->width, cm->height);
Yaowu Xuf883b422016-08-30 14:01:10 -07005183 av1_rc_postencode_update(cpi, *size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005184 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005185
David Turnerd2a592e2018-11-16 14:59:31 +00005186 ++current_frame->frame_number;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005187
Tom Finegane4099e32018-01-23 12:01:51 -08005188 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005189 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005190
5191 // Set default state for segment based loop filter update flags.
5192 cm->lf.mode_ref_delta_update = 0;
5193
Yaowu Xuc27fc142016-08-22 16:08:15 -07005194 // Set various flags etc to special state if it is a key frame.
Tarek AMARAc9813852018-03-05 18:40:18 -05005195 if (frame_is_intra_only(cm) || frame_is_sframe(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005196 // Reset the loop filter deltas and segmentation map.
Yaowu Xuf883b422016-08-30 14:01:10 -07005197 av1_reset_segment_features(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005198
5199 // If segmentation is enabled force a map update for key frames.
5200 if (seg->enabled) {
5201 seg->update_map = 1;
5202 seg->update_data = 1;
5203 }
5204
5205 // The alternate reference frame cannot be active for a key frame.
5206 cpi->rc.source_alt_ref_active = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005207 }
Thomas Daviesaf6df172016-11-09 14:04:18 +00005208 if (cpi->oxcf.mtu == 0) {
5209 cm->num_tg = cpi->oxcf.num_tile_groups;
5210 } else {
Yaowu Xu859a5272016-11-10 15:32:21 -08005211 // Use a default value for the purposes of weighting costs in probability
5212 // updates
Thomas Daviesaf6df172016-11-09 14:04:18 +00005213 cm->num_tg = DEFAULT_MAX_NUM_TG;
5214 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005215
5216 // For 1 pass CBR, check if we are dropping this frame.
5217 // Never drop on key frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07005218 if (oxcf->pass == 0 && oxcf->rc_mode == AOM_CBR &&
David Turnerd2a592e2018-11-16 14:59:31 +00005219 current_frame->frame_type != KEY_FRAME) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005220 if (av1_rc_drop_frame(cpi)) {
5221 av1_rc_postencode_update_drop_frame(cpi);
Tom Finegane4099e32018-01-23 12:01:51 -08005222 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005223 }
5224 }
5225
Yaowu Xuf883b422016-08-30 14:01:10 -07005226 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07005227
5228#if CONFIG_INTERNAL_STATS
5229 memset(cpi->mode_chosen_counts, 0,
5230 MAX_MODES * sizeof(*cpi->mode_chosen_counts));
5231#endif
5232
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005233 if (seq_params->frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005234 /* Non-normative definition of current_frame_id ("frame counter" with
Johann123e8a62017-12-28 14:40:49 -08005235 * wraparound) */
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005236 if (cm->current_frame_id == -1) {
David Barker49a76562016-12-07 14:50:21 +00005237 int lsb, msb;
Yaowu Xud3e7c682017-12-21 14:08:25 -08005238 /* quasi-random initialization of current_frame_id for a key frame */
Alex Conversef77fd0b2017-04-20 11:00:24 -07005239 if (cpi->source->flags & YV12_FLAG_HIGHBITDEPTH) {
5240 lsb = CONVERT_TO_SHORTPTR(cpi->source->y_buffer)[0] & 0xff;
5241 msb = CONVERT_TO_SHORTPTR(cpi->source->y_buffer)[1] & 0xff;
David Barker49a76562016-12-07 14:50:21 +00005242 } else {
Alex Conversef77fd0b2017-04-20 11:00:24 -07005243 lsb = cpi->source->y_buffer[0] & 0xff;
5244 msb = cpi->source->y_buffer[1] & 0xff;
David Barker49a76562016-12-07 14:50:21 +00005245 }
David Turner760a2f42018-12-07 15:25:36 +00005246 cm->current_frame_id =
5247 ((msb << 8) + lsb) % (1 << seq_params->frame_id_length);
Tarek AMARAc9813852018-03-05 18:40:18 -05005248
5249      // S_frame is meant for stitching together different streams of
5250      // different resolutions, so current_frame_id must be the same across
5251      // the streams of the same content rather than random. 0x37 is an
5252      // arbitrarily chosen starting point.
5254 if (cpi->oxcf.sframe_enabled) cm->current_frame_id = 0x37;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005255 } else {
5256 cm->current_frame_id =
David Turner760a2f42018-12-07 15:25:36 +00005257 (cm->current_frame_id + 1 + (1 << seq_params->frame_id_length)) %
5258 (1 << seq_params->frame_id_length);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005259 }
5260 }
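  // Worked sketch of the wraparound arithmetic above, assuming (for
  // illustration only) a frame_id_length of 15, i.e. ids in [0, 32767]:
  //
  //   const int wrap = 1 << 15;         // 32768
  //   int id = 32767;
  //   id = (id + 1 + wrap) % wrap;      // -> 0
  //   id = (id + 1 + wrap) % wrap;      // -> 1
  //
  // Adding (1 << frame_id_length) before the modulo keeps the left operand
  // non-negative, so the remainder is always a valid id even if
  // current_frame_id ever held a negative sentinel such as -1.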
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005261
Hui Su483a8452018-02-26 12:28:48 -08005262 switch (cpi->oxcf.cdf_update_mode) {
5263    case 0:  // No CDF update for any frame (4~6% compression loss).
5264 cm->disable_cdf_update = 1;
5265 break;
5266 case 1: // Enable CDF update for all frames.
5267 cm->disable_cdf_update = 0;
5268 break;
5269 case 2:
5270 // Strategically determine at which frames to do CDF update.
5271      // Currently, only enable CDF update for all-intra and no-show
5272      // frames (1.5% compression loss).
5273 // TODO(huisu@google.com): design schemes for various trade-offs between
5274 // compression quality and decoding speed.
Hui Sub1b76b32018-02-27 15:24:48 -08005275 cm->disable_cdf_update =
5276 (frame_is_intra_only(cm) || !cm->show_frame) ? 0 : 1;
Hui Su483a8452018-02-26 12:28:48 -08005277 break;
5278 }
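  // The switch above is equivalent to the following sketch
  // (should_disable_cdf_update is a hypothetical helper, not encoder API):
  //
  //   static int should_disable_cdf_update(int mode, int intra_only,
  //                                        int show_frame) {
  //     if (mode == 0) return 1;                     // never update CDFs
  //     if (mode == 1) return 0;                     // update on every frame
  //     return (intra_only || !show_frame) ? 0 : 1;  // mode 2: selective
  //   }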
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005279 cm->timing_info_present &= !seq_params->reduced_still_picture_hdr;
Hui Su483a8452018-02-26 12:28:48 -08005280
David Turner2f3b5df2019-01-02 14:30:50 +00005281 if (encode_with_recode_loop(cpi, size, dest) != AOM_CODEC_OK)
5282 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005283
5284#ifdef OUTPUT_YUV_SKINMAP
David Turnerd2a592e2018-11-16 14:59:31 +00005285 if (cpi->common.current_frame.frame_number > 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005286 av1_compute_skin_map(cpi, yuv_skinmap_file);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005287 }
5288#endif // OUTPUT_YUV_SKINMAP
5289
5290 // Special case code to reduce pulsing when key frames are forced at a
5291  // fixed interval. Record the reconstruction error if this is the frame
5292  // before the forced key frame.
5293 if (cpi->rc.next_key_frame_forced && cpi->rc.frames_to_key == 1) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005294 if (seq_params->use_highbitdepth) {
Jack Haughtonddb80602018-11-21 16:41:49 +00005295 cpi->ambient_err = aom_highbd_get_y_sse(cpi->source, &cm->cur_frame->buf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005296 } else {
Jack Haughtonddb80602018-11-21 16:41:49 +00005297 cpi->ambient_err = aom_get_y_sse(cpi->source, &cm->cur_frame->buf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005298 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005299 }
5300
Tarek AMARAc9813852018-03-05 18:40:18 -05005301  // If the encoder forced a KEY_FRAME decision or if the frame is an S_FRAME
David Turnerd2a592e2018-11-16 14:59:31 +00005302 if ((current_frame->frame_type == KEY_FRAME && cm->show_frame) ||
5303 frame_is_sframe(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005304 cpi->refresh_last_frame = 1;
5305 }
5306
David Turnerc29e1a92018-12-06 14:10:14 +00005307 cm->cur_frame->buf.color_primaries = seq_params->color_primaries;
5308 cm->cur_frame->buf.transfer_characteristics =
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005309 seq_params->transfer_characteristics;
David Turnerc29e1a92018-12-06 14:10:14 +00005310 cm->cur_frame->buf.matrix_coefficients = seq_params->matrix_coefficients;
5311 cm->cur_frame->buf.monochrome = seq_params->monochrome;
5312 cm->cur_frame->buf.chroma_sample_position =
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005313 seq_params->chroma_sample_position;
David Turnerc29e1a92018-12-06 14:10:14 +00005314 cm->cur_frame->buf.color_range = seq_params->color_range;
5315 cm->cur_frame->buf.render_width = cm->render_width;
5316 cm->cur_frame->buf.render_height = cm->render_height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005317
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02005318 // TODO(zoeliu): For non-ref frames, loop filtering may need to be turned
5319 // off.
Yaowu Xuc27fc142016-08-22 16:08:15 -07005320
5321 // Pick the loop filter level for the frame.
Cheng Chen68dc9142018-05-02 17:46:28 -07005322 if (!cm->allow_intrabc) {
David Barker218556e2018-02-14 14:23:12 +00005323 loopfilter_frame(cpi, cm);
Hui Su06463e42018-02-23 22:17:36 -08005324 } else {
Hui Su06463e42018-02-23 22:17:36 -08005325 cm->lf.filter_level[0] = 0;
5326 cm->lf.filter_level[1] = 0;
David Turnerebf96f42018-11-14 16:57:57 +00005327 cm->cdef_info.cdef_bits = 0;
5328 cm->cdef_info.cdef_strengths[0] = 0;
5329 cm->cdef_info.nb_cdef_strengths = 1;
5330 cm->cdef_info.cdef_uv_strengths[0] = 0;
Hui Su06463e42018-02-23 22:17:36 -08005331 cm->rst_info[0].frame_restoration_type = RESTORE_NONE;
5332 cm->rst_info[1].frame_restoration_type = RESTORE_NONE;
5333 cm->rst_info[2].frame_restoration_type = RESTORE_NONE;
Hui Su06463e42018-02-23 22:17:36 -08005334 }
David Barker218556e2018-02-14 14:23:12 +00005335
5336 // TODO(debargha): Fix mv search range on encoder side
David Turnerc29e1a92018-12-06 14:10:14 +00005337 // aom_extend_frame_inner_borders(&cm->cur_frame->buf, av1_num_planes(cm));
5338 aom_extend_frame_borders(&cm->cur_frame->buf, av1_num_planes(cm));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005339
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07005340#ifdef OUTPUT_YUV_REC
David Turnerc29e1a92018-12-06 14:10:14 +00005341 aom_write_one_yuv_frame(cm, &cm->cur_frame->buf);
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07005342#endif
5343
David Turner996b2c12018-12-07 15:52:30 +00005344 finalize_encoded_frame(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005345 // Build the bitstream
David Turner35cba132018-12-10 15:48:15 +00005346 int largest_tile_id = 0; // Output from pack_bitstream
5347 if (av1_pack_bitstream(cpi, dest, size, &largest_tile_id) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08005348 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005349
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07005350 cpi->seq_params_locked = 1;
5351
David Turner996b2c12018-12-07 15:52:30 +00005352 // Update reference frame ids for reference frames this frame will overwrite
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005353 if (seq_params->frame_id_numbers_present_flag) {
David Turner996b2c12018-12-07 15:52:30 +00005354 for (int i = 0; i < REF_FRAMES; i++) {
5355 if ((current_frame->refresh_frame_flags >> i) & 1) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005356 cm->ref_frame_id[i] = cm->current_frame_id;
5357 }
5358 }
5359 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005360
Yaowu Xuc27fc142016-08-22 16:08:15 -07005361#if DUMP_RECON_FRAMES == 1
5362 // NOTE(zoeliu): For debug - Output the filtered reconstructed video.
Zoe Liub4f31032017-11-03 23:48:35 -07005363 dump_filtered_recon_frames(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005364#endif // DUMP_RECON_FRAMES
5365
Soo-Chul Han934af352017-10-15 15:21:51 -04005366 if (cm->seg.enabled) {
5367 if (cm->seg.update_map) {
5368 update_reference_segmentation_map(cpi);
Yue Chend90d3432018-03-16 11:28:42 -07005369 } else if (cm->last_frame_seg_map) {
David Turnerb757ce02018-11-12 15:01:28 +00005370 memcpy(cm->cur_frame->seg_map, cm->last_frame_seg_map,
Soo-Chul Han934af352017-10-15 15:21:51 -04005371 cm->mi_cols * cm->mi_rows * sizeof(uint8_t));
5372 }
5373 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005374
5375 if (frame_is_intra_only(cm) == 0) {
5376 release_scaled_references(cpi);
5377 }
5378
Cheng Chen46f30c72017-09-07 11:13:33 -07005379 update_reference_frames(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005380
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08005381#if CONFIG_ENTROPY_STATS
Yue Chencc6a6ef2018-05-21 16:21:05 -07005382 av1_accumulate_frame_counts(&aggregate_fc, &cpi->counts);
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08005383#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005384
Hui Sudc54be62018-03-14 19:14:28 -07005385 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
David Turner35cba132018-12-10 15:48:15 +00005386 *cm->fc = cpi->tile_data[largest_tile_id].tctx;
Hui Sudc54be62018-03-14 19:14:28 -07005387 av1_reset_cdf_symbol_counters(cm->fc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005388 }
5389
5390 if (cpi->refresh_golden_frame == 1)
5391 cpi->frame_flags |= FRAMEFLAGS_GOLDEN;
5392 else
5393 cpi->frame_flags &= ~FRAMEFLAGS_GOLDEN;
5394
5395 if (cpi->refresh_alt_ref_frame == 1)
5396 cpi->frame_flags |= FRAMEFLAGS_ALTREF;
5397 else
5398 cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
5399
Yaowu Xuc27fc142016-08-22 16:08:15 -07005400 if (cpi->refresh_bwd_ref_frame == 1)
5401 cpi->frame_flags |= FRAMEFLAGS_BWDREF;
5402 else
5403 cpi->frame_flags &= ~FRAMEFLAGS_BWDREF;
Sachin Kumar Gargfd39b232019-01-03 17:41:09 +05305404 cm->last_frame_type = current_frame->frame_type;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005405
Yaowu Xuf883b422016-08-30 14:01:10 -07005406 av1_rc_postencode_update(cpi, *size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005407
David Turnerd2a592e2018-11-16 14:59:31 +00005408 if (current_frame->frame_type == KEY_FRAME) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005409 // Tell the caller that the frame was coded as a key frame
5410 *frame_flags = cpi->frame_flags | FRAMEFLAGS_KEY;
5411 } else {
5412 *frame_flags = cpi->frame_flags & ~FRAMEFLAGS_KEY;
5413 }
5414
5415 // Clear the one shot update flags for segmentation map and mode/ref loop
5416 // filter deltas.
5417 cm->seg.update_map = 0;
5418 cm->seg.update_data = 0;
5419 cm->lf.mode_ref_delta_update = 0;
5420
Wei-Ting Linfb7dc062018-06-28 18:26:13 -07005421 // A droppable frame might not be shown but it always
5422 // takes a space in the gf group. Therefore, even when
5423  // it is not shown, we still need to update the countdown.
5424
Yaowu Xuc27fc142016-08-22 16:08:15 -07005425 if (cm->show_frame) {
Urvang Joshif1fa6862018-01-08 16:39:33 -08005426    // TODO(zoeliu): We may only swap mi and prev_mi for those frames that are
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02005428    // being used as reference.
Cheng Chen46f30c72017-09-07 11:13:33 -07005429 swap_mi_and_prev_mi(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005430 // Don't increment frame counters if this was an altref buffer
5431 // update not a real frame
Wei-Ting Lin96ee0eb2018-06-22 15:27:22 -07005432
David Turnerd2a592e2018-11-16 14:59:31 +00005433 ++current_frame->frame_number;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005434 }
5435
Tom Finegane4099e32018-01-23 12:01:51 -08005436 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005437}
5438
David Turner056f7cd2019-01-07 17:48:13 +00005439int av1_encode(AV1_COMP *const cpi, uint8_t *const dest,
5440 const EncodeFrameParams *const frame_params,
5441 EncodeFrameResults *const frame_results) {
David Turner07dbd8e2019-01-08 17:16:25 +00005442 AV1_COMMON *const cm = &cpi->common;
5443
David Turner056f7cd2019-01-07 17:48:13 +00005444 // TODO(david.turner@argondesign.com): Copy data from frame_params to cpi and
5445 // cm as appropriate
5446
David Turner07dbd8e2019-01-08 17:16:25 +00005447 cm->error_resilient_mode = frame_params->error_resilient_mode;
5448 cpi->ref_frame_flags = frame_params->ref_frame_flags;
5449
David Turner056f7cd2019-01-07 17:48:13 +00005450 if (encode_frame_to_data_rate(cpi, &frame_results->size, dest,
5451 frame_params->frame_flags) != AOM_CODEC_OK) {
5452 return AOM_CODEC_ERROR;
5453 }
5454
5455 return AOM_CODEC_OK;
5456}
5457
Sarah Parker3491dd22018-08-08 18:38:31 -07005458static INLINE void update_keyframe_counters(AV1_COMP *cpi) {
5459 // TODO(zoeliu): To investigate whether we should treat BWDREF_FRAME
5460 // differently here for rc->avg_frame_bandwidth.
5461 if (cpi->common.show_frame || cpi->rc.is_bwd_ref_frame) {
5462 if (!cpi->common.show_existing_frame || cpi->rc.is_src_frame_alt_ref ||
David Turnerd2a592e2018-11-16 14:59:31 +00005463 cpi->common.current_frame.frame_type == KEY_FRAME) {
Sarah Parker3491dd22018-08-08 18:38:31 -07005464 // If this is a show_existing_frame with a source other than altref,
5465 // or if it is not a displayed forward keyframe, the keyframe update
5466 // counters were incremented when it was originally encoded.
5467 cpi->rc.frames_since_key++;
5468 cpi->rc.frames_to_key--;
5469 }
5470 }
5471}
5472
5473static INLINE void update_frames_till_gf_update(AV1_COMP *cpi) {
5474 // TODO(weitinglin): Updating this counter for is_frame_droppable
5475  // is a work-around to handle the condition when a frame is dropped.
5476 // We should fix the cpi->common.show_frame flag
5477 // instead of checking the other condition to update the counter properly.
5478 if (cpi->common.show_frame || is_frame_droppable(cpi)) {
5479 // Decrement count down till next gf
5480 if (cpi->rc.frames_till_gf_update_due > 0)
5481 cpi->rc.frames_till_gf_update_due--;
5482 }
5483}
5484
5485static INLINE void update_twopass_gf_group_index(AV1_COMP *cpi) {
5486 // Increment the gf group index ready for the next frame. If this is
5487 // a show_existing_frame with a source other than altref, or if it is not
5488 // a displayed forward keyframe, the index was incremented when it was
5489 // originally encoded.
5490 if (!cpi->common.show_existing_frame || cpi->rc.is_src_frame_alt_ref ||
David Turnerd2a592e2018-11-16 14:59:31 +00005491 cpi->common.current_frame.frame_type == KEY_FRAME) {
Sarah Parker3491dd22018-08-08 18:38:31 -07005492 ++cpi->twopass.gf_group.index;
5493 }
5494}
5495
5496static void update_rc_counts(AV1_COMP *cpi) {
5497 update_keyframe_counters(cpi);
5498 update_frames_till_gf_update(cpi);
5499 if (cpi->oxcf.pass == 2) update_twopass_gf_group_index(cpi);
5500}
5501
Debargha Mukherjee57378252018-09-21 18:29:37 -07005502static void set_additional_frame_flags(AV1_COMMON *const cm,
5503 unsigned int *frame_flags) {
5504 if (frame_is_intra_only(cm)) *frame_flags |= FRAMEFLAGS_INTRAONLY;
5505 if (frame_is_sframe(cm)) *frame_flags |= FRAMEFLAGS_SWITCH;
5506 if (cm->error_resilient_mode) *frame_flags |= FRAMEFLAGS_ERROR_RESILIENT;
5507}
5508
Tom Finegane4099e32018-01-23 12:01:51 -08005509static int Pass0Encode(AV1_COMP *cpi, size_t *size, uint8_t *dest,
David Turner056f7cd2019-01-07 17:48:13 +00005510 unsigned int *frame_flags) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005511 if (cpi->oxcf.rc_mode == AOM_CBR) {
5512 av1_rc_get_one_pass_cbr_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005513 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07005514 av1_rc_get_one_pass_vbr_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005515 }
David Turner056f7cd2019-01-07 17:48:13 +00005516 if (av1_encode_strategy(cpi, size, dest, frame_flags) != AOM_CODEC_OK) {
Debargha Mukherjeeff48c092018-04-04 23:53:40 -07005517 return AOM_CODEC_ERROR;
5518 }
Debargha Mukherjee57378252018-09-21 18:29:37 -07005519 set_additional_frame_flags(&cpi->common, frame_flags);
5520
Sarah Parker3491dd22018-08-08 18:38:31 -07005521 update_rc_counts(cpi);
Debargha Mukherjeeff48c092018-04-04 23:53:40 -07005522 check_show_existing_frame(cpi);
5523 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005524}
5525
Tom Finegane4099e32018-01-23 12:01:51 -08005526static int Pass2Encode(AV1_COMP *cpi, size_t *size, uint8_t *dest,
5527 unsigned int *frame_flags) {
Angie Chiang5b5f4df2017-12-06 10:41:12 -08005528#if CONFIG_MISMATCH_DEBUG
5529 mismatch_move_frame_idx_w();
5530#endif
Angie Chiang4d55d762017-12-13 16:18:37 -08005531#if TXCOEFF_COST_TIMER
5532 AV1_COMMON *cm = &cpi->common;
5533 cm->txcoeff_cost_timer = 0;
5534 cm->txcoeff_cost_count = 0;
5535#endif
Tom Finegane4099e32018-01-23 12:01:51 -08005536
David Turner056f7cd2019-01-07 17:48:13 +00005537 if (av1_encode_strategy(cpi, size, dest, frame_flags) != AOM_CODEC_OK) {
Tom Finegane4099e32018-01-23 12:01:51 -08005538 return AOM_CODEC_ERROR;
5539 }
Debargha Mukherjee57378252018-09-21 18:29:37 -07005540 set_additional_frame_flags(&cpi->common, frame_flags);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005541
Angie Chiang4d55d762017-12-13 16:18:37 -08005542#if TXCOEFF_COST_TIMER
5543 cm->cum_txcoeff_cost_timer += cm->txcoeff_cost_timer;
5544 fprintf(stderr,
5545 "\ntxb coeff cost block number: %ld, frame time: %ld, cum time %ld "
5546 "in us\n",
5547 cm->txcoeff_cost_count, cm->txcoeff_cost_timer,
5548 cm->cum_txcoeff_cost_timer);
5549#endif
5550
Sarah Parker3491dd22018-08-08 18:38:31 -07005551 av1_twopass_postencode_update(cpi);
5552 update_rc_counts(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005553 check_show_existing_frame(cpi);
Tom Finegane4099e32018-01-23 12:01:51 -08005554 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005555}
5556
Neil Birkbecka2893ab2018-06-08 14:45:13 -07005557#if CONFIG_DENOISE
5558static int apply_denoise_2d(AV1_COMP *cpi, YV12_BUFFER_CONFIG *sd,
5559 int block_size, float noise_level,
5560 int64_t time_stamp, int64_t end_time) {
5561 AV1_COMMON *const cm = &cpi->common;
5562 if (!cpi->denoise_and_model) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005563 cpi->denoise_and_model = aom_denoise_and_model_alloc(
5564 cm->seq_params.bit_depth, block_size, noise_level);
Neil Birkbecka2893ab2018-06-08 14:45:13 -07005565 if (!cpi->denoise_and_model) {
5566 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
5567 "Error allocating denoise and model");
5568 return -1;
5569 }
5570 }
5571 if (!cpi->film_grain_table) {
5572 cpi->film_grain_table = aom_malloc(sizeof(*cpi->film_grain_table));
5573 if (!cpi->film_grain_table) {
5574 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
5575 "Error allocating grain table");
5576 return -1;
5577 }
5578 memset(cpi->film_grain_table, 0, sizeof(*cpi->film_grain_table));
5579 }
5580 if (aom_denoise_and_model_run(cpi->denoise_and_model, sd,
5581 &cm->film_grain_params)) {
5582 if (cm->film_grain_params.apply_grain) {
5583 aom_film_grain_table_append(cpi->film_grain_table, time_stamp, end_time,
5584 &cm->film_grain_params);
5585 }
5586 }
5587 return 0;
5588}
5589#endif
5590
James Zern3e2613b2017-03-30 23:14:40 -07005591int av1_receive_raw_frame(AV1_COMP *cpi, aom_enc_frame_flags_t frame_flags,
Yaowu Xuf883b422016-08-30 14:01:10 -07005592 YV12_BUFFER_CONFIG *sd, int64_t time_stamp,
5593 int64_t end_time) {
5594 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005595 const SequenceHeader *const seq_params = &cm->seq_params;
Yaowu Xuf883b422016-08-30 14:01:10 -07005596 struct aom_usec_timer timer;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005597 int res = 0;
5598 const int subsampling_x = sd->subsampling_x;
5599 const int subsampling_y = sd->subsampling_y;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005600 const int use_highbitdepth = (sd->flags & YV12_FLAG_HIGHBITDEPTH) != 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005601
Yaowu Xuc27fc142016-08-22 16:08:15 -07005602 check_initial_width(cpi, use_highbitdepth, subsampling_x, subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005603
Yaowu Xuf883b422016-08-30 14:01:10 -07005604 aom_usec_timer_start(&timer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005605
Neil Birkbecka2893ab2018-06-08 14:45:13 -07005606#if CONFIG_DENOISE
5607 if (cpi->oxcf.noise_level > 0)
5608 if (apply_denoise_2d(cpi, sd, cpi->oxcf.noise_block_size,
5609 cpi->oxcf.noise_level, time_stamp, end_time) < 0)
5610 res = -1;
5611#endif // CONFIG_DENOISE
5612
Yaowu Xuf883b422016-08-30 14:01:10 -07005613 if (av1_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
Yaowu Xud3e7c682017-12-21 14:08:25 -08005614 use_highbitdepth, frame_flags))
Yaowu Xuc27fc142016-08-22 16:08:15 -07005615 res = -1;
Yaowu Xuf883b422016-08-30 14:01:10 -07005616 aom_usec_timer_mark(&timer);
5617 cpi->time_receive_data += aom_usec_timer_elapsed(&timer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005618
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005619 if ((seq_params->profile == PROFILE_0) && !seq_params->monochrome &&
Yaowu Xuc27fc142016-08-22 16:08:15 -07005620 (subsampling_x != 1 || subsampling_y != 1)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005621 aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005622 "Non-4:2:0 color format requires profile 1 or 2");
Yaowu Xuc27fc142016-08-22 16:08:15 -07005623 res = -1;
5624 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005625 if ((seq_params->profile == PROFILE_1) &&
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005626 !(subsampling_x == 0 && subsampling_y == 0)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005627 aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005628 "Profile 1 requires 4:4:4 color format");
5629 res = -1;
5630 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005631 if ((seq_params->profile == PROFILE_2) &&
5632 (seq_params->bit_depth <= AOM_BITS_10) &&
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005633 !(subsampling_x == 1 && subsampling_y == 0)) {
5634 aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
5635 "Profile 2 bit-depth < 10 requires 4:2:2 color format");
Yaowu Xuc27fc142016-08-22 16:08:15 -07005636 res = -1;
5637 }
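  // For reference, the (subsampling_x, subsampling_y) pairs tested above map
  // to chroma formats as follows (sketch):
  //   (1, 1) -> 4:2:0  (the only layout accepted by PROFILE_0, or monochrome)
  //   (0, 0) -> 4:4:4  (required by PROFILE_1)
  //   (1, 0) -> 4:2:2  (required by PROFILE_2 at bit depths <= 10)
  // 12-bit PROFILE_2 input is not constrained by these checks.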
5638
5639 return res;
5640}
5641
Yaowu Xuf883b422016-08-30 14:01:10 -07005642static void adjust_frame_rate(AV1_COMP *cpi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005643 const struct lookahead_entry *source) {
5644 int64_t this_duration;
5645 int step = 0;
5646
5647 if (source->ts_start == cpi->first_time_stamp_ever) {
5648 this_duration = source->ts_end - source->ts_start;
5649 step = 1;
5650 } else {
5651 int64_t last_duration =
5652 cpi->last_end_time_stamp_seen - cpi->last_time_stamp_seen;
5653
5654 this_duration = source->ts_end - cpi->last_end_time_stamp_seen;
5655
5656 // do a step update if the duration changes by 10%
5657 if (last_duration)
5658 step = (int)((this_duration - last_duration) * 10 / last_duration);
5659 }
5660
5661 if (this_duration) {
5662 if (step) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005663 av1_new_framerate(cpi, 10000000.0 / this_duration);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005664 } else {
5665 // Average this frame's rate into the last second's average
5666 // frame rate. If we haven't seen 1 second yet, then average
5667 // over the whole interval seen.
Yaowu Xuf883b422016-08-30 14:01:10 -07005668 const double interval = AOMMIN(
Yaowu Xuc27fc142016-08-22 16:08:15 -07005669 (double)(source->ts_end - cpi->first_time_stamp_ever), 10000000.0);
5670 double avg_duration = 10000000.0 / cpi->framerate;
5671 avg_duration *= (interval - avg_duration + this_duration);
5672 avg_duration /= interval;
5673
Yaowu Xuf883b422016-08-30 14:01:10 -07005674 av1_new_framerate(cpi, 10000000.0 / avg_duration);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005675 }
5676 }
5677 cpi->last_time_stamp_seen = source->ts_start;
5678 cpi->last_end_time_stamp_seen = source->ts_end;
5679}
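// Worked example of the running-average update in adjust_frame_rate(), with
// made-up numbers: at 30 fps, avg_duration = 10000000.0 / 30 ~= 333333 us.
// If a frame arrives with this_duration = 400000 us after at least 10 s of
// input has been seen (so interval = 10000000), then
//   avg_duration *= (10000000 - 333333 + 400000) / 10000000  ~= 335556 us
// and the new frame rate becomes 10000000 / 335556 ~= 29.8 fps, i.e. a single
// frame only nudges the estimate by roughly its weight within a 10 s window.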
5680
5681// Returns 0 if this is not an alt ref else the offset of the source frame
5682// used as the arf midpoint.
Yaowu Xuf883b422016-08-30 14:01:10 -07005683static int get_arf_src_index(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005684 RATE_CONTROL *const rc = &cpi->rc;
5685 int arf_src_index = 0;
5686 if (is_altref_enabled(cpi)) {
5687 if (cpi->oxcf.pass == 2) {
5688 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5689 if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
5690 arf_src_index = gf_group->arf_src_offset[gf_group->index];
5691 }
5692 } else if (rc->source_alt_ref_pending) {
5693 arf_src_index = rc->frames_till_gf_update_due;
5694 }
5695 }
5696 return arf_src_index;
5697}
5698
Yaowu Xuf883b422016-08-30 14:01:10 -07005699static int get_brf_src_index(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005700 int brf_src_index = 0;
5701 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5702
5703 // TODO(zoeliu): We need to add the check on the -bwd_ref command line setup
5704 // flag.
5705 if (gf_group->bidir_pred_enabled[gf_group->index]) {
5706 if (cpi->oxcf.pass == 2) {
5707 if (gf_group->update_type[gf_group->index] == BRF_UPDATE)
5708 brf_src_index = gf_group->brf_src_offset[gf_group->index];
5709 } else {
5710 // TODO(zoeliu): To re-visit the setup for this scenario
5711 brf_src_index = cpi->rc.bipred_group_interval - 1;
5712 }
5713 }
5714
5715 return brf_src_index;
5716}
Zoe Liue9b15e22017-07-19 15:53:01 -07005717
Zoe Liue9b15e22017-07-19 15:53:01 -07005718// Returns 0 if this is not an alt ref else the offset of the source frame
5719// used as the arf midpoint.
5720static int get_arf2_src_index(AV1_COMP *cpi) {
5721 int arf2_src_index = 0;
5722 if (is_altref_enabled(cpi) && cpi->num_extra_arfs) {
5723 if (cpi->oxcf.pass == 2) {
5724 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5725 if (gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE) {
5726 arf2_src_index = gf_group->arf_src_offset[gf_group->index];
5727 }
5728 }
5729 }
5730 return arf2_src_index;
5731}
Yaowu Xuc27fc142016-08-22 16:08:15 -07005732
Yaowu Xuf883b422016-08-30 14:01:10 -07005733static void check_src_altref(AV1_COMP *cpi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005734 const struct lookahead_entry *source) {
5735 RATE_CONTROL *const rc = &cpi->rc;
5736
5737 // If pass == 2, the parameters set here will be reset in
Yaowu Xuf883b422016-08-30 14:01:10 -07005738 // av1_rc_get_second_pass_params()
Yaowu Xuc27fc142016-08-22 16:08:15 -07005739
5740 if (cpi->oxcf.pass == 2) {
5741 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5742 rc->is_src_frame_alt_ref =
Yaowu Xuc27fc142016-08-22 16:08:15 -07005743 (gf_group->update_type[gf_group->index] == INTNL_OVERLAY_UPDATE) ||
Yaowu Xuc27fc142016-08-22 16:08:15 -07005744 (gf_group->update_type[gf_group->index] == OVERLAY_UPDATE);
Zoe Liue9b15e22017-07-19 15:53:01 -07005745 rc->is_src_frame_ext_arf =
5746 gf_group->update_type[gf_group->index] == INTNL_OVERLAY_UPDATE;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005747 } else {
5748 rc->is_src_frame_alt_ref =
5749 cpi->alt_ref_source && (source == cpi->alt_ref_source);
5750 }
5751
5752 if (rc->is_src_frame_alt_ref) {
5753 // Current frame is an ARF overlay frame.
5754 cpi->alt_ref_source = NULL;
5755
Zoe Liue9b15e22017-07-19 15:53:01 -07005756 if (rc->is_src_frame_ext_arf && !cpi->common.show_existing_frame) {
5757 // For INTNL_OVERLAY, when show_existing_frame == 0, they do need to
5758 // refresh the LAST_FRAME, i.e. LAST3 gets retired, LAST2 becomes LAST3,
5759 // LAST becomes LAST2, and INTNL_OVERLAY becomes LAST.
5760 cpi->refresh_last_frame = 1;
5761 } else {
Zoe Liue9b15e22017-07-19 15:53:01 -07005762 // Don't refresh the last buffer for an ARF overlay frame. It will
5763 // become the GF so preserve last as an alternative prediction option.
5764 cpi->refresh_last_frame = 0;
Zoe Liue9b15e22017-07-19 15:53:01 -07005765 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005766 }
5767}
5768
5769#if CONFIG_INTERNAL_STATS
Yaowu Xuf883b422016-08-30 14:01:10 -07005770extern double av1_get_blockiness(const unsigned char *img1, int img1_pitch,
5771 const unsigned char *img2, int img2_pitch,
5772 int width, int height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005773
5774static void adjust_image_stat(double y, double u, double v, double all,
5775 ImageStat *s) {
Wan-Teh Changc25c92a2018-04-23 15:04:14 -07005776 s->stat[STAT_Y] += y;
5777 s->stat[STAT_U] += u;
5778 s->stat[STAT_V] += v;
5779 s->stat[STAT_ALL] += all;
Yaowu Xuf883b422016-08-30 14:01:10 -07005780 s->worst = AOMMIN(s->worst, all);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005781}
5782
Angie Chiang08a22a62017-07-17 17:29:17 -07005783static void compute_internal_stats(AV1_COMP *cpi, int frame_bytes) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005784 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005785 double samples = 0.0;
5786 uint32_t in_bit_depth = 8;
5787 uint32_t bit_depth = 8;
5788
Angie Chiang08a22a62017-07-17 17:29:17 -07005789#if CONFIG_INTER_STATS_ONLY
David Turnerd2a592e2018-11-16 14:59:31 +00005790 if (cm->current_frame.frame_type == KEY_FRAME) return; // skip key frame
Angie Chiang08a22a62017-07-17 17:29:17 -07005791#endif
5792 cpi->bytes += frame_bytes;
5793
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005794 if (cm->seq_params.use_highbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005795 in_bit_depth = cpi->oxcf.input_bit_depth;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005796 bit_depth = cm->seq_params.bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005797 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005798 if (cm->show_frame) {
Alex Conversef77fd0b2017-04-20 11:00:24 -07005799 const YV12_BUFFER_CONFIG *orig = cpi->source;
David Turnerc29e1a92018-12-06 14:10:14 +00005800 const YV12_BUFFER_CONFIG *recon = &cpi->common.cur_frame->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005801 double y, u, v, frame_all;
5802
5803 cpi->count++;
5804 if (cpi->b_calculate_psnr) {
5805 PSNR_STATS psnr;
5806 double frame_ssim2 = 0.0, weight = 0.0;
Yaowu Xuf883b422016-08-30 14:01:10 -07005807 aom_clear_system_state();
Yaowu Xud3e7c682017-12-21 14:08:25 -08005808 // TODO(yaowu): unify these two versions into one.
Yaowu Xuf883b422016-08-30 14:01:10 -07005809 aom_calc_highbd_psnr(orig, recon, &psnr, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005810
5811 adjust_image_stat(psnr.psnr[1], psnr.psnr[2], psnr.psnr[3], psnr.psnr[0],
5812 &cpi->psnr);
5813 cpi->total_sq_error += psnr.sse[0];
5814 cpi->total_samples += psnr.samples[0];
5815 samples = psnr.samples[0];
Yaowu Xud3e7c682017-12-21 14:08:25 -08005816 // TODO(yaowu): unify these two versions into one.
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005817 if (cm->seq_params.use_highbitdepth)
Yaowu Xuc27fc142016-08-22 16:08:15 -07005818 frame_ssim2 =
Yaowu Xuf883b422016-08-30 14:01:10 -07005819 aom_highbd_calc_ssim(orig, recon, &weight, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005820 else
Yaowu Xuf883b422016-08-30 14:01:10 -07005821 frame_ssim2 = aom_calc_ssim(orig, recon, &weight);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005822
Yaowu Xuf883b422016-08-30 14:01:10 -07005823 cpi->worst_ssim = AOMMIN(cpi->worst_ssim, frame_ssim2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005824 cpi->summed_quality += frame_ssim2 * weight;
5825 cpi->summed_weights += weight;
5826
5827#if 0
5828 {
5829 FILE *f = fopen("q_used.stt", "a");
Zoe Liuee202be2017-11-17 12:14:33 -08005830 double y2 = psnr.psnr[1];
5831 double u2 = psnr.psnr[2];
5832 double v2 = psnr.psnr[3];
5833 double frame_psnr2 = psnr.psnr[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07005834 fprintf(f, "%5d : Y%f7.3:U%f7.3:V%f7.3:F%f7.3:S%7.3f\n",
David Turnerd2a592e2018-11-16 14:59:31 +00005835 cm->current_frame.frame_number, y2, u2, v2,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005836 frame_psnr2, frame_ssim2);
5837 fclose(f);
5838 }
5839#endif
5840 }
5841 if (cpi->b_calculate_blockiness) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005842 if (!cm->seq_params.use_highbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005843 const double frame_blockiness =
Yaowu Xuf883b422016-08-30 14:01:10 -07005844 av1_get_blockiness(orig->y_buffer, orig->y_stride, recon->y_buffer,
5845 recon->y_stride, orig->y_width, orig->y_height);
5846 cpi->worst_blockiness = AOMMAX(cpi->worst_blockiness, frame_blockiness);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005847 cpi->total_blockiness += frame_blockiness;
5848 }
5849
5850 if (cpi->b_calculate_consistency) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005851 if (!cm->seq_params.use_highbitdepth) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005852 const double this_inconsistency = aom_get_ssim_metrics(
Yaowu Xuc27fc142016-08-22 16:08:15 -07005853 orig->y_buffer, orig->y_stride, recon->y_buffer, recon->y_stride,
5854 orig->y_width, orig->y_height, cpi->ssim_vars, &cpi->metrics, 1);
5855
5856 const double peak = (double)((1 << in_bit_depth) - 1);
5857 const double consistency =
Yaowu Xuf883b422016-08-30 14:01:10 -07005858 aom_sse_to_psnr(samples, peak, cpi->total_inconsistency);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005859 if (consistency > 0.0)
5860 cpi->worst_consistency =
Yaowu Xuf883b422016-08-30 14:01:10 -07005861 AOMMIN(cpi->worst_consistency, consistency);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005862 cpi->total_inconsistency += this_inconsistency;
5863 }
5864 }
5865 }
5866
5867 frame_all =
Yaowu Xuf883b422016-08-30 14:01:10 -07005868 aom_calc_fastssim(orig, recon, &y, &u, &v, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005869 adjust_image_stat(y, u, v, frame_all, &cpi->fastssim);
Yaowu Xuf883b422016-08-30 14:01:10 -07005870 frame_all = aom_psnrhvs(orig, recon, &y, &u, &v, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005871 adjust_image_stat(y, u, v, frame_all, &cpi->psnrhvs);
5872 }
5873}
5874#endif // CONFIG_INTERNAL_STATS
5875
RogerZhou3b635242017-09-19 10:06:46 -07005876static int is_integer_mv(AV1_COMP *cpi, const YV12_BUFFER_CONFIG *cur_picture,
5877 const YV12_BUFFER_CONFIG *last_picture,
5878 hash_table *last_hash_table) {
5879 aom_clear_system_state();
5880  // Check whether hash-based ME should be used.
5881 int k;
5882 uint32_t hash_value_1;
5883 uint32_t hash_value_2;
5884
5885 const int block_size = 8;
5886 const double threshold_current = 0.8;
5887 const double threshold_average = 0.95;
5888 const int max_history_size = 32;
5889 int T = 0; // total block
5890 int C = 0; // match with collocated block
5891 int S = 0; // smooth region but not match with collocated block
5892 int M = 0; // match with other block
5893
5894 const int pic_width = cur_picture->y_width;
5895 const int pic_height = cur_picture->y_height;
5896 for (int i = 0; i + block_size <= pic_height; i += block_size) {
5897 for (int j = 0; j + block_size <= pic_width; j += block_size) {
5898 const int x_pos = j;
5899 const int y_pos = i;
5900 int match = 1;
5901 T++;
5902
5903      // Check whether the collocated block matches the current block.
5904 uint8_t *p_cur = cur_picture->y_buffer;
5905 uint8_t *p_ref = last_picture->y_buffer;
5906 int stride_cur = cur_picture->y_stride;
5907 int stride_ref = last_picture->y_stride;
5908 p_cur += (y_pos * stride_cur + x_pos);
5909 p_ref += (y_pos * stride_ref + x_pos);
5910
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005911 if (cur_picture->flags & YV12_FLAG_HIGHBITDEPTH) {
5912 uint16_t *p16_cur = CONVERT_TO_SHORTPTR(p_cur);
5913 uint16_t *p16_ref = CONVERT_TO_SHORTPTR(p_ref);
5914 for (int tmpY = 0; tmpY < block_size && match; tmpY++) {
5915 for (int tmpX = 0; tmpX < block_size && match; tmpX++) {
5916 if (p16_cur[tmpX] != p16_ref[tmpX]) {
5917 match = 0;
5918 }
RogerZhou3b635242017-09-19 10:06:46 -07005919 }
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005920 p16_cur += stride_cur;
5921 p16_ref += stride_ref;
RogerZhou3b635242017-09-19 10:06:46 -07005922 }
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005923 } else {
5924 for (int tmpY = 0; tmpY < block_size && match; tmpY++) {
5925 for (int tmpX = 0; tmpX < block_size && match; tmpX++) {
5926 if (p_cur[tmpX] != p_ref[tmpX]) {
5927 match = 0;
5928 }
5929 }
5930 p_cur += stride_cur;
5931 p_ref += stride_ref;
5932 }
RogerZhou3b635242017-09-19 10:06:46 -07005933 }
5934
5935 if (match) {
5936 C++;
5937 continue;
5938 }
5939
5940 if (av1_hash_is_horizontal_perfect(cur_picture, block_size, x_pos,
5941 y_pos) ||
5942 av1_hash_is_vertical_perfect(cur_picture, block_size, x_pos, y_pos)) {
5943 S++;
5944 continue;
5945 }
5946
5947 av1_get_block_hash_value(
5948 cur_picture->y_buffer + y_pos * stride_cur + x_pos, stride_cur,
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005949 block_size, &hash_value_1, &hash_value_2,
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05305950 (cur_picture->flags & YV12_FLAG_HIGHBITDEPTH), &cpi->td.mb);
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005951 // Hashing does not work for highbitdepth currently.
5952 // TODO(Roger): Make it work for highbitdepth.
5953 if (av1_use_hash_me(&cpi->common)) {
5954 if (av1_has_exact_match(last_hash_table, hash_value_1, hash_value_2)) {
5955 M++;
5956 }
RogerZhou3b635242017-09-19 10:06:46 -07005957 }
5958 }
5959 }
5960
5961 assert(T > 0);
5962 double csm_rate = ((double)(C + S + M)) / ((double)(T));
5963 double m_rate = ((double)(M)) / ((double)(T));
5964
5965 cpi->csm_rate_array[cpi->rate_index] = csm_rate;
5966 cpi->m_rate_array[cpi->rate_index] = m_rate;
5967
5968 cpi->rate_index = (cpi->rate_index + 1) % max_history_size;
5969 cpi->rate_size++;
5970 cpi->rate_size = AOMMIN(cpi->rate_size, max_history_size);
5971
5972 if (csm_rate < threshold_current) {
5973 return 0;
5974 }
5975
5976 if (C == T) {
5977 return 1;
5978 }
5979
5980 double csm_average = 0.0;
5981 double m_average = 0.0;
5982
5983 for (k = 0; k < cpi->rate_size; k++) {
5984 csm_average += cpi->csm_rate_array[k];
5985 m_average += cpi->m_rate_array[k];
5986 }
5987 csm_average /= cpi->rate_size;
5988 m_average /= cpi->rate_size;
5989
5990 if (csm_average < threshold_average) {
5991 return 0;
5992 }
5993
5994 if (M > (T - C - S) / 3) {
5995 return 1;
5996 }
5997
5998 if (csm_rate > 0.99 && m_rate > 0.01) {
5999 return 1;
6000 }
6001
6002 if (csm_average + m_average > 1.01) {
6003 return 1;
6004 }
6005
6006 return 0;
6007}
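// Rough numeric sketch of the classification above, with made-up counts: a
// 1920x1080 frame has T = 240 * 135 = 32400 8x8 blocks. If C = 30000 blocks
// match their collocated block, S = 1500 land in smooth (horizontally or
// vertically uniform) regions and M = 600 match some other block via the hash
// table, then
//   csm_rate = (C + S + M) / T ~= 0.99   (clears the 0.8 gate)
//   M = 600 > (T - C - S) / 3 = 300
// so, assuming the running csm/m averages over recent frames also clear their
// thresholds, the function returns 1: the content looks screen-like and
// integer-MV / hash-based ME is considered worthwhile.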
RogerZhou3b635242017-09-19 10:06:46 -07006008
Yue Chen7cae98f2018-08-24 10:43:16 -07006009// Code for temporal dependency model
6010typedef struct GF_PICTURE {
6011 YV12_BUFFER_CONFIG *frame;
6012 int ref_frame[7];
6013} GF_PICTURE;
6014
Sarah Parkercf644442018-10-11 15:23:44 -07006015static void init_gop_frames(AV1_COMP *cpi, GF_PICTURE *gf_picture,
6016 const GF_GROUP *gf_group, int *tpl_group_frames) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006017 AV1_COMMON *cm = &cpi->common;
6018 const SequenceHeader *const seq_params = &cm->seq_params;
6019 int frame_idx = 0;
6020 int i;
6021 int gld_index = -1;
6022 int alt_index = -1;
6023 int lst_index = -1;
6024 int extend_frame_count = 0;
6025 int pframe_qindex = cpi->tpl_stats[2].base_qindex;
6026
6027 RefCntBuffer *frame_bufs = cm->buffer_pool->frame_bufs;
6028 int recon_frame_index[INTER_REFS_PER_FRAME + 1] = { -1, -1, -1, -1,
6029 -1, -1, -1, -1 };
6030
6031 // TODO(jingning): To be used later for gf frame type parsing.
6032 (void)gf_group;
6033
6034 for (i = 0; i < FRAME_BUFFERS && frame_idx < INTER_REFS_PER_FRAME + 1; ++i) {
6035 if (frame_bufs[i].ref_count == 0) {
David Turnere7ebf902018-12-04 14:04:55 +00006036 alloc_frame_mvs(cm, &frame_bufs[i]);
Yue Chen7cae98f2018-08-24 10:43:16 -07006037 if (aom_realloc_frame_buffer(
6038 &frame_bufs[i].buf, cm->width, cm->height,
6039 seq_params->subsampling_x, seq_params->subsampling_y,
Satish Kumar Suman29909962019-01-09 10:31:21 +05306040 seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
Yue Chen7cae98f2018-08-24 10:43:16 -07006041 cm->byte_alignment, NULL, NULL, NULL))
6042 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
6043 "Failed to allocate frame buffer");
6044
6045 recon_frame_index[frame_idx] = i;
6046 ++frame_idx;
6047 }
6048 }
6049
6050 for (i = 0; i < INTER_REFS_PER_FRAME + 1; ++i) {
6051 assert(recon_frame_index[i] >= 0);
6052 cpi->tpl_recon_frames[i] = &frame_bufs[recon_frame_index[i]].buf;
6053 }
6054
6055 *tpl_group_frames = 0;
6056
6057 // Initialize Golden reference frame.
David Turnera21966b2018-12-05 14:48:49 +00006058 gf_picture[0].frame = NULL;
6059 RefCntBuffer *ref_buf = get_ref_frame_buf(cm, GOLDEN_FRAME);
6060 if (ref_buf) gf_picture[0].frame = &ref_buf->buf;
Yue Chen7cae98f2018-08-24 10:43:16 -07006061 for (i = 0; i < 7; ++i) gf_picture[0].ref_frame[i] = -1;
6062 gld_index = 0;
6063 ++*tpl_group_frames;
6064
6065 // Initialize ARF frame
6066 gf_picture[1].frame = cpi->source;
6067 gf_picture[1].ref_frame[0] = gld_index;
6068 gf_picture[1].ref_frame[1] = lst_index;
6069 gf_picture[1].ref_frame[2] = alt_index;
6070  // TODO(yuec): Need to figure out the full AV1 reference model.
6071 for (i = 3; i < 7; ++i) gf_picture[1].ref_frame[i] = -1;
6072 alt_index = 1;
6073 ++*tpl_group_frames;
6074
6075 // Initialize P frames
6076 for (frame_idx = 2; frame_idx < MAX_LAG_BUFFERS; ++frame_idx) {
6077 struct lookahead_entry *buf =
6078 av1_lookahead_peek(cpi->lookahead, frame_idx - 2);
6079
6080 if (buf == NULL) break;
6081
6082 gf_picture[frame_idx].frame = &buf->img;
6083 gf_picture[frame_idx].ref_frame[0] = gld_index;
6084 gf_picture[frame_idx].ref_frame[1] = lst_index;
6085 gf_picture[frame_idx].ref_frame[2] = alt_index;
6086 for (i = 3; i < 7; ++i) gf_picture[frame_idx].ref_frame[i] = -1;
6087
6088 ++*tpl_group_frames;
6089 lst_index = frame_idx;
6090
6091 if (frame_idx == cpi->rc.baseline_gf_interval + 1) break;
6092 }
6093
6094 gld_index = frame_idx;
6095 lst_index = AOMMAX(0, frame_idx - 1);
6096 alt_index = -1;
6097 ++frame_idx;
6098
6099 // Extend two frames outside the current gf group.
6100 for (; frame_idx < MAX_LAG_BUFFERS && extend_frame_count < 2; ++frame_idx) {
6101 struct lookahead_entry *buf =
6102 av1_lookahead_peek(cpi->lookahead, frame_idx - 2);
6103
6104 if (buf == NULL) break;
6105
6106 cpi->tpl_stats[frame_idx].base_qindex = pframe_qindex;
6107
6108 gf_picture[frame_idx].frame = &buf->img;
6109 gf_picture[frame_idx].ref_frame[0] = gld_index;
6110 gf_picture[frame_idx].ref_frame[1] = lst_index;
6111 gf_picture[frame_idx].ref_frame[2] = alt_index;
6112 for (i = 3; i < 7; ++i) gf_picture[frame_idx].ref_frame[i] = -1;
6113 lst_index = frame_idx;
6114 ++*tpl_group_frames;
6115 ++extend_frame_count;
6116 }
6117}
6118
Sarah Parkercf644442018-10-11 15:23:44 -07006119static void init_tpl_stats(AV1_COMP *cpi) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006120 int frame_idx;
6121 for (frame_idx = 0; frame_idx < MAX_LAG_BUFFERS; ++frame_idx) {
6122 TplDepFrame *tpl_frame = &cpi->tpl_stats[frame_idx];
6123 memset(tpl_frame->tpl_stats_ptr, 0,
6124 tpl_frame->height * tpl_frame->width *
6125 sizeof(*tpl_frame->tpl_stats_ptr));
6126 tpl_frame->is_valid = 0;
6127 }
6128}
6129
Sarah Parkercf644442018-10-11 15:23:44 -07006130static uint32_t motion_compensated_prediction(AV1_COMP *cpi, ThreadData *td,
6131 uint8_t *cur_frame_buf,
6132 uint8_t *ref_frame_buf,
6133 int stride, BLOCK_SIZE bsize,
6134 int mi_row, int mi_col) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006135 AV1_COMMON *cm = &cpi->common;
6136 MACROBLOCK *const x = &td->mb;
6137 MACROBLOCKD *const xd = &x->e_mbd;
6138 MV_SPEED_FEATURES *const mv_sf = &cpi->sf.mv;
6139 const SEARCH_METHODS search_method = NSTEP;
6140 int step_param;
6141 int sadpb = x->sadperbit16;
6142 uint32_t bestsme = UINT_MAX;
6143 int distortion;
6144 uint32_t sse;
6145 int cost_list[5];
6146 const MvLimits tmp_mv_limits = x->mv_limits;
6147
6148 MV best_ref_mv1 = { 0, 0 };
6149 MV best_ref_mv1_full; /* full-pixel value of best_ref_mv1 */
6150
6151 best_ref_mv1_full.col = best_ref_mv1.col >> 3;
6152 best_ref_mv1_full.row = best_ref_mv1.row >> 3;
6153
6154 // Setup frame pointers
6155 x->plane[0].src.buf = cur_frame_buf;
6156 x->plane[0].src.stride = stride;
6157 xd->plane[0].pre[0].buf = ref_frame_buf;
6158 xd->plane[0].pre[0].stride = stride;
6159
6160 step_param = mv_sf->reduce_first_step_size;
6161 step_param = AOMMIN(step_param, MAX_MVSEARCH_STEPS - 2);
6162
6163 av1_set_mv_search_range(&x->mv_limits, &best_ref_mv1);
6164
6165 av1_full_pixel_search(cpi, x, bsize, &best_ref_mv1_full, step_param,
6166 search_method, 0, sadpb, cond_cost_list(cpi, cost_list),
6167 &best_ref_mv1, INT_MAX, 0, (MI_SIZE * mi_col),
6168 (MI_SIZE * mi_row), 0);
6169
6170 /* restore UMV window */
6171 x->mv_limits = tmp_mv_limits;
6172
6173 const int pw = block_size_wide[bsize];
6174 const int ph = block_size_high[bsize];
6175 bestsme = cpi->find_fractional_mv_step(
6176 x, cm, mi_row, mi_col, &best_ref_mv1, cpi->common.allow_high_precision_mv,
6177 x->errorperbit, &cpi->fn_ptr[bsize], 0, mv_sf->subpel_iters_per_step,
6178 cond_cost_list(cpi, cost_list), NULL, NULL, &distortion, &sse, NULL, NULL,
6179 0, 0, pw, ph, 1, 1);
6180
6181 return bestsme;
6182}
6183
Sarah Parkercf644442018-10-11 15:23:44 -07006184static int get_overlap_area(int grid_pos_row, int grid_pos_col, int ref_pos_row,
6185 int ref_pos_col, int block, BLOCK_SIZE bsize) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006186 int width = 0, height = 0;
6187 int bw = 4 << mi_size_wide_log2[bsize];
6188 int bh = 4 << mi_size_high_log2[bsize];
6189
6190 switch (block) {
6191 case 0:
6192 width = grid_pos_col + bw - ref_pos_col;
6193 height = grid_pos_row + bh - ref_pos_row;
6194 break;
6195 case 1:
6196 width = ref_pos_col + bw - grid_pos_col;
6197 height = grid_pos_row + bh - ref_pos_row;
6198 break;
6199 case 2:
6200 width = grid_pos_col + bw - ref_pos_col;
6201 height = ref_pos_row + bh - grid_pos_row;
6202 break;
6203 case 3:
6204 width = ref_pos_col + bw - grid_pos_col;
6205 height = ref_pos_row + bh - grid_pos_row;
6206 break;
6207 default: assert(0);
6208 }
6209
6210 return width * height;
6211}
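// Worked example (sketch): for a 16x16 block whose reference position is
// (ref_pos_row, ref_pos_col) = (5, 9), the displaced block straddles four
// grid-aligned 16x16 cells with top-left corners (0,0), (0,16), (16,0) and
// (16,16). The four calls (block = 0..3) return
//   11 * 7 = 77, 11 * 9 = 99, 5 * 7 = 35 and 5 * 9 = 45,
// which sum to 256 = 16 * 16: the overlaps partition the block exactly.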
6212
Sarah Parkercf644442018-10-11 15:23:44 -07006213static int round_floor(int ref_pos, int bsize_pix) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006214 int round;
6215 if (ref_pos < 0)
6216 round = -(1 + (-ref_pos - 1) / bsize_pix);
6217 else
6218 round = ref_pos / bsize_pix;
6219
6220 return round;
6221}
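// A short sketch of why this is not simply ref_pos / bsize_pix: C integer
// division truncates toward zero, while a grid index for a negative position
// must round toward negative infinity. With bsize_pix = 16:
//   round_floor(5, 16)   ->  0
//   round_floor(-1, 16)  -> -1   (plain -1 / 16 would give 0)
//   round_floor(-17, 16) -> -2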
6222
Sarah Parkercf644442018-10-11 15:23:44 -07006223static void tpl_model_store(TplDepStats *tpl_stats, int mi_row, int mi_col,
6224 BLOCK_SIZE bsize, int stride,
6225 const TplDepStats *src_stats) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006226 const int mi_height = mi_size_high[bsize];
6227 const int mi_width = mi_size_wide[bsize];
6228 int idx, idy;
6229
6230 int64_t intra_cost = src_stats->intra_cost / (mi_height * mi_width);
6231 int64_t inter_cost = src_stats->inter_cost / (mi_height * mi_width);
6232
6233 TplDepStats *tpl_ptr;
6234
6235 intra_cost = AOMMAX(1, intra_cost);
6236 inter_cost = AOMMAX(1, inter_cost);
6237
6238 for (idy = 0; idy < mi_height; ++idy) {
6239 tpl_ptr = &tpl_stats[(mi_row + idy) * stride + mi_col];
6240 for (idx = 0; idx < mi_width; ++idx) {
6241 tpl_ptr->intra_cost = intra_cost;
6242 tpl_ptr->inter_cost = inter_cost;
6243 tpl_ptr->mc_dep_cost = tpl_ptr->intra_cost + tpl_ptr->mc_flow;
6244 tpl_ptr->ref_frame_index = src_stats->ref_frame_index;
6245 tpl_ptr->mv.as_int = src_stats->mv.as_int;
6246 ++tpl_ptr;
6247 }
6248 }
6249}
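// Sketch of the normalization above, with made-up numbers: a 32x32 block
// covers mi_height * mi_width = 8 * 8 = 64 4x4 mi units, so a block-level
// intra_cost of 6400 is stored as 6400 / 64 = 100 in every covered unit
// (floored at 1). Spreading the cost onto the common 4x4 grid lets the later
// propagation steps mix blocks of different sizes.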
6250
Sarah Parkercf644442018-10-11 15:23:44 -07006251static void tpl_model_update_b(TplDepFrame *tpl_frame, TplDepStats *tpl_stats,
6252 int mi_row, int mi_col, const BLOCK_SIZE bsize) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006253 TplDepFrame *ref_tpl_frame = &tpl_frame[tpl_stats->ref_frame_index];
6254 TplDepStats *ref_stats = ref_tpl_frame->tpl_stats_ptr;
6255 MV mv = tpl_stats->mv.as_mv;
6256 int mv_row = mv.row >> 3;
6257 int mv_col = mv.col >> 3;
6258
6259 int ref_pos_row = mi_row * MI_SIZE + mv_row;
6260 int ref_pos_col = mi_col * MI_SIZE + mv_col;
6261
6262 const int bw = 4 << mi_size_wide_log2[bsize];
6263 const int bh = 4 << mi_size_high_log2[bsize];
6264 const int mi_height = mi_size_high[bsize];
6265 const int mi_width = mi_size_wide[bsize];
6266 const int pix_num = bw * bh;
6267
6268  // Top-left grid block location in pixels.
6269 int grid_pos_row_base = round_floor(ref_pos_row, bh) * bh;
6270 int grid_pos_col_base = round_floor(ref_pos_col, bw) * bw;
6271 int block;
6272
6273 for (block = 0; block < 4; ++block) {
6274 int grid_pos_row = grid_pos_row_base + bh * (block >> 1);
6275 int grid_pos_col = grid_pos_col_base + bw * (block & 0x01);
6276
6277 if (grid_pos_row >= 0 && grid_pos_row < ref_tpl_frame->mi_rows * MI_SIZE &&
6278 grid_pos_col >= 0 && grid_pos_col < ref_tpl_frame->mi_cols * MI_SIZE) {
6279 int overlap_area = get_overlap_area(
6280 grid_pos_row, grid_pos_col, ref_pos_row, ref_pos_col, block, bsize);
6281 int ref_mi_row = round_floor(grid_pos_row, bh) * mi_height;
6282 int ref_mi_col = round_floor(grid_pos_col, bw) * mi_width;
6283
6284 int64_t mc_flow = tpl_stats->mc_dep_cost -
6285 (tpl_stats->mc_dep_cost * tpl_stats->inter_cost) /
6286 tpl_stats->intra_cost;
6287
6288 int idx, idy;
6289
6290 for (idy = 0; idy < mi_height; ++idy) {
6291 for (idx = 0; idx < mi_width; ++idx) {
6292 TplDepStats *des_stats =
6293 &ref_stats[(ref_mi_row + idy) * ref_tpl_frame->stride +
6294 (ref_mi_col + idx)];
6295
6296 des_stats->mc_flow += (mc_flow * overlap_area) / pix_num;
6297 des_stats->mc_ref_cost +=
6298 ((tpl_stats->intra_cost - tpl_stats->inter_cost) * overlap_area) /
6299 pix_num;
6300 assert(overlap_area >= 0);
6301 }
6302 }
6303 }
6304 }
6305}
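// Numeric sketch of the propagation above, with made-up costs: the model is
// driven at 4x4 granularity (tpl_model_update() passes BLOCK_4X4), so
// pix_num = 16. If a unit has mc_dep_cost = 1000, inter_cost = 400 and
// intra_cost = 800, then
//   mc_flow = 1000 - (1000 * 400) / 800 = 500,
// the share of its dependent cost that inter prediction actually saves. A
// reference unit whose grid cell overlaps the displaced block by
// overlap_area = 6 of those 16 pixels is then credited with
// (500 * 6) / 16 = 187 of that flow.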
6306
Sarah Parkercf644442018-10-11 15:23:44 -07006307static void tpl_model_update(TplDepFrame *tpl_frame, TplDepStats *tpl_stats,
6308 int mi_row, int mi_col, const BLOCK_SIZE bsize) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006309 int idx, idy;
6310 const int mi_height = mi_size_high[bsize];
6311 const int mi_width = mi_size_wide[bsize];
6312
6313 for (idy = 0; idy < mi_height; ++idy) {
6314 for (idx = 0; idx < mi_width; ++idx) {
6315 TplDepStats *tpl_ptr =
6316 &tpl_stats[(mi_row + idy) * tpl_frame->stride + (mi_col + idx)];
6317 tpl_model_update_b(tpl_frame, tpl_ptr, mi_row + idy, mi_col + idx,
6318 BLOCK_4X4);
6319 }
6320 }
6321}
6322
Sarah Parkercf644442018-10-11 15:23:44 -07006323static void get_quantize_error(MACROBLOCK *x, int plane, tran_low_t *coeff,
6324 tran_low_t *qcoeff, tran_low_t *dqcoeff,
6325 TX_SIZE tx_size, int64_t *recon_error,
6326 int64_t *sse) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006327 const struct macroblock_plane *const p = &x->plane[plane];
6328 const SCAN_ORDER *const scan_order = &av1_default_scan_orders[tx_size];
6329 uint16_t eob;
6330 int pix_num = 1 << num_pels_log2_lookup[txsize_to_bsize[tx_size]];
6331 const int shift = tx_size == TX_32X32 ? 0 : 2;
6332
6333 av1_quantize_fp_32x32(coeff, pix_num, p->zbin_QTX, p->round_fp_QTX,
6334 p->quant_fp_QTX, p->quant_shift_QTX, qcoeff, dqcoeff,
6335 p->dequant_QTX, &eob, scan_order->scan,
6336 scan_order->iscan);
6337
6338 *recon_error = av1_block_error(coeff, dqcoeff, pix_num, sse) >> shift;
6339 *recon_error = AOMMAX(*recon_error, 1);
6340
6341 *sse = (*sse) >> shift;
6342 *sse = AOMMAX(*sse, 1);
6343}
6344
Sarah Parkercf644442018-10-11 15:23:44 -07006345static void wht_fwd_txfm(int16_t *src_diff, int bw, tran_low_t *coeff,
6346 TX_SIZE tx_size) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006347 switch (tx_size) {
6348 case TX_8X8: aom_hadamard_8x8(src_diff, bw, coeff); break;
6349 case TX_16X16: aom_hadamard_16x16(src_diff, bw, coeff); break;
6350 case TX_32X32: aom_hadamard_32x32(src_diff, bw, coeff); break;
6351 default: assert(0);
6352 }
6353}
6354
Sarah Parkercf644442018-10-11 15:23:44 -07006355static void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, MACROBLOCKD *xd,
6356 struct scale_factors *sf, GF_PICTURE *gf_picture,
6357 int frame_idx, int16_t *src_diff, tran_low_t *coeff,
6358 tran_low_t *qcoeff, tran_low_t *dqcoeff, int mi_row,
6359 int mi_col, BLOCK_SIZE bsize, TX_SIZE tx_size,
6360 YV12_BUFFER_CONFIG *ref_frame[], uint8_t *predictor,
6361 int64_t *recon_error, int64_t *sse,
6362 TplDepStats *tpl_stats) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006363 AV1_COMMON *cm = &cpi->common;
6364 ThreadData *td = &cpi->td;
6365
6366 const int bw = 4 << mi_size_wide_log2[bsize];
6367 const int bh = 4 << mi_size_high_log2[bsize];
6368 const int pix_num = bw * bh;
6369 int best_rf_idx = -1;
6370 int_mv best_mv;
6371 int64_t best_inter_cost = INT64_MAX;
6372 int64_t inter_cost;
6373 int rf_idx;
6374 const InterpFilters kernel =
6375 av1_make_interp_filters(EIGHTTAP_REGULAR, EIGHTTAP_REGULAR);
6376
6377 int64_t best_intra_cost = INT64_MAX;
6378 int64_t intra_cost;
6379 PREDICTION_MODE mode;
6380 int mb_y_offset = mi_row * MI_SIZE * xd->cur_buf->y_stride + mi_col * MI_SIZE;
6381 MB_MODE_INFO mi_above, mi_left;
6382
6383 memset(tpl_stats, 0, sizeof(*tpl_stats));
6384
6385 xd->mb_to_top_edge = -((mi_row * MI_SIZE) * 8);
6386 xd->mb_to_bottom_edge = ((cm->mi_rows - 1 - mi_row) * MI_SIZE) * 8;
6387 xd->mb_to_left_edge = -((mi_col * MI_SIZE) * 8);
6388 xd->mb_to_right_edge = ((cm->mi_cols - 1 - mi_col) * MI_SIZE) * 8;
6389 xd->above_mbmi = (mi_row > 0) ? &mi_above : NULL;
6390 xd->left_mbmi = (mi_col > 0) ? &mi_left : NULL;
6391
6392 // Intra prediction search
6393 for (mode = DC_PRED; mode <= PAETH_PRED; ++mode) {
6394 uint8_t *src, *dst;
6395 int src_stride, dst_stride;
6396
6397 src = xd->cur_buf->y_buffer + mb_y_offset;
6398 src_stride = xd->cur_buf->y_stride;
6399
6400 dst = &predictor[0];
6401 dst_stride = bw;
6402
6403 xd->mi[0]->sb_type = bsize;
6404 xd->mi[0]->ref_frame[0] = INTRA_FRAME;
6405
6406 av1_predict_intra_block(
6407 cm, xd, block_size_wide[bsize], block_size_high[bsize], tx_size, mode,
6408 0, 0, FILTER_INTRA_MODES, src, src_stride, dst, dst_stride, 0, 0, 0);
6409
6410 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
6411 aom_highbd_subtract_block(bh, bw, src_diff, bw, src, src_stride, dst,
6412 dst_stride, xd->bd);
6413 } else {
6414 aom_subtract_block(bh, bw, src_diff, bw, src, src_stride, dst,
6415 dst_stride);
6416 }
6417
6418 wht_fwd_txfm(src_diff, bw, coeff, tx_size);
6419
6420 intra_cost = aom_satd(coeff, pix_num);
6421
6422 if (intra_cost < best_intra_cost) best_intra_cost = intra_cost;
6423 }
6424
6425 // Motion compensated prediction
6426 best_mv.as_int = 0;
6427
6428 (void)mb_y_offset;
6429 // Motion estimation column boundary
6430 x->mv_limits.col_min = -((mi_col * MI_SIZE) + (17 - 2 * AOM_INTERP_EXTEND));
6431 x->mv_limits.col_max =
6432 ((cm->mi_cols - 1 - mi_col) * MI_SIZE) + (17 - 2 * AOM_INTERP_EXTEND);
6433
6434 for (rf_idx = 0; rf_idx < 7; ++rf_idx) {
6435 if (ref_frame[rf_idx] == NULL) continue;
6436
6437 motion_compensated_prediction(cpi, td, xd->cur_buf->y_buffer + mb_y_offset,
6438 ref_frame[rf_idx]->y_buffer + mb_y_offset,
6439 xd->cur_buf->y_stride, bsize, mi_row, mi_col);
6440
6441    // TODO(jingning): High bit-depth is not yet supported in the next three
6442    // steps.
6443 ConvolveParams conv_params = get_conv_params(0, 0, xd->bd);
6444 WarpTypesAllowed warp_types;
6445 memset(&warp_types, 0, sizeof(WarpTypesAllowed));
6446
6447 av1_build_inter_predictor(
6448 ref_frame[rf_idx]->y_buffer + mb_y_offset, ref_frame[rf_idx]->y_stride,
6449 &predictor[0], bw, &x->best_mv.as_mv, sf, bw, bh, &conv_params, kernel,
6450 &warp_types, mi_col * MI_SIZE, mi_row * MI_SIZE, 0, 0, MV_PRECISION_Q3,
6451 mi_col * MI_SIZE, mi_row * MI_SIZE, xd, 0);
6452 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
6453 aom_highbd_subtract_block(
6454 bh, bw, src_diff, bw, xd->cur_buf->y_buffer + mb_y_offset,
6455 xd->cur_buf->y_stride, &predictor[0], bw, xd->bd);
6456 } else {
6457 aom_subtract_block(bh, bw, src_diff, bw,
6458 xd->cur_buf->y_buffer + mb_y_offset,
6459 xd->cur_buf->y_stride, &predictor[0], bw);
6460 }
6461 wht_fwd_txfm(src_diff, bw, coeff, tx_size);
6462
6463 inter_cost = aom_satd(coeff, pix_num);
6464 if (inter_cost < best_inter_cost) {
6465 best_rf_idx = rf_idx;
6466 best_inter_cost = inter_cost;
6467 best_mv.as_int = x->best_mv.as_int;
6468 get_quantize_error(x, 0, coeff, qcoeff, dqcoeff, tx_size, recon_error,
6469 sse);
6470 }
6471 }
6472 best_intra_cost = AOMMAX(best_intra_cost, 1);
6473 best_inter_cost = AOMMIN(best_intra_cost, best_inter_cost);
6474 tpl_stats->inter_cost = best_inter_cost << TPL_DEP_COST_SCALE_LOG2;
6475 tpl_stats->intra_cost = best_intra_cost << TPL_DEP_COST_SCALE_LOG2;
6476 tpl_stats->mc_dep_cost = tpl_stats->intra_cost + tpl_stats->mc_flow;
6477
6478 tpl_stats->ref_frame_index = gf_picture[frame_idx].ref_frame[best_rf_idx];
6479 tpl_stats->mv.as_int = best_mv.as_int;
6480}
6481
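// Run mode_estimation() over every 32x32 block of the given frame in the GF
// group, store the per-block TPL stats, and propagate the resulting motion
// flow dependencies back to the reference frames.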
Sarah Parkercf644442018-10-11 15:23:44 -07006482static void mc_flow_dispenser(AV1_COMP *cpi, GF_PICTURE *gf_picture,
6483 int frame_idx) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006484 TplDepFrame *tpl_frame = &cpi->tpl_stats[frame_idx];
6485 YV12_BUFFER_CONFIG *this_frame = gf_picture[frame_idx].frame;
6486 YV12_BUFFER_CONFIG *ref_frame[7] = {
6487 NULL, NULL, NULL, NULL, NULL, NULL, NULL
6488 };
6489
6490 AV1_COMMON *cm = &cpi->common;
6491 struct scale_factors sf;
6492 int rdmult, idx;
6493 ThreadData *td = &cpi->td;
6494 MACROBLOCK *x = &td->mb;
6495 MACROBLOCKD *xd = &x->e_mbd;
6496 int mi_row, mi_col;
6497
6498 DECLARE_ALIGNED(16, uint16_t, predictor16[32 * 32 * 3]);
6499 DECLARE_ALIGNED(16, uint8_t, predictor8[32 * 32 * 3]);
6500 uint8_t *predictor;
6501 DECLARE_ALIGNED(16, int16_t, src_diff[32 * 32]);
6502 DECLARE_ALIGNED(16, tran_low_t, coeff[32 * 32]);
6503 DECLARE_ALIGNED(16, tran_low_t, qcoeff[32 * 32]);
6504 DECLARE_ALIGNED(16, tran_low_t, dqcoeff[32 * 32]);
6505
6506 const BLOCK_SIZE bsize = BLOCK_32X32;
6507 const TX_SIZE tx_size = max_txsize_lookup[bsize];
6508 const int mi_height = mi_size_high[bsize];
6509 const int mi_width = mi_size_wide[bsize];
6510 int64_t recon_error, sse;
6511
6512  // Set up the scaling factor.
6513 av1_setup_scale_factors_for_frame(
6514 &sf, this_frame->y_crop_width, this_frame->y_crop_height,
6515 this_frame->y_crop_width, this_frame->y_crop_height);
6516
6517 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH)
6518 predictor = CONVERT_TO_BYTEPTR(predictor16);
6519 else
6520 predictor = predictor8;
6521
6522 // Prepare reference frame pointers. If any reference frame slot is
6523  // unavailable, the pointer will be set to NULL.
6524 for (idx = 0; idx < 7; ++idx) {
6525 int rf_idx = gf_picture[frame_idx].ref_frame[idx];
6526 if (rf_idx != -1) ref_frame[idx] = gf_picture[rf_idx].frame;
6527 }
6528
6529 xd->mi = cm->mi_grid_visible;
6530 xd->mi[0] = cm->mi;
6531 xd->cur_buf = this_frame;
6532
6533 // Get rd multiplier set up.
6534 rdmult = (int)av1_compute_rd_mult(cpi, tpl_frame->base_qindex);
6535 if (rdmult < 1) rdmult = 1;
6536 set_error_per_bit(&cpi->td.mb, rdmult);
6537 av1_initialize_me_consts(cpi, &cpi->td.mb, tpl_frame->base_qindex);
6538
6539 tpl_frame->is_valid = 1;
6540
6541 cm->base_qindex = tpl_frame->base_qindex;
6542 av1_frame_init_quantizer(cpi);
6543
6544 for (mi_row = 0; mi_row < cm->mi_rows; mi_row += mi_height) {
6545 // Motion estimation row boundary
6546 x->mv_limits.row_min = -((mi_row * MI_SIZE) + (17 - 2 * AOM_INTERP_EXTEND));
6547 x->mv_limits.row_max =
6548 (cm->mi_rows - 1 - mi_row) * MI_SIZE + (17 - 2 * AOM_INTERP_EXTEND);
6549 for (mi_col = 0; mi_col < cm->mi_cols; mi_col += mi_width) {
6550 TplDepStats tpl_stats;
6551 mode_estimation(cpi, x, xd, &sf, gf_picture, frame_idx, src_diff, coeff,
6552 qcoeff, dqcoeff, mi_row, mi_col, bsize, tx_size,
6553 ref_frame, predictor, &recon_error, &sse, &tpl_stats);
6554
6555 // Motion flow dependency dispenser.
6556 tpl_model_store(tpl_frame->tpl_stats_ptr, mi_row, mi_col, bsize,
6557 tpl_frame->stride, &tpl_stats);
6558
6559 tpl_model_update(cpi->tpl_stats, tpl_frame->tpl_stats_ptr, mi_row, mi_col,
6560 bsize);
6561 }
6562 }
6563}
6564
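// Build the GOP picture list for the current GF group, reset the TPL stats,
// and run the motion-flow dispenser backwards from the last frame of the
// group down to frame 1.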
6565static void setup_tpl_stats(AV1_COMP *cpi) {
6566 GF_PICTURE gf_picture[MAX_LAG_BUFFERS];
6567 const GF_GROUP *gf_group = &cpi->twopass.gf_group;
6568 int tpl_group_frames = 0;
6569 int frame_idx;
6570
6571 init_gop_frames(cpi, gf_picture, gf_group, &tpl_group_frames);
6572
6573 init_tpl_stats(cpi);
6574
6575 // Backward propagation from tpl_group_frames to 1.
6576 for (frame_idx = tpl_group_frames - 1; frame_idx > 0; --frame_idx)
6577 mc_flow_dispenser(cpi, gf_picture, frame_idx);
6578}
6579
David Turner0308a5a2019-01-07 10:36:16 +00006580// Determine whether there is a forced keyframe pending in the lookahead buffer
6581static int is_forced_keyframe_pending(struct lookahead_ctx *lookahead,
6582 const int up_to_index) {
6583 for (int i = 0; i <= up_to_index; i++) {
6584 const struct lookahead_entry *e = av1_lookahead_peek(lookahead, i);
6585 if (e == NULL) {
6586      // We have reached the end of the lookahead buffer without
6587      // early-returning, so there isn't a forced key-frame pending.
6588 return 0;
6589 } else if (e->flags == AOM_EFLAG_FORCE_KF) {
6590 return 1;
6591 } else {
6592 continue;
6593 }
6594 }
6595  return 0;  // No forced key-frame found in the lookahead buffer.
6596}
6597
6598// Don't allow a show_existing_frame to coincide with an error-resilient frame
6599// or an S-Frame. An exception can be made in the case of a keyframe, since it
6600// does not depend on any previous frames.
6601static int allow_show_existing(const AV1_COMP *const cpi) {
6602 if (cpi->common.current_frame.frame_number == 0) return 0;
6603
6604 const struct lookahead_entry *lookahead_src =
6605 av1_lookahead_peek(cpi->lookahead, 0);
6606 if (lookahead_src == NULL) return 1;
6607
6608 const int is_error_resilient =
6609 cpi->oxcf.error_resilient_mode ||
6610 (lookahead_src->flags & AOM_EFLAG_ERROR_RESILIENT);
6611 const int is_s_frame =
6612 cpi->oxcf.s_frame_mode || (lookahead_src->flags & AOM_EFLAG_SET_S_FRAME);
6613 const int is_key_frame =
6614 (cpi->rc.frames_to_key == 0) || (cpi->frame_flags & FRAMEFLAGS_KEY);
6615 return !(is_error_resilient || is_s_frame) || is_key_frame;
6616}
6617
David Turner85287b42019-01-10 16:11:59 +00006618// Called if this frame is an ARF or ARF2. Also handles forward keyframes.
6619// For an ARF set arf2=0; for ARF2 set arf2=1.
6620// temporal_filtered is set to 1 if we temporally filter the ARF frame, so that
6621// the correct post-filter buffer can be used.
6622static struct lookahead_entry *setup_arf_or_arf2(AV1_COMP *const cpi,
6623 const int arf_src_index,
6624 const int arf2,
6625 int *temporal_filtered) {
6626 AV1_COMMON *const cm = &cpi->common;
6627 RATE_CONTROL *const rc = &cpi->rc;
6628 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
6629
6630 assert(arf_src_index <= rc->frames_to_key);
6631 *temporal_filtered = 0;
6632
6633 struct lookahead_entry *source =
6634 av1_lookahead_peek(cpi->lookahead, arf_src_index);
6635
6636 if (source != NULL) {
6637 cm->showable_frame = 1;
6638 cpi->alt_ref_source = source;
6639
6640 // When arf_src_index == rc->frames_to_key, it indicates a fwd_kf
6641 if (!arf2 && arf_src_index == rc->frames_to_key) {
6642 // Skip temporal filtering and mark as intra_only if we have a fwd_kf
6643 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
6644 int which_arf = gf_group->arf_update_idx[gf_group->index];
6645 cpi->is_arf_filter_off[which_arf] = 1;
6646 cpi->no_show_kf = 1;
6647 } else {
6648 if (oxcf->arnr_max_frames > 0) {
6649 // Produce the filtered ARF frame.
6650 av1_temporal_filter(cpi, arf_src_index);
6651 aom_extend_frame_borders(&cpi->alt_ref_buffer, av1_num_planes(cm));
6652 *temporal_filtered = 1;
6653 }
6654 }
6655 cm->show_frame = 0;
6656
6657 if (oxcf->pass < 2) {
6658      // In the second pass, the buffer update configuration will be set
6659      // in the function av1_rc_get_second_pass_params.
6660 if (!arf2) {
6661 av1_configure_buffer_updates_firstpass(cpi, ARF_UPDATE);
6662 } else {
6663 av1_configure_buffer_updates_firstpass(cpi, INTNL_ARF_UPDATE);
6664 }
6665 }
6666 }
6667 rc->source_alt_ref_pending = 0;
6668 return source;
6669}
6670
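// Top-level per-frame entry point of the encoder. Selects the next source
// frame from the lookahead buffer (show_existing, ARF/ARF2, BWDREF or a
// regular frame), sets up frame-level encoder state, and runs the first-pass
// or the one-/two-pass encode. On success the compressed frame is written to
// dest and its length to *size; returns 0 on success, or -1 / an AOM_CODEC_*
// error code on failure.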
Andrey Norkin795ba872018-03-06 13:24:14 -08006671int av1_get_compressed_data(AV1_COMP *cpi, unsigned int *frame_flags,
6672 size_t *size, uint8_t *dest, int64_t *time_stamp,
6673 int64_t *time_end, int flush,
6674 const aom_rational_t *timebase) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006675 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
6676 AV1_COMMON *const cm = &cpi->common;
David Turnerd2a592e2018-11-16 14:59:31 +00006677 CurrentFrame *const current_frame = &cm->current_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006678 RATE_CONTROL *const rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -07006679 struct aom_usec_timer cmptimer;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006680 struct lookahead_entry *last_source = NULL;
6681 struct lookahead_entry *source = NULL;
6682 int arf_src_index;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006683 int brf_src_index;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006684 int i;
6685
6686#if CONFIG_BITSTREAM_DEBUG
6687 assert(cpi->oxcf.max_threads == 0 &&
6688 "bitstream debug tool does not support multithreading");
6689 bitstream_queue_record_write();
David Turnerd2a592e2018-11-16 14:59:31 +00006690 bitstream_queue_set_frame_write(current_frame->frame_number * 2 +
6691 cm->show_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006692#endif
6693
Dominic Symesd4929012018-01-31 17:32:01 +01006694 cm->showable_frame = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07006695 aom_usec_timer_start(&cmptimer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006696
RogerZhou3b635242017-09-19 10:06:46 -07006697 set_high_precision_mv(cpi, ALTREF_HIGH_PRECISION_MV, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006698
Debargha Mukherjeeba7b8fe2018-03-15 23:10:07 -07006699 // Normal defaults
sarahparker27d686a2018-03-30 17:43:44 -07006700 cm->refresh_frame_context = oxcf->frame_parallel_decoding_mode
6701 ? REFRESH_FRAME_CONTEXT_DISABLED
6702 : REFRESH_FRAME_CONTEXT_BACKWARD;
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01006703 if (oxcf->large_scale_tile)
James Zernf34dfc82018-02-23 16:53:33 -08006704 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_DISABLED;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006705
Wei-Ting Lin2e8d0452018-06-27 09:32:39 -07006706  // Default reference buffer update config.
6707 av1_configure_buffer_updates_firstpass(cpi, LF_UPDATE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006708
Sarah Parkerb9041612018-05-22 19:06:47 -07006709 // Initialize fields related to forward keyframes
Sarah Parkeraf32a7b2018-06-29 14:59:05 -07006710 cpi->no_show_kf = 0;
Zoe Liub4991202017-12-21 15:31:06 -08006711 cm->reset_decoder_state = 0;
Zoe Liub4991202017-12-21 15:31:06 -08006712
David Turner0308a5a2019-01-07 10:36:16 +00006713 if (oxcf->pass == 2 && cm->show_existing_frame && allow_show_existing(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07006714 // Manage the source buffer and flush out the source frame that has been
6715    // coded already; also prepare for PSNR calculation if needed.
Yaowu Xuf883b422016-08-30 14:01:10 -07006716 if ((source = av1_lookahead_pop(cpi->lookahead, flush)) == NULL) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07006717 *size = 0;
6718 return -1;
6719 }
sarahparker21dbca42018-03-30 17:43:44 -07006720 av1_apply_encoding_flags(cpi, source->flags);
Alex Conversef77fd0b2017-04-20 11:00:24 -07006721 cpi->source = &source->img;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006722    // TODO(zoeliu): Track down whether the frame rate needs to be
6723    // adjusted.
6724 *time_stamp = source->ts_start;
6725 *time_end = source->ts_end;
6726
6727 // We need to adjust frame rate for an overlay frame
Zoe Liue04abf72017-04-19 15:37:11 -07006728 if (cpi->rc.is_src_frame_alt_ref) adjust_frame_rate(cpi, source);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006729
David Turner0308a5a2019-01-07 10:36:16 +00006730 if (assign_cur_frame_new_fb(cm) == NULL) return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006731
6732 // Clear down mmx registers
Yaowu Xuf883b422016-08-30 14:01:10 -07006733 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006734
6735 // Start with a 0 size frame.
6736 *size = 0;
6737
6738 // We need to update the gf_group for show_existing overlay frame
Zoe Liue04abf72017-04-19 15:37:11 -07006739 if (cpi->rc.is_src_frame_alt_ref) av1_rc_get_second_pass_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006740
Tom Finegane4099e32018-01-23 12:01:51 -08006741 if (Pass2Encode(cpi, size, dest, frame_flags) != AOM_CODEC_OK)
6742 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006743
6744 if (cpi->b_calculate_psnr) generate_psnr_packet(cpi);
6745
6746#if CONFIG_INTERNAL_STATS
Angie Chiang08a22a62017-07-17 17:29:17 -07006747 compute_internal_stats(cpi, (int)(*size));
Yaowu Xuc27fc142016-08-22 16:08:15 -07006748#endif // CONFIG_INTERNAL_STATS
6749
6750 // Clear down mmx registers
Yaowu Xuf883b422016-08-30 14:01:10 -07006751 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006752
6753 cm->show_existing_frame = 0;
6754 return 0;
6755 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006756
David Turner85287b42019-01-10 16:11:59 +00006757 int temporal_filtered = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006758 // Should we encode an arf frame.
6759 arf_src_index = get_arf_src_index(cpi);
David Turner0308a5a2019-01-07 10:36:16 +00006760 if (arf_src_index &&
6761 is_forced_keyframe_pending(cpi->lookahead, arf_src_index)) {
6762 arf_src_index = 0;
6763 flush = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006764 }
6765
6766 if (arf_src_index) {
David Turner85287b42019-01-10 16:11:59 +00006767 source = setup_arf_or_arf2(cpi, arf_src_index, 0, &temporal_filtered);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006768 }
6769
David Turner85287b42019-01-10 16:11:59 +00006770  // Should we encode an arf2 frame (mutually exclusive with ARF)?
Zoe Liue9b15e22017-07-19 15:53:01 -07006771 arf_src_index = get_arf2_src_index(cpi);
David Turner0308a5a2019-01-07 10:36:16 +00006772 if (arf_src_index &&
6773 is_forced_keyframe_pending(cpi->lookahead, arf_src_index)) {
6774 arf_src_index = 0;
6775 flush = 1;
Zoe Liue9b15e22017-07-19 15:53:01 -07006776 }
6777
6778 if (arf_src_index) {
David Turner85287b42019-01-10 16:11:59 +00006779 source = setup_arf_or_arf2(cpi, arf_src_index, 1, &temporal_filtered);
Zoe Liue9b15e22017-07-19 15:53:01 -07006780 }
Zoe Liue9b15e22017-07-19 15:53:01 -07006781
Yaowu Xuc27fc142016-08-22 16:08:15 -07006782 rc->is_bwd_ref_frame = 0;
6783 brf_src_index = get_brf_src_index(cpi);
6784 if (brf_src_index) {
6785 assert(brf_src_index <= rc->frames_to_key);
Yaowu Xuf883b422016-08-30 14:01:10 -07006786 if ((source = av1_lookahead_peek(cpi->lookahead, brf_src_index)) != NULL) {
Dominic Symesd4929012018-01-31 17:32:01 +01006787 cm->showable_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006788 cm->show_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006789
Wei-Ting Lin2e8d0452018-06-27 09:32:39 -07006790 if (oxcf->pass < 2) {
6791        // In the second pass, the buffer update configuration will be set
6792        // in the function av1_rc_get_second_pass_params.
6793 av1_configure_buffer_updates_firstpass(cpi, BIPRED_UPDATE);
6794 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006795 }
6796 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006797
6798 if (!source) {
6799 // Get last frame source.
David Turnerd2a592e2018-11-16 14:59:31 +00006800 if (current_frame->frame_number > 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006801 if ((last_source = av1_lookahead_peek(cpi->lookahead, -1)) == NULL)
Yaowu Xuc27fc142016-08-22 16:08:15 -07006802 return -1;
6803 }
David Turnerd2a592e2018-11-16 14:59:31 +00006804 if (current_frame->frame_number > 0) assert(last_source != NULL);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006805 // Read in the source frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07006806 source = av1_lookahead_pop(cpi->lookahead, flush);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006807
6808 if (source != NULL) {
6809 cm->show_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006810
6811 // Check to see if the frame should be encoded as an arf overlay.
6812 check_src_altref(cpi, source);
6813 }
6814 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006815 if (source) {
David Turner85287b42019-01-10 16:11:59 +00006816 if (temporal_filtered) {
6817 cpi->unscaled_source = &cpi->alt_ref_buffer;
6818 cpi->source = &cpi->alt_ref_buffer;
6819 } else {
6820 cpi->unscaled_source = &source->img;
6821 cpi->source = &source->img;
6822 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006823 cpi->unscaled_last_source = last_source != NULL ? &last_source->img : NULL;
6824
6825 *time_stamp = source->ts_start;
6826 *time_end = source->ts_end;
Sarah Parker73556772018-03-28 18:28:05 -07006827 av1_apply_encoding_flags(cpi, source->flags);
Yaowu Xuf883b422016-08-30 14:01:10 -07006828 *frame_flags = (source->flags & AOM_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006829
6830 } else {
6831 *size = 0;
6832 if (flush && oxcf->pass == 1 && !cpi->twopass.first_pass_done) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006833 av1_end_first_pass(cpi); /* get last stats packet */
Yaowu Xuc27fc142016-08-22 16:08:15 -07006834 cpi->twopass.first_pass_done = 1;
6835 }
6836 return -1;
6837 }
6838
6839 if (source->ts_start < cpi->first_time_stamp_ever) {
6840 cpi->first_time_stamp_ever = source->ts_start;
6841 cpi->last_end_time_stamp_seen = source->ts_start;
6842 }
6843
6844 // Clear down mmx registers
Yaowu Xuf883b422016-08-30 14:01:10 -07006845 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006846
6847 // adjust frame rates based on timestamps given
6848 if (cm->show_frame) adjust_frame_rate(cpi, source);
6849
David Turner0308a5a2019-01-07 10:36:16 +00006850 if (assign_cur_frame_new_fb(cm) == NULL) return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006851
Zoe Liuf452fdf2017-11-02 23:08:12 -07006852 // Retain the RF_LEVEL for the current newly coded frame.
David Turner1bcefb32018-11-19 17:54:00 +00006853 cm->cur_frame->frame_rf_level =
Zoe Liuf452fdf2017-11-02 23:08:12 -07006854 cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
Zoe Liuf452fdf2017-11-02 23:08:12 -07006855
Yaowu Xu9b0f7032017-07-31 11:01:19 -07006856 cm->cur_frame->buf.buf_8bit_valid = 0;
Neil Birkbeckeb895ef2018-03-14 17:51:03 -07006857
Neil Birkbecka2893ab2018-06-08 14:45:13 -07006858 if (cpi->film_grain_table) {
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07006859 cm->seq_params.film_grain_params_present = aom_film_grain_table_lookup(
Neil Birkbecka2893ab2018-06-08 14:45:13 -07006860 cpi->film_grain_table, *time_stamp, *time_end, 0 /* =erase */,
Neil Birkbeckeb895ef2018-03-14 17:51:03 -07006861 &cm->film_grain_params);
6862 }
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07006863 cm->cur_frame->film_grain_params_present =
6864 cm->seq_params.film_grain_params_present;
Zoe Liu6cfaff92016-10-18 17:12:11 -07006865
Andrey Norkin795ba872018-03-06 13:24:14 -08006866 // only one operating point supported now
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07006867 const int64_t pts64 = ticks_to_timebase_units(timebase, *time_stamp);
6868 if (pts64 < 0 || pts64 > UINT32_MAX) return AOM_CODEC_ERROR;
6869 cpi->common.frame_presentation_time = (uint32_t)pts64;
Andrey Norkin795ba872018-03-06 13:24:14 -08006870
Yaowu Xuc27fc142016-08-22 16:08:15 -07006871 // Start with a 0 size frame.
6872 *size = 0;
6873
6874 cpi->frame_flags = *frame_flags;
6875
6876 if (oxcf->pass == 2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006877 av1_rc_get_second_pass_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006878 } else if (oxcf->pass == 1) {
Fergus Simpsonbc189932017-05-16 17:02:39 -07006879 setup_frame_size(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006880 }
6881
6882 if (cpi->oxcf.pass != 0 || frame_is_intra_only(cm) == 1) {
David Turnere7ebf902018-12-04 14:04:55 +00006883 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) cpi->scaled_ref_buf[i] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006884 }
6885
Yaowu Xuc27fc142016-08-22 16:08:15 -07006886 cm->using_qmatrix = cpi->oxcf.using_qm;
6887 cm->min_qmlevel = cpi->oxcf.qm_minlevel;
6888 cm->max_qmlevel = cpi->oxcf.qm_maxlevel;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006889
David Turner936235c2018-11-28 13:42:01 +00006890 if (cm->seq_params.frame_id_numbers_present_flag && *time_stamp == 0) {
6891 cpi->common.current_frame_id = -1;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01006892 }
Zoe Liuca0cd3f2018-02-26 15:07:50 -08006893
Debargha Mukherjeee41a6672018-02-27 11:56:31 -08006894 if (oxcf->pass != 1 && cpi->common.allow_screen_content_tools &&
6895 !frame_is_intra_only(cm)) {
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00006896 if (cpi->common.seq_params.force_integer_mv == 2) {
RogerZhou3b635242017-09-19 10:06:46 -07006897 struct lookahead_entry *previous_entry =
Debargha Mukherjeea71e3db2018-02-28 07:47:17 -08006898 av1_lookahead_peek(cpi->lookahead, cpi->previous_index);
6899 if (!previous_entry)
6900 cpi->common.cur_frame_force_integer_mv = 0;
6901 else
6902 cpi->common.cur_frame_force_integer_mv = is_integer_mv(
6903 cpi, cpi->source, &previous_entry->img, cpi->previous_hash_table);
RogerZhou3b635242017-09-19 10:06:46 -07006904 } else {
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00006905 cpi->common.cur_frame_force_integer_mv =
6906 cpi->common.seq_params.force_integer_mv;
RogerZhou3b635242017-09-19 10:06:46 -07006907 }
6908 } else {
RogerZhou10a03802017-10-26 11:49:48 -07006909 cpi->common.cur_frame_force_integer_mv = 0;
RogerZhou3b635242017-09-19 10:06:46 -07006910 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01006911
Yue Chen7cae98f2018-08-24 10:43:16 -07006912 if (cpi->twopass.gf_group.index == 1 && cpi->oxcf.enable_tpl_model) {
6913 set_frame_size(cpi, cm->width, cm->height);
6914 setup_tpl_stats(cpi);
6915 }
6916
Yaowu Xuc27fc142016-08-22 16:08:15 -07006917 if (oxcf->pass == 1) {
6918 cpi->td.mb.e_mbd.lossless[0] = is_lossless_requested(oxcf);
Yaowu Xuf883b422016-08-30 14:01:10 -07006919 av1_first_pass(cpi, source);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006920 } else if (oxcf->pass == 2) {
Tom Finegane4099e32018-01-23 12:01:51 -08006921 if (Pass2Encode(cpi, size, dest, frame_flags) != AOM_CODEC_OK)
6922 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006923 } else {
6924 // One pass encode
David Turner056f7cd2019-01-07 17:48:13 +00006925 if (Pass0Encode(cpi, size, dest, frame_flags) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08006926 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006927 }
RogerZhoucc5d35d2017-08-07 22:20:15 -07006928 if (oxcf->pass != 1 && cpi->common.allow_screen_content_tools) {
Debargha Mukherjeee41a6672018-02-27 11:56:31 -08006929 cpi->previous_hash_table = &cm->cur_frame->hash_table;
RogerZhou3b635242017-09-19 10:06:46 -07006930 {
6931 int l;
6932 for (l = -MAX_PRE_FRAMES; l < cpi->lookahead->max_sz; l++) {
6933 if ((cpi->lookahead->buf + l) == source) {
Debargha Mukherjeee41a6672018-02-27 11:56:31 -08006934 cpi->previous_index = l;
RogerZhou3b635242017-09-19 10:06:46 -07006935 break;
6936 }
6937 }
6938
6939 if (l == cpi->lookahead->max_sz) {
6940 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
6941 "Failed to find last frame original buffer");
6942 }
6943 }
RogerZhoucc5d35d2017-08-07 22:20:15 -07006944 }
6945
Yunqing Wang267e3272017-11-09 14:23:22 -08006946 if (!cm->large_scale_tile) {
David Turner1bcefb32018-11-19 17:54:00 +00006947 cm->cur_frame->frame_context = *cm->fc;
Yunqing Wang267e3272017-11-09 14:23:22 -08006948 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006949
Yunqing Wangb041d8a2017-11-15 12:31:18 -08006950#define EXT_TILE_DEBUG 0
6951#if EXT_TILE_DEBUG
6952 if (cm->large_scale_tile && oxcf->pass == 2) {
6953 char fn[20] = "./fc";
David Turnerd2a592e2018-11-16 14:59:31 +00006954 fn[4] = current_frame->frame_number / 100 + '0';
6955 fn[5] = (current_frame->frame_number % 100) / 10 + '0';
6956 fn[6] = (current_frame->frame_number % 10) + '0';
Yunqing Wangb041d8a2017-11-15 12:31:18 -08006957 fn[7] = '\0';
6958 av1_print_frame_contexts(cm->fc, fn);
6959 }
6960#endif // EXT_TILE_DEBUG
6961#undef EXT_TILE_DEBUG
Yaowu Xuc7119a72018-03-29 09:59:37 -07006962
Dominic Symesd4929012018-01-31 17:32:01 +01006963 cm->showable_frame = !cm->show_frame && cm->showable_frame;
Yunqing Wangb041d8a2017-11-15 12:31:18 -08006964
Yaowu Xuc27fc142016-08-22 16:08:15 -07006965 // No frame encoded, or frame was dropped, release scaled references.
6966 if ((*size == 0) && (frame_is_intra_only(cm) == 0)) {
6967 release_scaled_references(cpi);
6968 }
6969
6970 if (*size > 0) {
Debargha Mukherjee8adee102018-09-25 11:01:00 -07006971 cpi->droppable = is_frame_droppable(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006972 }
6973
Yaowu Xuf883b422016-08-30 14:01:10 -07006974 aom_usec_timer_mark(&cmptimer);
6975 cpi->time_compress_data += aom_usec_timer_elapsed(&cmptimer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006976
6977 if (cpi->b_calculate_psnr && oxcf->pass != 1 && cm->show_frame)
6978 generate_psnr_packet(cpi);
6979
6980#if CONFIG_INTERNAL_STATS
6981 if (oxcf->pass != 1) {
Angie Chiang08a22a62017-07-17 17:29:17 -07006982 compute_internal_stats(cpi, (int)(*size));
Yaowu Xuc27fc142016-08-22 16:08:15 -07006983 }
6984#endif // CONFIG_INTERNAL_STATS
Debargha Mukherjee0857e662019-01-04 16:22:09 -08006985#if CONFIG_SPEED_STATS
6986 if (cpi->oxcf.pass != 1) {
6987 cpi->tx_search_count += cpi->td.mb.tx_search_count;
6988 cpi->td.mb.tx_search_count = 0;
6989 }
6990#endif // CONFIG_SPEED_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07006991
Yaowu Xuf883b422016-08-30 14:01:10 -07006992 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006993
6994 return 0;
6995}
6996
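// Copy the current frame's buffer descriptor into dest for preview, adjusting
// the reported luma/chroma dimensions to the coded frame size. Returns 0 on
// success, -1 if there is no displayable frame.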
Yaowu Xuf883b422016-08-30 14:01:10 -07006997int av1_get_preview_raw_frame(AV1_COMP *cpi, YV12_BUFFER_CONFIG *dest) {
6998 AV1_COMMON *cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006999 if (!cm->show_frame) {
7000 return -1;
7001 } else {
7002 int ret;
David Turnerc29e1a92018-12-06 14:10:14 +00007003 if (cm->cur_frame != NULL) {
7004 *dest = cm->cur_frame->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007005 dest->y_width = cm->width;
7006 dest->y_height = cm->height;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07007007 dest->uv_width = cm->width >> cm->seq_params.subsampling_x;
7008 dest->uv_height = cm->height >> cm->seq_params.subsampling_y;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007009 ret = 0;
7010 } else {
7011 ret = -1;
7012 }
Yaowu Xuf883b422016-08-30 14:01:10 -07007013 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07007014 return ret;
7015 }
7016}
7017
Yaowu Xuf883b422016-08-30 14:01:10 -07007018int av1_get_last_show_frame(AV1_COMP *cpi, YV12_BUFFER_CONFIG *frame) {
David Turnere7ebf902018-12-04 14:04:55 +00007019 if (cpi->last_show_frame_buf == NULL) return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007020
David Turnere7ebf902018-12-04 14:04:55 +00007021 *frame = cpi->last_show_frame_buf->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007022 return 0;
7023}
7024
Yunqing Wangff9bfca2018-06-06 11:46:08 -07007025static int equal_dimensions_and_border(const YV12_BUFFER_CONFIG *a,
7026 const YV12_BUFFER_CONFIG *b) {
7027 return a->y_height == b->y_height && a->y_width == b->y_width &&
7028 a->uv_height == b->uv_height && a->uv_width == b->uv_width &&
7029 a->y_stride == b->y_stride && a->uv_stride == b->uv_stride &&
7030 a->border == b->border &&
7031 (a->flags & YV12_FLAG_HIGHBITDEPTH) ==
7032 (b->flags & YV12_FLAG_HIGHBITDEPTH);
7033}
7034
Yunqing Wang93b18f32018-06-08 21:08:29 -07007035aom_codec_err_t av1_copy_new_frame_enc(AV1_COMMON *cm,
7036 YV12_BUFFER_CONFIG *new_frame,
7037 YV12_BUFFER_CONFIG *sd) {
Yunqing Wangff9bfca2018-06-06 11:46:08 -07007038 const int num_planes = av1_num_planes(cm);
7039 if (!equal_dimensions_and_border(new_frame, sd))
7040 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
7041 "Incorrect buffer dimensions");
7042 else
7043 aom_yv12_copy_frame(new_frame, sd, num_planes);
7044
7045 return cm->error.error_code;
7046}
7047
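// Convert the requested horizontal/vertical scaling modes into a pending
// resize width and height, rounding up to the next whole pixel. Returns 0 on
// success, -1 for an unsupported scaling mode.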
Yaowu Xuf883b422016-08-30 14:01:10 -07007048int av1_set_internal_size(AV1_COMP *cpi, AOM_SCALING horiz_mode,
7049 AOM_SCALING vert_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07007050 int hr = 0, hs = 0, vr = 0, vs = 0;
7051
7052 if (horiz_mode > ONETWO || vert_mode > ONETWO) return -1;
7053
7054 Scale2Ratio(horiz_mode, &hr, &hs);
7055 Scale2Ratio(vert_mode, &vr, &vs);
7056
7057 // always go to the next whole number
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07007058 cpi->resize_pending_width = (hs - 1 + cpi->oxcf.width * hr) / hs;
7059 cpi->resize_pending_height = (vs - 1 + cpi->oxcf.height * vr) / vs;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007060
7061 return 0;
7062}
7063
Yaowu Xuf883b422016-08-30 14:01:10 -07007064int av1_get_quantizer(AV1_COMP *cpi) { return cpi->common.base_qindex; }
Yaowu Xuc27fc142016-08-22 16:08:15 -07007065
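// Rewrite, in place, a frame made of Section 5 (low-overhead) OBUs so that
// every OBU is preceded by a LEB128-coded size of its header plus payload and
// has its obu_has_size_field flag cleared. *frame_size is updated to the new
// total length.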
Soo-Chul Han29c46fb2018-03-23 16:02:00 -04007066int av1_convert_sect5obus_to_annexb(uint8_t *buffer, size_t *frame_size) {
7067 size_t output_size = 0;
7068 size_t total_bytes_read = 0;
7069 size_t remaining_size = *frame_size;
7070 uint8_t *buff_ptr = buffer;
7071
7072  // go through each OBU
7073 while (total_bytes_read < *frame_size) {
7074 uint8_t saved_obu_header[2];
7075 uint64_t obu_payload_size;
7076 size_t length_of_payload_size;
7077 size_t length_of_obu_size;
7078 uint32_t obu_header_size = (buff_ptr[0] >> 2) & 0x1 ? 2 : 1;
7079 size_t obu_bytes_read = obu_header_size; // bytes read for current obu
7080
7081 // save the obu header (1 or 2 bytes)
7082 memmove(saved_obu_header, buff_ptr, obu_header_size);
7083 // clear the obu_has_size_field
7084 saved_obu_header[0] = saved_obu_header[0] & (~0x2);
7085
7086 // get the payload_size and length of payload_size
7087 if (aom_uleb_decode(buff_ptr + obu_header_size, remaining_size,
7088 &obu_payload_size, &length_of_payload_size) != 0) {
7089 return AOM_CODEC_ERROR;
7090 }
7091 obu_bytes_read += length_of_payload_size;
7092
7093    // calculate the length of the size field for the obu header plus payload
7094 length_of_obu_size =
7095 aom_uleb_size_in_bytes((uint64_t)(obu_header_size + obu_payload_size));
7096
7097    // move the rest of the data to its new location
7098 memmove(buff_ptr + length_of_obu_size + obu_header_size,
7099 buff_ptr + obu_bytes_read, remaining_size - obu_bytes_read);
Yaowu Xu9e494202018-04-03 11:19:49 -07007100 obu_bytes_read += (size_t)obu_payload_size;
Soo-Chul Han29c46fb2018-03-23 16:02:00 -04007101
7102 // write the new obu size
7103 const uint64_t obu_size = obu_header_size + obu_payload_size;
7104 size_t coded_obu_size;
7105 if (aom_uleb_encode(obu_size, sizeof(obu_size), buff_ptr,
7106 &coded_obu_size) != 0) {
7107 return AOM_CODEC_ERROR;
7108 }
7109
7110 // write the saved (modified) obu_header following obu size
7111 memmove(buff_ptr + length_of_obu_size, saved_obu_header, obu_header_size);
7112
7113 total_bytes_read += obu_bytes_read;
7114 remaining_size -= obu_bytes_read;
7115 buff_ptr += length_of_obu_size + obu_size;
Yaowu Xu9e494202018-04-03 11:19:49 -07007116 output_size += length_of_obu_size + (size_t)obu_size;
Soo-Chul Han29c46fb2018-03-23 16:02:00 -04007117 }
7118
7119 *frame_size = output_size;
7120 return AOM_CODEC_OK;
7121}
7122
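// Translate the external AOM_EFLAG_* flags supplied with a frame into the
// encoder's reference-use and reference-update masks and the ext_* overrides
// (ref_frame_mvs, error resilience, S-frame, primary_ref_none, entropy
// update).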
Yaowu Xuf883b422016-08-30 14:01:10 -07007123void av1_apply_encoding_flags(AV1_COMP *cpi, aom_enc_frame_flags_t flags) {
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007124 // TODO(yunqingwang): For what references to use, external encoding flags
7125 // should be consistent with internal reference frame selection. Need to
7126  // ensure that there is no conflict between the two. In the AV1 encoder, the
7127  // priority rank for the 7 reference frames is: LAST, ALTREF, LAST2, LAST3,
7128 // GOLDEN, BWDREF, ALTREF2. If only one reference frame is used, it must be
7129 // LAST.
Yunqing Wangf2e7a392017-11-08 00:27:21 -08007130 cpi->ext_ref_frame_flags = AOM_REFFRAME_ALL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007131 if (flags &
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007132 (AOM_EFLAG_NO_REF_LAST | AOM_EFLAG_NO_REF_LAST2 | AOM_EFLAG_NO_REF_LAST3 |
7133 AOM_EFLAG_NO_REF_GF | AOM_EFLAG_NO_REF_ARF | AOM_EFLAG_NO_REF_BWD |
7134 AOM_EFLAG_NO_REF_ARF2)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07007135 if (flags & AOM_EFLAG_NO_REF_LAST) {
Yunqing Wangf2e7a392017-11-08 00:27:21 -08007136 cpi->ext_ref_frame_flags = 0;
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007137 } else {
7138 int ref = AOM_REFFRAME_ALL;
7139
7140 if (flags & AOM_EFLAG_NO_REF_LAST2) ref ^= AOM_LAST2_FLAG;
7141 if (flags & AOM_EFLAG_NO_REF_LAST3) ref ^= AOM_LAST3_FLAG;
7142
7143 if (flags & AOM_EFLAG_NO_REF_GF) ref ^= AOM_GOLD_FLAG;
7144
7145 if (flags & AOM_EFLAG_NO_REF_ARF) {
7146 ref ^= AOM_ALT_FLAG;
7147 ref ^= AOM_BWD_FLAG;
7148 ref ^= AOM_ALT2_FLAG;
7149 } else {
7150 if (flags & AOM_EFLAG_NO_REF_BWD) ref ^= AOM_BWD_FLAG;
7151 if (flags & AOM_EFLAG_NO_REF_ARF2) ref ^= AOM_ALT2_FLAG;
7152 }
7153
7154 av1_use_as_reference(cpi, ref);
Yaowu Xuc27fc142016-08-22 16:08:15 -07007155 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07007156 }
7157
7158 if (flags &
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007159 (AOM_EFLAG_NO_UPD_LAST | AOM_EFLAG_NO_UPD_GF | AOM_EFLAG_NO_UPD_ARF)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07007160 int upd = AOM_REFFRAME_ALL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007161
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007162 // Refreshing LAST/LAST2/LAST3 is handled by 1 common flag.
7163 if (flags & AOM_EFLAG_NO_UPD_LAST) upd ^= AOM_LAST_FLAG;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007164
Yaowu Xuf883b422016-08-30 14:01:10 -07007165 if (flags & AOM_EFLAG_NO_UPD_GF) upd ^= AOM_GOLD_FLAG;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007166
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007167 if (flags & AOM_EFLAG_NO_UPD_ARF) {
7168 upd ^= AOM_ALT_FLAG;
7169 upd ^= AOM_BWD_FLAG;
7170 upd ^= AOM_ALT2_FLAG;
7171 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07007172
Yaowu Xuf883b422016-08-30 14:01:10 -07007173 av1_update_reference(cpi, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07007174 }
7175
sarahparker21dbca42018-03-30 17:43:44 -07007176 cpi->ext_use_ref_frame_mvs = cpi->oxcf.allow_ref_frame_mvs &
7177 ((flags & AOM_EFLAG_NO_REF_FRAME_MVS) == 0);
sarahparker27d686a2018-03-30 17:43:44 -07007178 cpi->ext_use_error_resilient = cpi->oxcf.error_resilient_mode |
7179 ((flags & AOM_EFLAG_ERROR_RESILIENT) != 0);
sarahparker9806fed2018-03-30 17:43:44 -07007180 cpi->ext_use_s_frame =
7181 cpi->oxcf.s_frame_mode | ((flags & AOM_EFLAG_SET_S_FRAME) != 0);
Sarah Parker50b6d6e2018-04-11 19:21:54 -07007182 cpi->ext_use_primary_ref_none = (flags & AOM_EFLAG_SET_PRIMARY_REF_NONE) != 0;
sarahparker21dbca42018-03-30 17:43:44 -07007183
Yaowu Xuf883b422016-08-30 14:01:10 -07007184 if (flags & AOM_EFLAG_NO_UPD_ENTROPY) {
7185 av1_update_entropy(cpi, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07007186 }
7187}
Andrey Norkin795ba872018-03-06 13:24:14 -08007188
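// Convert between presentation times expressed in the user timebase and the
// encoder's internal tick scale (TICKS_PER_SEC ticks per second). As a rough
// example, with a timebase of num/den = 1/30, frame n maps to
// n * TICKS_PER_SEC / 30 ticks, and ticks_to_timebase_units() rounds that
// value back to n.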
Andrey Norkin795ba872018-03-06 13:24:14 -08007189int64_t timebase_units_to_ticks(const aom_rational_t *timebase, int64_t n) {
7190 return n * TICKS_PER_SEC * timebase->num / timebase->den;
7191}
7192
7193int64_t ticks_to_timebase_units(const aom_rational_t *timebase, int64_t n) {
7194 const int64_t round = TICKS_PER_SEC * timebase->num / 2 - 1;
7195 return (n * timebase->den + round) / timebase->num / TICKS_PER_SEC;
7196}
Tom Fineganf8d6a162018-08-21 10:47:55 -07007197
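// Build a heap-allocated aom_fixed_buf_t containing the global headers for
// the stream: an OBU_SEQUENCE_HEADER preceded by its OBU header and a
// LEB128-coded size field. Returns NULL on failure. The caller owns the
// result and must free both global_headers->buf and the struct itself.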
7198aom_fixed_buf_t *av1_get_global_headers(AV1_COMP *cpi) {
7199 if (!cpi) return NULL;
7200
7201 uint8_t header_buf[512] = { 0 };
7202 const uint32_t sequence_header_size =
7203 write_sequence_header_obu(cpi, &header_buf[0]);
7204 assert(sequence_header_size <= sizeof(header_buf));
7205 if (sequence_header_size == 0) return NULL;
7206
7207 const size_t obu_header_size = 1;
7208 const size_t size_field_size = aom_uleb_size_in_bytes(sequence_header_size);
7209 const size_t payload_offset = obu_header_size + size_field_size;
7210
7211 if (payload_offset + sequence_header_size > sizeof(header_buf)) return NULL;
7212 memmove(&header_buf[payload_offset], &header_buf[0], sequence_header_size);
7213
7214 if (write_obu_header(OBU_SEQUENCE_HEADER, 0, &header_buf[0]) !=
7215 obu_header_size) {
7216 return NULL;
7217 }
7218
7219 size_t coded_size_field_size = 0;
7220 if (aom_uleb_encode(sequence_header_size, size_field_size,
7221 &header_buf[obu_header_size],
7222 &coded_size_field_size) != 0) {
7223 return NULL;
7224 }
7225 assert(coded_size_field_size == size_field_size);
7226
7227 aom_fixed_buf_t *global_headers =
7228 (aom_fixed_buf_t *)malloc(sizeof(*global_headers));
7229 if (!global_headers) return NULL;
7230
7231 const size_t global_header_buf_size =
7232 obu_header_size + size_field_size + sequence_header_size;
7233
7234 global_headers->buf = malloc(global_header_buf_size);
7235 if (!global_headers->buf) {
7236 free(global_headers);
7237 return NULL;
7238 }
7239
7240 memcpy(global_headers->buf, &header_buf[0], global_header_buf_size);
7241 global_headers->sz = global_header_buf_size;
7242 return global_headers;
7243}