blob: 3fae217ec2f8d9861fc043d692a4fcd5119baa92 [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Urvang Joshi8a02d762016-07-28 15:51:12 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Urvang Joshi8a02d762016-07-28 15:51:12 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <limits.h>
13#include <math.h>
14#include <stdio.h>
15
Tom Finegan60e653d2018-05-22 11:34:58 -070016#include "config/aom_config.h"
Tom Finegan44702c82018-05-22 13:00:39 -070017#include "config/aom_dsp_rtcd.h"
18#include "config/aom_scale_rtcd.h"
Yaowu Xufa3721d2018-07-30 14:38:49 -070019#include "config/av1_rtcd.h"
20
21#include "aom_dsp/aom_dsp_common.h"
22#include "aom_dsp/aom_filter.h"
23#if CONFIG_DENOISE
24#include "aom_dsp/grain_table.h"
25#include "aom_dsp/noise_util.h"
26#include "aom_dsp/noise_model.h"
27#endif
28#include "aom_dsp/psnr.h"
29#if CONFIG_INTERNAL_STATS
30#include "aom_dsp/ssim.h"
31#endif
32#include "aom_ports/aom_timer.h"
33#include "aom_ports/mem.h"
34#include "aom_ports/system_state.h"
35#include "aom_scale/aom_scale.h"
36#if CONFIG_BITSTREAM_DEBUG || CONFIG_MISMATCH_DEBUG
37#include "aom_util/debug_util.h"
38#endif // CONFIG_BITSTREAM_DEBUG || CONFIG_MISMATCH_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070039
40#include "av1/common/alloccommon.h"
Steinar Midtskogena9d41e82017-03-17 12:48:15 +010041#include "av1/common/cdef.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070042#include "av1/common/filter.h"
43#include "av1/common/idct.h"
44#include "av1/common/reconinter.h"
45#include "av1/common/reconintra.h"
Fergus Simpsond0565002017-03-27 16:51:52 -070046#include "av1/common/resize.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070047#include "av1/common/tile_common.h"
48
Ravi Chaudharyc5e74692018-10-08 16:05:38 +053049#include "av1/encoder/av1_multi_thread.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070050#include "av1/encoder/aq_complexity.h"
51#include "av1/encoder/aq_cyclicrefresh.h"
52#include "av1/encoder/aq_variance.h"
53#include "av1/encoder/bitstream.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070054#include "av1/encoder/context_tree.h"
55#include "av1/encoder/encodeframe.h"
56#include "av1/encoder/encodemv.h"
David Turner056f7cd2019-01-07 17:48:13 +000057#include "av1/encoder/encode_strategy.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070058#include "av1/encoder/encoder.h"
Angie Chiangf0fbf9d2017-03-15 15:01:22 -070059#include "av1/encoder/encodetxb.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070060#include "av1/encoder/ethread.h"
61#include "av1/encoder/firstpass.h"
Yaowu Xufa3721d2018-07-30 14:38:49 -070062#include "av1/encoder/grain_test_vectors.h"
RogerZhoucc5d35d2017-08-07 22:20:15 -070063#include "av1/encoder/hash_motion.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070064#include "av1/encoder/mbgraph.h"
65#include "av1/encoder/picklpf.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070066#include "av1/encoder/pickrst.h"
Debargha Mukherjee7166f222017-09-05 21:32:42 -070067#include "av1/encoder/random.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070068#include "av1/encoder/ratectrl.h"
69#include "av1/encoder/rd.h"
Debargha Mukherjeedf713102018-10-02 12:33:32 -070070#include "av1/encoder/rdopt.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070071#include "av1/encoder/segmentation.h"
72#include "av1/encoder/speed_features.h"
73#include "av1/encoder/temporal_filter.h"
Yue Chen7cae98f2018-08-24 10:43:16 -070074#include "av1/encoder/reconinter_enc.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070075
Imdad Sardharwallae68aa8a2018-03-07 18:52:54 +000076#define DEFAULT_EXPLICIT_ORDER_HINT_BITS 7
Imdad Sardharwallae68aa8a2018-03-07 18:52:54 +000077
Andrey Norkin795ba872018-03-06 13:24:14 -080078// av1 uses 10,000,000 ticks/second as time stamp
79#define TICKS_PER_SEC 10000000LL
Andrey Norkin795ba872018-03-06 13:24:14 -080080
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -080081#if CONFIG_ENTROPY_STATS
82FRAME_COUNTS aggregate_fc;
83#endif // CONFIG_ENTROPY_STATS
84
Yaowu Xuc27fc142016-08-22 16:08:15 -070085#define AM_SEGMENT_ID_INACTIVE 7
86#define AM_SEGMENT_ID_ACTIVE 0
87
Johannb0ef6ff2018-02-08 14:32:21 -080088// Whether to use high precision mv for altref computation.
89#define ALTREF_HIGH_PRECISION_MV 1
90
91// Q threshold for high precision mv. Choose a very high value for now so that
92// HIGH_PRECISION is always chosen.
93#define HIGH_PRECISION_MV_QTHRESH 200
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -070094
Yaowu Xuc27fc142016-08-22 16:08:15 -070095// #define OUTPUT_YUV_REC
Yaowu Xuc27fc142016-08-22 16:08:15 -070096#ifdef OUTPUT_YUV_SKINMAP
97FILE *yuv_skinmap_file = NULL;
98#endif
99#ifdef OUTPUT_YUV_REC
100FILE *yuv_rec_file;
101#define FILE_NAME_LEN 100
102#endif
103
Debargha Mukherjeedf713102018-10-02 12:33:32 -0700104// Estimate if the source frame is screen content, based on the portion of
105// blocks that have no more than 4 (experimentally selected) luma colors.
106static int is_screen_content(const uint8_t *src, int use_hbd, int bd,
107 int stride, int width, int height) {
108 assert(src != NULL);
109 int counts = 0;
110 const int blk_w = 16;
111 const int blk_h = 16;
112 const int limit = 4;
113 for (int r = 0; r + blk_h <= height; r += blk_h) {
114 for (int c = 0; c + blk_w <= width; c += blk_w) {
115 int count_buf[1 << 12]; // Maximum (1 << 12) color levels.
116 const int n_colors =
117 use_hbd ? av1_count_colors_highbd(src + r * stride + c, stride, blk_w,
118 blk_h, bd, count_buf)
119 : av1_count_colors(src + r * stride + c, stride, blk_w, blk_h,
120 count_buf);
121 if (n_colors > 1 && n_colors <= limit) counts++;
122 }
123 }
124 // The threshold is 10%.
125 return counts * blk_h * blk_w * 10 > width * height;
126}
127
Yaowu Xuf883b422016-08-30 14:01:10 -0700128static INLINE void Scale2Ratio(AOM_SCALING mode, int *hr, int *hs) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700129 switch (mode) {
130 case NORMAL:
131 *hr = 1;
132 *hs = 1;
133 break;
134 case FOURFIVE:
135 *hr = 4;
136 *hs = 5;
137 break;
138 case THREEFIVE:
139 *hr = 3;
140 *hs = 5;
141 break;
142 case ONETWO:
143 *hr = 1;
144 *hs = 2;
145 break;
146 default:
147 *hr = 1;
148 *hs = 1;
149 assert(0);
150 break;
151 }
152}
153
154// Mark all inactive blocks as active. Other segmentation features may be set
155// so memset cannot be used, instead only inactive blocks should be reset.
Yaowu Xuf883b422016-08-30 14:01:10 -0700156static void suppress_active_map(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700157 unsigned char *const seg_map = cpi->segmentation_map;
158 int i;
159 if (cpi->active_map.enabled || cpi->active_map.update)
160 for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
161 if (seg_map[i] == AM_SEGMENT_ID_INACTIVE)
162 seg_map[i] = AM_SEGMENT_ID_ACTIVE;
163}
164
Yaowu Xuf883b422016-08-30 14:01:10 -0700165static void apply_active_map(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700166 struct segmentation *const seg = &cpi->common.seg;
167 unsigned char *const seg_map = cpi->segmentation_map;
168 const unsigned char *const active_map = cpi->active_map.map;
169 int i;
170
171 assert(AM_SEGMENT_ID_ACTIVE == CR_SEGMENT_ID_BASE);
172
173 if (frame_is_intra_only(&cpi->common)) {
174 cpi->active_map.enabled = 0;
175 cpi->active_map.update = 1;
176 }
177
178 if (cpi->active_map.update) {
179 if (cpi->active_map.enabled) {
180 for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
181 if (seg_map[i] == AM_SEGMENT_ID_ACTIVE) seg_map[i] = active_map[i];
Yaowu Xuf883b422016-08-30 14:01:10 -0700182 av1_enable_segmentation(seg);
183 av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
Cheng Chend8184da2017-09-26 18:15:22 -0700184 av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_H);
185 av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_V);
186 av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_U);
187 av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_V);
188
189 av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_H,
190 -MAX_LOOP_FILTER);
191 av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_V,
192 -MAX_LOOP_FILTER);
193 av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_U,
194 -MAX_LOOP_FILTER);
195 av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_V,
196 -MAX_LOOP_FILTER);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700197 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -0700198 av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
Cheng Chend8184da2017-09-26 18:15:22 -0700199 av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_H);
200 av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_V);
201 av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_U);
202 av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_V);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700203 if (seg->enabled) {
204 seg->update_data = 1;
205 seg->update_map = 1;
206 }
207 }
208 cpi->active_map.update = 0;
209 }
210}
211
Yaowu Xuf883b422016-08-30 14:01:10 -0700212int av1_set_active_map(AV1_COMP *cpi, unsigned char *new_map_16x16, int rows,
213 int cols) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700214 if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols) {
215 unsigned char *const active_map_8x8 = cpi->active_map.map;
216 const int mi_rows = cpi->common.mi_rows;
217 const int mi_cols = cpi->common.mi_cols;
Jingning Han9d533022017-04-07 10:14:42 -0700218 const int row_scale = mi_size_high[BLOCK_16X16] == 2 ? 1 : 2;
219 const int col_scale = mi_size_wide[BLOCK_16X16] == 2 ? 1 : 2;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700220 cpi->active_map.update = 1;
221 if (new_map_16x16) {
222 int r, c;
223 for (r = 0; r < mi_rows; ++r) {
224 for (c = 0; c < mi_cols; ++c) {
225 active_map_8x8[r * mi_cols + c] =
Jingning Han9d533022017-04-07 10:14:42 -0700226 new_map_16x16[(r >> row_scale) * cols + (c >> col_scale)]
Yaowu Xuc27fc142016-08-22 16:08:15 -0700227 ? AM_SEGMENT_ID_ACTIVE
228 : AM_SEGMENT_ID_INACTIVE;
229 }
230 }
231 cpi->active_map.enabled = 1;
232 } else {
233 cpi->active_map.enabled = 0;
234 }
235 return 0;
236 } else {
237 return -1;
238 }
239}
240
Yaowu Xuf883b422016-08-30 14:01:10 -0700241int av1_get_active_map(AV1_COMP *cpi, unsigned char *new_map_16x16, int rows,
242 int cols) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700243 if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols &&
244 new_map_16x16) {
245 unsigned char *const seg_map_8x8 = cpi->segmentation_map;
246 const int mi_rows = cpi->common.mi_rows;
247 const int mi_cols = cpi->common.mi_cols;
Jingning Han9d533022017-04-07 10:14:42 -0700248 const int row_scale = mi_size_high[BLOCK_16X16] == 2 ? 1 : 2;
249 const int col_scale = mi_size_wide[BLOCK_16X16] == 2 ? 1 : 2;
250
Yaowu Xuc27fc142016-08-22 16:08:15 -0700251 memset(new_map_16x16, !cpi->active_map.enabled, rows * cols);
252 if (cpi->active_map.enabled) {
253 int r, c;
254 for (r = 0; r < mi_rows; ++r) {
255 for (c = 0; c < mi_cols; ++c) {
256 // Cyclic refresh segments are considered active despite not having
257 // AM_SEGMENT_ID_ACTIVE
Jingning Han9d533022017-04-07 10:14:42 -0700258 new_map_16x16[(r >> row_scale) * cols + (c >> col_scale)] |=
Yaowu Xuc27fc142016-08-22 16:08:15 -0700259 seg_map_8x8[r * mi_cols + c] != AM_SEGMENT_ID_INACTIVE;
260 }
261 }
262 }
263 return 0;
264 } else {
265 return -1;
266 }
267}
268
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800269// Compute the horizontal frequency components' energy in a frame
270// by calculuating the 16x4 Horizontal DCT. This is to be used to
271// decide the superresolution parameters.
Debargha Mukherjeef50fdce2018-11-13 11:13:00 -0800272void analyze_hor_freq(const AV1_COMP *cpi, double *energy) {
Debargha Mukherjee21eb0402018-12-03 12:10:59 -0800273 uint64_t freq_energy[16] = { 0 };
Debargha Mukherjeef50fdce2018-11-13 11:13:00 -0800274 const YV12_BUFFER_CONFIG *buf = cpi->source;
275 const int bd = cpi->td.mb.e_mbd.bd;
276 const int width = buf->y_crop_width;
277 const int height = buf->y_crop_height;
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800278 DECLARE_ALIGNED(16, int32_t, coeff[16 * 4]);
Debargha Mukherjeef50fdce2018-11-13 11:13:00 -0800279 int n = 0;
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800280 memset(freq_energy, 0, sizeof(freq_energy));
Debargha Mukherjeef50fdce2018-11-13 11:13:00 -0800281 if (buf->flags & YV12_FLAG_HIGHBITDEPTH) {
282 const int16_t *src16 = (const int16_t *)CONVERT_TO_SHORTPTR(buf->y_buffer);
283 for (int i = 0; i < height - 4; i += 4) {
284 for (int j = 0; j < width - 16; j += 16) {
285 av1_fwd_txfm2d_16x4(src16 + i * buf->y_stride + j, coeff, buf->y_stride,
286 H_DCT, bd);
Debargha Mukherjee21eb0402018-12-03 12:10:59 -0800287 for (int k = 1; k < 16; ++k) {
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800288 const uint64_t this_energy =
289 ((int64_t)coeff[k] * coeff[k]) +
290 ((int64_t)coeff[k + 16] * coeff[k + 16]) +
291 ((int64_t)coeff[k + 32] * coeff[k + 32]) +
292 ((int64_t)coeff[k + 48] * coeff[k + 48]);
Debargha Mukherjee21eb0402018-12-03 12:10:59 -0800293 freq_energy[k] += ROUND_POWER_OF_TWO(this_energy, 2 + 2 * (bd - 8));
Debargha Mukherjeef50fdce2018-11-13 11:13:00 -0800294 }
295 n++;
296 }
297 }
298 } else {
Debargha Mukherjeeac28c722018-11-14 22:09:46 -0800299 assert(bd == 8);
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800300 DECLARE_ALIGNED(16, int16_t, src16[16 * 4]);
Debargha Mukherjeef50fdce2018-11-13 11:13:00 -0800301 for (int i = 0; i < height - 4; i += 4) {
302 for (int j = 0; j < width - 16; j += 16) {
303 for (int ii = 0; ii < 4; ++ii)
304 for (int jj = 0; jj < 16; ++jj)
305 src16[ii * 16 + jj] =
306 buf->y_buffer[(i + ii) * buf->y_stride + (j + jj)];
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800307 av1_fwd_txfm2d_16x4(src16, coeff, 16, H_DCT, bd);
Debargha Mukherjee21eb0402018-12-03 12:10:59 -0800308 for (int k = 1; k < 16; ++k) {
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800309 const uint64_t this_energy =
310 ((int64_t)coeff[k] * coeff[k]) +
311 ((int64_t)coeff[k + 16] * coeff[k + 16]) +
312 ((int64_t)coeff[k + 32] * coeff[k + 32]) +
313 ((int64_t)coeff[k + 48] * coeff[k + 48]);
Debargha Mukherjee21eb0402018-12-03 12:10:59 -0800314 freq_energy[k] += ROUND_POWER_OF_TWO(this_energy, 2);
Debargha Mukherjeef50fdce2018-11-13 11:13:00 -0800315 }
316 n++;
317 }
318 }
319 }
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800320 if (n) {
Debargha Mukherjee21eb0402018-12-03 12:10:59 -0800321 for (int k = 1; k < 16; ++k) energy[k] = (double)freq_energy[k] / n;
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800322 // Convert to cumulative energy
Debargha Mukherjee21eb0402018-12-03 12:10:59 -0800323 for (int k = 14; k > 0; --k) energy[k] += energy[k + 1];
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800324 } else {
Debargha Mukherjee21eb0402018-12-03 12:10:59 -0800325 for (int k = 1; k < 16; ++k) energy[k] = 1e+20;
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -0800326 }
Debargha Mukherjeef50fdce2018-11-13 11:13:00 -0800327}
328
Yaowu Xu45295c32018-03-29 12:06:10 -0700329static void set_high_precision_mv(AV1_COMP *cpi, int allow_high_precision_mv,
330 int cur_frame_force_integer_mv) {
James Zern01a9d702017-08-25 19:09:33 +0000331 MACROBLOCK *const mb = &cpi->td.mb;
Hui Su50361152018-03-02 11:01:42 -0800332 cpi->common.allow_high_precision_mv =
333 allow_high_precision_mv && cur_frame_force_integer_mv == 0;
Rupert Swarbricka84faf22017-12-11 13:56:40 +0000334 const int copy_hp =
335 cpi->common.allow_high_precision_mv && cur_frame_force_integer_mv == 0;
Jingning Hanf050fc12018-03-09 14:53:33 -0800336 int *(*src)[2] = copy_hp ? &mb->nmvcost_hp : &mb->nmvcost;
337 mb->mv_cost_stack = *src;
James Zern01a9d702017-08-25 19:09:33 +0000338}
339
Yaowu Xuf883b422016-08-30 14:01:10 -0700340static BLOCK_SIZE select_sb_size(const AV1_COMP *const cpi) {
Urvang Joshie4530f82018-01-09 11:43:37 -0800341 const AV1_COMMON *const cm = &cpi->common;
342
Yaowu Xuf883b422016-08-30 14:01:10 -0700343 if (cpi->oxcf.superblock_size == AOM_SUPERBLOCK_SIZE_64X64)
Yaowu Xuc27fc142016-08-22 16:08:15 -0700344 return BLOCK_64X64;
Maxym Dmytrychenkocc6e0e12018-02-05 16:35:37 +0100345#if CONFIG_FILEOPTIONS
Urvang Joshie4530f82018-01-09 11:43:37 -0800346 if (cm->options && cm->options->ext_partition)
Maxym Dmytrychenkocc6e0e12018-02-05 16:35:37 +0100347#endif
348 if (cpi->oxcf.superblock_size == AOM_SUPERBLOCK_SIZE_128X128)
349 return BLOCK_128X128;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700350
Yaowu Xuf883b422016-08-30 14:01:10 -0700351 assert(cpi->oxcf.superblock_size == AOM_SUPERBLOCK_SIZE_DYNAMIC);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700352
Maxym Dmytrychenkocc6e0e12018-02-05 16:35:37 +0100353// TODO(any): Possibly could improve this with a heuristic.
354#if CONFIG_FILEOPTIONS
Urvang Joshie4530f82018-01-09 11:43:37 -0800355 if (cm->options && !cm->options->ext_partition) return BLOCK_64X64;
Maxym Dmytrychenkocc6e0e12018-02-05 16:35:37 +0100356#endif
Urvang Joshie4530f82018-01-09 11:43:37 -0800357
Urvang Joshiaab74432018-06-01 12:06:22 -0700358 // When superres / resize is on, 'cm->width / height' can change between
359 // calls, so we don't apply this heuristic there. Also, this heuristic gives
360 // compression gain for speed >= 2 only.
361 if (cpi->oxcf.superres_mode == SUPERRES_NONE &&
362 cpi->oxcf.resize_mode == RESIZE_NONE && cpi->oxcf.speed >= 2) {
Urvang Joshie4530f82018-01-09 11:43:37 -0800363 return (cm->width >= 480 && cm->height >= 360) ? BLOCK_128X128
364 : BLOCK_64X64;
365 }
366
Yaowu Xuc27fc142016-08-22 16:08:15 -0700367 return BLOCK_128X128;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700368}
369
David Turner99e990e2018-12-10 12:54:26 +0000370static int get_current_frame_ref_type(const AV1_COMP *const cpi) {
371 const AV1_COMMON *const cm = &cpi->common;
372 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
373 // We choose the reference "type" of this frame from the flags which indicate
374 // which reference frames will be refreshed by it. More than one of these
375 // flags may be set, so the order here implies an order of precedence.
376
377 if (frame_is_intra_only(cm) || cm->error_resilient_mode ||
378 cm->force_primary_ref_none)
379 return REGULAR_FRAME;
380 else if (gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE)
381 return EXT_ARF_FRAME;
382 else if (cpi->refresh_alt_ref_frame)
383 return ARF_FRAME;
384 else if (cpi->rc.is_src_frame_alt_ref)
385 return OVERLAY_FRAME;
386 else if (cpi->refresh_golden_frame)
387 return GLD_FRAME;
388 else if (cpi->refresh_bwd_ref_frame)
389 return BRF_FRAME;
390 else
391 return REGULAR_FRAME;
392}
393
Yaowu Xuf883b422016-08-30 14:01:10 -0700394static void setup_frame(AV1_COMP *cpi) {
395 AV1_COMMON *const cm = &cpi->common;
Johannb0ef6ff2018-02-08 14:32:21 -0800396 // Set up entropy context depending on frame type. The decoder mandates
397 // the use of the default context, index 0, for keyframes and inter
398 // frames where the error_resilient_mode or intra_only flag is set. For
399 // other inter-frames the encoder currently uses only two contexts;
400 // context 1 for ALTREF frames and context 0 for the others.
Soo-Chul Han85e8c792018-01-21 01:58:15 -0500401
Thomas Daede51020e12017-12-14 20:12:44 -0800402 cm->primary_ref_frame = PRIMARY_REF_NONE;
Sarah Parker50b6d6e2018-04-11 19:21:54 -0700403 if (frame_is_intra_only(cm) || cm->error_resilient_mode ||
404 cm->force_primary_ref_none) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700405 av1_setup_past_independence(cm);
Thomas Daede51020e12017-12-14 20:12:44 -0800406 for (int i = 0; i < REF_FRAMES; i++) {
David Turner99e990e2018-12-10 12:54:26 +0000407 cpi->fb_of_context_type[i] = -1;
Thomas Daede51020e12017-12-14 20:12:44 -0800408 }
David Turner99e990e2018-12-10 12:54:26 +0000409 cpi->fb_of_context_type[REGULAR_FRAME] =
David Turnera21966b2018-12-05 14:48:49 +0000410 cm->show_frame ? get_ref_frame_map_idx(cm, GOLDEN_FRAME)
411 : get_ref_frame_map_idx(cm, ALTREF_FRAME);
Yunqing Wang19aefd12018-05-14 15:38:57 -0700412 } else {
David Turner99e990e2018-12-10 12:54:26 +0000413 int wanted_fb = cpi->fb_of_context_type[get_current_frame_ref_type(cpi)];
Thomas Daede51020e12017-12-14 20:12:44 -0800414 for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
David Turnera21966b2018-12-05 14:48:49 +0000415 int fb = get_ref_frame_map_idx(cm, ref_frame);
Thomas Daede51020e12017-12-14 20:12:44 -0800416 if (fb == wanted_fb) {
417 cm->primary_ref_frame = ref_frame - LAST_FRAME;
418 }
419 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700420 }
421
David Turnerd2a592e2018-11-16 14:59:31 +0000422 if (cm->current_frame.frame_type == KEY_FRAME && cm->show_frame) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700423 cpi->refresh_golden_frame = 1;
424 cpi->refresh_alt_ref_frame = 1;
Yunqing Wang9538e4d2019-01-07 18:28:08 +0000425 av1_zero(cpi->interp_filter_selected);
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +0000426 set_sb_size(&cm->seq_params, select_sb_size(cpi));
Tarek AMARAc9813852018-03-05 18:40:18 -0500427 } else if (frame_is_sframe(cm)) {
428 cpi->refresh_golden_frame = 1;
429 cpi->refresh_alt_ref_frame = 1;
Yunqing Wang9538e4d2019-01-07 18:28:08 +0000430 av1_zero(cpi->interp_filter_selected);
Tarek AMARAc9813852018-03-05 18:40:18 -0500431 set_sb_size(&cm->seq_params, select_sb_size(cpi));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700432 } else {
David Turnera21966b2018-12-05 14:48:49 +0000433 const RefCntBuffer *const primary_ref_buf = get_primary_ref_frame_buf(cm);
434 if (primary_ref_buf == NULL) {
David Barkercc615a82018-03-19 14:38:51 +0000435 av1_setup_past_independence(cm);
436 cm->seg.update_map = 1;
437 cm->seg.update_data = 1;
Thomas Daededa4d8b92017-06-05 15:44:14 -0700438 } else {
David Turnera21966b2018-12-05 14:48:49 +0000439 *cm->fc = primary_ref_buf->frame_context;
Thomas Daededa4d8b92017-06-05 15:44:14 -0700440 }
Yunqing Wang9538e4d2019-01-07 18:28:08 +0000441 av1_zero(cpi->interp_filter_selected[0]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700442 }
443
David Turnera21966b2018-12-05 14:48:49 +0000444 cm->prev_frame = get_primary_ref_frame_buf(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700445 cpi->vaq_refresh = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700446}
447
Cheng Chen46f30c72017-09-07 11:13:33 -0700448static void enc_setup_mi(AV1_COMMON *cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700449 int i;
Ravi Chaudhary75c4c5f2018-07-17 16:32:08 +0530450 int mi_rows_sb_aligned = calc_mi_size(cm->mi_rows);
Yunqing Wang19b9f722018-02-20 16:22:01 -0800451 cm->mi = cm->mip;
Ravi Chaudhary75c4c5f2018-07-17 16:32:08 +0530452 memset(cm->mip, 0, cm->mi_stride * mi_rows_sb_aligned * sizeof(*cm->mip));
Yunqing Wang19b9f722018-02-20 16:22:01 -0800453 cm->prev_mi = cm->prev_mip;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700454 // Clear top border row
455 memset(cm->prev_mip, 0, sizeof(*cm->prev_mip) * cm->mi_stride);
456 // Clear left border column
Ravi Chaudhary75c4c5f2018-07-17 16:32:08 +0530457 for (i = 0; i < mi_rows_sb_aligned; ++i)
Yaowu Xuc27fc142016-08-22 16:08:15 -0700458 memset(&cm->prev_mip[i * cm->mi_stride], 0, sizeof(*cm->prev_mip));
Yunqing Wang19b9f722018-02-20 16:22:01 -0800459 cm->mi_grid_visible = cm->mi_grid_base;
460 cm->prev_mi_grid_visible = cm->prev_mi_grid_base;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700461
462 memset(cm->mi_grid_base, 0,
Ravi Chaudhary75c4c5f2018-07-17 16:32:08 +0530463 cm->mi_stride * mi_rows_sb_aligned * sizeof(*cm->mi_grid_base));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700464}
465
Cheng Chen46f30c72017-09-07 11:13:33 -0700466static int enc_alloc_mi(AV1_COMMON *cm, int mi_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700467 cm->mip = aom_calloc(mi_size, sizeof(*cm->mip));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700468 if (!cm->mip) return 1;
Yaowu Xuf883b422016-08-30 14:01:10 -0700469 cm->prev_mip = aom_calloc(mi_size, sizeof(*cm->prev_mip));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700470 if (!cm->prev_mip) return 1;
471 cm->mi_alloc_size = mi_size;
472
Yue Chen53b53f02018-03-29 14:31:23 -0700473 cm->mi_grid_base =
474 (MB_MODE_INFO **)aom_calloc(mi_size, sizeof(MB_MODE_INFO *));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700475 if (!cm->mi_grid_base) return 1;
476 cm->prev_mi_grid_base =
Yue Chen53b53f02018-03-29 14:31:23 -0700477 (MB_MODE_INFO **)aom_calloc(mi_size, sizeof(MB_MODE_INFO *));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700478 if (!cm->prev_mi_grid_base) return 1;
479
480 return 0;
481}
482
Cheng Chen46f30c72017-09-07 11:13:33 -0700483static void enc_free_mi(AV1_COMMON *cm) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700484 aom_free(cm->mip);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700485 cm->mip = NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -0700486 aom_free(cm->prev_mip);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700487 cm->prev_mip = NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -0700488 aom_free(cm->mi_grid_base);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700489 cm->mi_grid_base = NULL;
Yaowu Xuf883b422016-08-30 14:01:10 -0700490 aom_free(cm->prev_mi_grid_base);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700491 cm->prev_mi_grid_base = NULL;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -0700492 cm->mi_alloc_size = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700493}
494
Cheng Chen46f30c72017-09-07 11:13:33 -0700495static void swap_mi_and_prev_mi(AV1_COMMON *cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700496 // Current mip will be the prev_mip for the next frame.
Yue Chen53b53f02018-03-29 14:31:23 -0700497 MB_MODE_INFO **temp_base = cm->prev_mi_grid_base;
498 MB_MODE_INFO *temp = cm->prev_mip;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700499 cm->prev_mip = cm->mip;
500 cm->mip = temp;
501
502 // Update the upper left visible macroblock ptrs.
Yunqing Wang19b9f722018-02-20 16:22:01 -0800503 cm->mi = cm->mip;
504 cm->prev_mi = cm->prev_mip;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700505
506 cm->prev_mi_grid_base = cm->mi_grid_base;
507 cm->mi_grid_base = temp_base;
Yunqing Wang19b9f722018-02-20 16:22:01 -0800508 cm->mi_grid_visible = cm->mi_grid_base;
509 cm->prev_mi_grid_visible = cm->prev_mi_grid_base;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700510}
511
Yaowu Xuf883b422016-08-30 14:01:10 -0700512void av1_initialize_enc(void) {
Wan-Teh Chang3cac4542018-06-29 10:21:39 -0700513 av1_rtcd();
514 aom_dsp_rtcd();
515 aom_scale_rtcd();
516 av1_init_intra_predictors();
517 av1_init_me_luts();
518 av1_rc_init_minq_luts();
519 av1_init_wedge_masks();
Yaowu Xuc27fc142016-08-22 16:08:15 -0700520}
521
Debargha Mukherjeeccb27262017-09-25 14:19:46 -0700522static void dealloc_context_buffers_ext(AV1_COMP *cpi) {
523 if (cpi->mbmi_ext_base) {
524 aom_free(cpi->mbmi_ext_base);
525 cpi->mbmi_ext_base = NULL;
526 }
527}
528
529static void alloc_context_buffers_ext(AV1_COMP *cpi) {
530 AV1_COMMON *cm = &cpi->common;
531 int mi_size = cm->mi_cols * cm->mi_rows;
532
533 dealloc_context_buffers_ext(cpi);
534 CHECK_MEM_ERROR(cm, cpi->mbmi_ext_base,
535 aom_calloc(mi_size, sizeof(*cpi->mbmi_ext_base)));
536}
537
Yaowu Xuc0ea2582019-01-15 10:17:16 -0800538static void reset_film_grain_chroma_params(aom_film_grain_t *pars) {
539 pars->num_cr_points = 0;
540 pars->cr_mult = 0;
541 pars->cr_luma_mult = 0;
542 memset(pars->scaling_points_cr, 0, sizeof(pars->scaling_points_cr));
543 memset(pars->ar_coeffs_cr, 0, sizeof(pars->ar_coeffs_cr));
544 pars->num_cb_points = 0;
545 pars->cb_mult = 0;
546 pars->cb_luma_mult = 0;
547 memset(pars->scaling_points_cb, 0, sizeof(pars->scaling_points_cb));
548 memset(pars->ar_coeffs_cb, 0, sizeof(pars->ar_coeffs_cb));
549}
550
Andrey Norkin6f1c2f72018-01-15 20:08:52 -0800551static void update_film_grain_parameters(struct AV1_COMP *cpi,
552 const AV1EncoderConfig *oxcf) {
553 AV1_COMMON *const cm = &cpi->common;
554 cpi->oxcf = *oxcf;
555
Neil Birkbecka2893ab2018-06-08 14:45:13 -0700556 if (cpi->film_grain_table) {
557 aom_film_grain_table_free(cpi->film_grain_table);
558 aom_free(cpi->film_grain_table);
559 cpi->film_grain_table = NULL;
Neil Birkbeckeb895ef2018-03-14 17:51:03 -0700560 }
Neil Birkbeckeb895ef2018-03-14 17:51:03 -0700561
Andrey Norkin6f1c2f72018-01-15 20:08:52 -0800562 if (oxcf->film_grain_test_vector) {
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -0700563 cm->seq_params.film_grain_params_present = 1;
David Turnerd2a592e2018-11-16 14:59:31 +0000564 if (cm->current_frame.frame_type == KEY_FRAME) {
Andrey Norkin6f1c2f72018-01-15 20:08:52 -0800565 memcpy(&cm->film_grain_params,
566 film_grain_test_vectors + oxcf->film_grain_test_vector - 1,
567 sizeof(cm->film_grain_params));
Yaowu Xuc0ea2582019-01-15 10:17:16 -0800568 if (oxcf->monochrome)
569 reset_film_grain_chroma_params(&cm->film_grain_params);
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700570 cm->film_grain_params.bit_depth = cm->seq_params.bit_depth;
571 if (cm->seq_params.color_range == AOM_CR_FULL_RANGE) {
Andrey Norkin6f1c2f72018-01-15 20:08:52 -0800572 cm->film_grain_params.clip_to_restricted_range = 0;
573 }
574 }
Neil Birkbeckeb895ef2018-03-14 17:51:03 -0700575 } else if (oxcf->film_grain_table_filename) {
Neil Birkbecka2893ab2018-06-08 14:45:13 -0700576 cpi->film_grain_table = aom_malloc(sizeof(*cpi->film_grain_table));
577 memset(cpi->film_grain_table, 0, sizeof(aom_film_grain_table_t));
Neil Birkbeckeb895ef2018-03-14 17:51:03 -0700578
Neil Birkbecka2893ab2018-06-08 14:45:13 -0700579 aom_film_grain_table_read(cpi->film_grain_table,
Neil Birkbeckeb895ef2018-03-14 17:51:03 -0700580 oxcf->film_grain_table_filename, &cm->error);
Andrey Norkin6f1c2f72018-01-15 20:08:52 -0800581 } else {
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -0700582 cm->seq_params.film_grain_params_present = 0;
Andrey Norkin6f1c2f72018-01-15 20:08:52 -0800583 memset(&cm->film_grain_params, 0, sizeof(cm->film_grain_params));
584 }
585}
Andrey Norkin6f1c2f72018-01-15 20:08:52 -0800586
Yaowu Xuf883b422016-08-30 14:01:10 -0700587static void dealloc_compressor_data(AV1_COMP *cpi) {
588 AV1_COMMON *const cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +0000589 const int num_planes = av1_num_planes(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700590
Debargha Mukherjeeccb27262017-09-25 14:19:46 -0700591 dealloc_context_buffers_ext(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700592
Yaowu Xuf883b422016-08-30 14:01:10 -0700593 aom_free(cpi->tile_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700594 cpi->tile_data = NULL;
595
596 // Delete sementation map
Yaowu Xuf883b422016-08-30 14:01:10 -0700597 aom_free(cpi->segmentation_map);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700598 cpi->segmentation_map = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700599
Yaowu Xuf883b422016-08-30 14:01:10 -0700600 av1_cyclic_refresh_free(cpi->cyclic_refresh);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700601 cpi->cyclic_refresh = NULL;
602
Yaowu Xuf883b422016-08-30 14:01:10 -0700603 aom_free(cpi->active_map.map);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700604 cpi->active_map.map = NULL;
605
Jingning Hand064cf02017-06-01 10:00:39 -0700606 aom_free(cpi->td.mb.above_pred_buf);
607 cpi->td.mb.above_pred_buf = NULL;
608
609 aom_free(cpi->td.mb.left_pred_buf);
610 cpi->td.mb.left_pred_buf = NULL;
611
612 aom_free(cpi->td.mb.wsrc_buf);
613 cpi->td.mb.wsrc_buf = NULL;
614
Ravi Chaudhary5d970f42018-09-25 11:25:32 +0530615#if CONFIG_COLLECT_INTER_MODE_RD_STATS
616 aom_free(cpi->td.mb.inter_modes_info);
617 cpi->td.mb.inter_modes_info = NULL;
618#endif
619
Ravi Chaudhary783d6a32018-08-28 18:21:02 +0530620 for (int i = 0; i < 2; i++)
621 for (int j = 0; j < 2; j++) {
622 aom_free(cpi->td.mb.hash_value_buffer[i][j]);
623 cpi->td.mb.hash_value_buffer[i][j] = NULL;
624 }
Jingning Hand064cf02017-06-01 10:00:39 -0700625 aom_free(cpi->td.mb.mask_buf);
626 cpi->td.mb.mask_buf = NULL;
Jingning Hand064cf02017-06-01 10:00:39 -0700627
Jingning Han6cc1fd32017-10-13 09:05:36 -0700628 aom_free(cm->tpl_mvs);
629 cm->tpl_mvs = NULL;
Jingning Han6cc1fd32017-10-13 09:05:36 -0700630
Yaowu Xuf883b422016-08-30 14:01:10 -0700631 av1_free_ref_frame_buffers(cm->buffer_pool);
Angie Chiangf0fbf9d2017-03-15 15:01:22 -0700632 av1_free_txb_buf(cpi);
Yaowu Xuf883b422016-08-30 14:01:10 -0700633 av1_free_context_buffers(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700634
Yaowu Xuf883b422016-08-30 14:01:10 -0700635 aom_free_frame_buffer(&cpi->last_frame_uf);
Yaowu Xuf883b422016-08-30 14:01:10 -0700636 av1_free_restoration_buffers(cm);
Debargha Mukherjee999d2f62016-12-15 13:23:21 -0800637 aom_free_frame_buffer(&cpi->trial_frame_rst);
Yaowu Xuf883b422016-08-30 14:01:10 -0700638 aom_free_frame_buffer(&cpi->scaled_source);
639 aom_free_frame_buffer(&cpi->scaled_last_source);
640 aom_free_frame_buffer(&cpi->alt_ref_buffer);
641 av1_lookahead_destroy(cpi->lookahead);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700642
Yaowu Xuf883b422016-08-30 14:01:10 -0700643 aom_free(cpi->tile_tok[0][0]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700644 cpi->tile_tok[0][0] = 0;
645
Ravi Chaudhary73cf15b2018-08-30 10:52:51 +0530646 aom_free(cpi->tplist[0][0]);
647 cpi->tplist[0][0] = NULL;
648
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +0000649 av1_free_pc_tree(&cpi->td, num_planes);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700650
hui sud9a812b2017-07-06 14:34:37 -0700651 aom_free(cpi->td.mb.palette_buffer);
Neil Birkbecka2893ab2018-06-08 14:45:13 -0700652
Urvang Joshi0a4cfad2018-09-07 11:10:39 -0700653 aom_free(cpi->td.mb.tmp_conv_dst);
654 for (int j = 0; j < 2; ++j) {
655 aom_free(cpi->td.mb.tmp_obmc_bufs[j]);
656 }
657
Neil Birkbecka2893ab2018-06-08 14:45:13 -0700658#if CONFIG_DENOISE
659 if (cpi->denoise_and_model) {
660 aom_denoise_and_model_free(cpi->denoise_and_model);
661 cpi->denoise_and_model = NULL;
662 }
663#endif
664 if (cpi->film_grain_table) {
665 aom_film_grain_table_free(cpi->film_grain_table);
666 cpi->film_grain_table = NULL;
667 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700668}
669
Yaowu Xuf883b422016-08-30 14:01:10 -0700670static void save_coding_context(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700671 CODING_CONTEXT *const cc = &cpi->coding_context;
Yaowu Xuf883b422016-08-30 14:01:10 -0700672 AV1_COMMON *cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700673
Sebastien Alaiwane140c502017-04-27 09:52:34 +0200674 // Stores a snapshot of key state variables which can subsequently be
675 // restored with a call to av1_restore_coding_context. These functions are
676 // intended for use in a re-code loop in av1_compress_frame where the
677 // quantizer value is adjusted between loop iterations.
Jingning Hanf050fc12018-03-09 14:53:33 -0800678 av1_copy(cc->nmv_vec_cost, cpi->td.mb.nmv_vec_cost);
679 av1_copy(cc->nmv_costs, cpi->nmv_costs);
680 av1_copy(cc->nmv_costs_hp, cpi->nmv_costs_hp);
James Zern01a9d702017-08-25 19:09:33 +0000681
Yaowu Xuc27fc142016-08-22 16:08:15 -0700682 cc->fc = *cm->fc;
683}
684
Yaowu Xuf883b422016-08-30 14:01:10 -0700685static void restore_coding_context(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700686 CODING_CONTEXT *const cc = &cpi->coding_context;
Yaowu Xuf883b422016-08-30 14:01:10 -0700687 AV1_COMMON *cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700688
Sebastien Alaiwane140c502017-04-27 09:52:34 +0200689 // Restore key state variables to the snapshot state stored in the
690 // previous call to av1_save_coding_context.
Jingning Hanf050fc12018-03-09 14:53:33 -0800691 av1_copy(cpi->td.mb.nmv_vec_cost, cc->nmv_vec_cost);
692 av1_copy(cpi->nmv_costs, cc->nmv_costs);
693 av1_copy(cpi->nmv_costs_hp, cc->nmv_costs_hp);
James Zern01a9d702017-08-25 19:09:33 +0000694
Yaowu Xuc27fc142016-08-22 16:08:15 -0700695 *cm->fc = cc->fc;
696}
697
Yaowu Xuf883b422016-08-30 14:01:10 -0700698static void configure_static_seg_features(AV1_COMP *cpi) {
699 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700700 const RATE_CONTROL *const rc = &cpi->rc;
701 struct segmentation *const seg = &cm->seg;
702
703 int high_q = (int)(rc->avg_q > 48.0);
704 int qi_delta;
705
706 // Disable and clear down for KF
David Turnerd2a592e2018-11-16 14:59:31 +0000707 if (cm->current_frame.frame_type == KEY_FRAME) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700708 // Clear down the global segmentation map
709 memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
710 seg->update_map = 0;
711 seg->update_data = 0;
712 cpi->static_mb_pct = 0;
713
714 // Disable segmentation
Yaowu Xuf883b422016-08-30 14:01:10 -0700715 av1_disable_segmentation(seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700716
717 // Clear down the segment features.
Yaowu Xuf883b422016-08-30 14:01:10 -0700718 av1_clearall_segfeatures(seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700719 } else if (cpi->refresh_alt_ref_frame) {
720 // If this is an alt ref frame
721 // Clear down the global segmentation map
722 memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
723 seg->update_map = 0;
724 seg->update_data = 0;
725 cpi->static_mb_pct = 0;
726
727 // Disable segmentation and individual segment features by default
Yaowu Xuf883b422016-08-30 14:01:10 -0700728 av1_disable_segmentation(seg);
729 av1_clearall_segfeatures(seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700730
731 // Scan frames from current to arf frame.
732 // This function re-enables segmentation if appropriate.
Yaowu Xuf883b422016-08-30 14:01:10 -0700733 av1_update_mbgraph_stats(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700734
735 // If segmentation was enabled set those features needed for the
736 // arf itself.
737 if (seg->enabled) {
738 seg->update_map = 1;
739 seg->update_data = 1;
740
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700741 qi_delta = av1_compute_qdelta(rc, rc->avg_q, rc->avg_q * 0.875,
742 cm->seq_params.bit_depth);
Yaowu Xuf883b422016-08-30 14:01:10 -0700743 av1_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta - 2);
Cheng Chend8184da2017-09-26 18:15:22 -0700744 av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_H, -2);
745 av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_V, -2);
746 av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_U, -2);
747 av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_V, -2);
748
749 av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_H);
750 av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_V);
751 av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_U);
752 av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_V);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700753
Yaowu Xuf883b422016-08-30 14:01:10 -0700754 av1_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700755 }
756 } else if (seg->enabled) {
757 // All other frames if segmentation has been enabled
758
759 // First normal frame in a valid gf or alt ref group
760 if (rc->frames_since_golden == 0) {
761 // Set up segment features for normal frames in an arf group
762 if (rc->source_alt_ref_active) {
763 seg->update_map = 0;
764 seg->update_data = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700765
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700766 qi_delta = av1_compute_qdelta(rc, rc->avg_q, rc->avg_q * 1.125,
767 cm->seq_params.bit_depth);
Yaowu Xuf883b422016-08-30 14:01:10 -0700768 av1_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta + 2);
769 av1_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700770
Cheng Chend8184da2017-09-26 18:15:22 -0700771 av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_H, -2);
772 av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_V, -2);
773 av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_U, -2);
774 av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_V, -2);
775
776 av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_H);
777 av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_V);
778 av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_U);
779 av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_V);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700780
781 // Segment coding disabled for compred testing
782 if (high_q || (cpi->static_mb_pct == 100)) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700783 av1_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);
784 av1_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);
785 av1_enable_segfeature(seg, 1, SEG_LVL_SKIP);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700786 }
787 } else {
788 // Disable segmentation and clear down features if alt ref
789 // is not active for this group
790
Yaowu Xuf883b422016-08-30 14:01:10 -0700791 av1_disable_segmentation(seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700792
793 memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
794
795 seg->update_map = 0;
796 seg->update_data = 0;
797
Yaowu Xuf883b422016-08-30 14:01:10 -0700798 av1_clearall_segfeatures(seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700799 }
800 } else if (rc->is_src_frame_alt_ref) {
801 // Special case where we are coding over the top of a previous
802 // alt ref frame.
803 // Segment coding disabled for compred testing
804
805 // Enable ref frame features for segment 0 as well
Yaowu Xuf883b422016-08-30 14:01:10 -0700806 av1_enable_segfeature(seg, 0, SEG_LVL_REF_FRAME);
807 av1_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700808
809 // All mbs should use ALTREF_FRAME
Yaowu Xuf883b422016-08-30 14:01:10 -0700810 av1_clear_segdata(seg, 0, SEG_LVL_REF_FRAME);
811 av1_set_segdata(seg, 0, SEG_LVL_REF_FRAME, ALTREF_FRAME);
812 av1_clear_segdata(seg, 1, SEG_LVL_REF_FRAME);
813 av1_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700814
815 // Skip all MBs if high Q (0,0 mv and skip coeffs)
816 if (high_q) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700817 av1_enable_segfeature(seg, 0, SEG_LVL_SKIP);
818 av1_enable_segfeature(seg, 1, SEG_LVL_SKIP);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700819 }
820 // Enable data update
821 seg->update_data = 1;
822 } else {
823 // All other frames.
824
825 // No updates.. leave things as they are.
826 seg->update_map = 0;
827 seg->update_data = 0;
828 }
829 }
830}
831
Yaowu Xuf883b422016-08-30 14:01:10 -0700832static void update_reference_segmentation_map(AV1_COMP *cpi) {
833 AV1_COMMON *const cm = &cpi->common;
Yushin Choa7f65922018-04-04 16:06:11 -0700834 MB_MODE_INFO **mi_4x4_ptr = cm->mi_grid_visible;
David Turnerb757ce02018-11-12 15:01:28 +0000835 uint8_t *cache_ptr = cm->cur_frame->seg_map;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700836 int row, col;
837
838 for (row = 0; row < cm->mi_rows; row++) {
Yushin Choa7f65922018-04-04 16:06:11 -0700839 MB_MODE_INFO **mi_4x4 = mi_4x4_ptr;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700840 uint8_t *cache = cache_ptr;
Yushin Choa7f65922018-04-04 16:06:11 -0700841 for (col = 0; col < cm->mi_cols; col++, mi_4x4++, cache++)
842 cache[0] = mi_4x4[0]->segment_id;
843 mi_4x4_ptr += cm->mi_stride;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700844 cache_ptr += cm->mi_cols;
845 }
846}
847
Yaowu Xuf883b422016-08-30 14:01:10 -0700848static void alloc_raw_frame_buffers(AV1_COMP *cpi) {
849 AV1_COMMON *cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700850 const SequenceHeader *const seq_params = &cm->seq_params;
Yaowu Xuf883b422016-08-30 14:01:10 -0700851 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700852
853 if (!cpi->lookahead)
Satish Kumar Suman29909962019-01-09 10:31:21 +0530854 cpi->lookahead = av1_lookahead_init(
855 oxcf->width, oxcf->height, seq_params->subsampling_x,
856 seq_params->subsampling_y, seq_params->use_highbitdepth,
857 oxcf->lag_in_frames, oxcf->border_in_pixels);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700858 if (!cpi->lookahead)
Yaowu Xuf883b422016-08-30 14:01:10 -0700859 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700860 "Failed to allocate lag buffers");
861
862 // TODO(agrange) Check if ARF is enabled and skip allocation if not.
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700863 if (aom_realloc_frame_buffer(
864 &cpi->alt_ref_buffer, oxcf->width, oxcf->height,
865 seq_params->subsampling_x, seq_params->subsampling_y,
Satish Kumar Suman29909962019-01-09 10:31:21 +0530866 seq_params->use_highbitdepth, oxcf->border_in_pixels,
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700867 cm->byte_alignment, NULL, NULL, NULL))
Yaowu Xuf883b422016-08-30 14:01:10 -0700868 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700869 "Failed to allocate altref buffer");
870}
871
Yaowu Xuf883b422016-08-30 14:01:10 -0700872static void alloc_util_frame_buffers(AV1_COMP *cpi) {
873 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700874 const SequenceHeader *const seq_params = &cm->seq_params;
875 if (aom_realloc_frame_buffer(
876 &cpi->last_frame_uf, cm->width, cm->height, seq_params->subsampling_x,
877 seq_params->subsampling_y, seq_params->use_highbitdepth,
Satish Kumar Suman29909962019-01-09 10:31:21 +0530878 cpi->oxcf.border_in_pixels, cm->byte_alignment, NULL, NULL, NULL))
Yaowu Xuf883b422016-08-30 14:01:10 -0700879 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700880 "Failed to allocate last frame buffer");
881
Fergus Simpson9cd57cf2017-06-12 17:02:03 -0700882 if (aom_realloc_frame_buffer(
Debargha Mukherjee3a4959f2018-02-26 15:34:03 -0800883 &cpi->trial_frame_rst, cm->superres_upscaled_width,
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700884 cm->superres_upscaled_height, seq_params->subsampling_x,
885 seq_params->subsampling_y, seq_params->use_highbitdepth,
Satish Kumar Suman3b12c002018-12-19 15:27:20 +0530886 AOM_RESTORATION_FRAME_BORDER, cm->byte_alignment, NULL, NULL, NULL))
Debargha Mukherjee874d36d2016-12-14 16:53:17 -0800887 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Debargha Mukherjee999d2f62016-12-15 13:23:21 -0800888 "Failed to allocate trial restored frame buffer");
Yaowu Xuc27fc142016-08-22 16:08:15 -0700889
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700890 if (aom_realloc_frame_buffer(
891 &cpi->scaled_source, cm->width, cm->height, seq_params->subsampling_x,
892 seq_params->subsampling_y, seq_params->use_highbitdepth,
Satish Kumar Suman29909962019-01-09 10:31:21 +0530893 cpi->oxcf.border_in_pixels, cm->byte_alignment, NULL, NULL, NULL))
Yaowu Xuf883b422016-08-30 14:01:10 -0700894 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700895 "Failed to allocate scaled source buffer");
896
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700897 if (aom_realloc_frame_buffer(
898 &cpi->scaled_last_source, cm->width, cm->height,
899 seq_params->subsampling_x, seq_params->subsampling_y,
Satish Kumar Suman29909962019-01-09 10:31:21 +0530900 seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700901 cm->byte_alignment, NULL, NULL, NULL))
Yaowu Xuf883b422016-08-30 14:01:10 -0700902 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700903 "Failed to allocate scaled last source buffer");
904}
905
Cheng Chen46f30c72017-09-07 11:13:33 -0700906static void alloc_compressor_data(AV1_COMP *cpi) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700907 AV1_COMMON *cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +0000908 const int num_planes = av1_num_planes(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700909
Yaowu Xuf883b422016-08-30 14:01:10 -0700910 av1_alloc_context_buffers(cm, cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700911
Ravi Chaudhary73cf15b2018-08-30 10:52:51 +0530912 int mi_rows_aligned_to_sb =
913 ALIGN_POWER_OF_TWO(cm->mi_rows, cm->seq_params.mib_size_log2);
914 int sb_rows = mi_rows_aligned_to_sb >> cm->seq_params.mib_size_log2;
915
Angie Chiangf0fbf9d2017-03-15 15:01:22 -0700916 av1_alloc_txb_buf(cpi);
Angie Chiangf0fbf9d2017-03-15 15:01:22 -0700917
Yaowu Xuc27fc142016-08-22 16:08:15 -0700918 alloc_context_buffers_ext(cpi);
919
Yaowu Xuf883b422016-08-30 14:01:10 -0700920 aom_free(cpi->tile_tok[0][0]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700921
922 {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +0000923 unsigned int tokens =
924 get_token_alloc(cm->mb_rows, cm->mb_cols, MAX_SB_SIZE_LOG2, num_planes);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700925 CHECK_MEM_ERROR(cm, cpi->tile_tok[0][0],
Yaowu Xuf883b422016-08-30 14:01:10 -0700926 aom_calloc(tokens, sizeof(*cpi->tile_tok[0][0])));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700927 }
Ravi Chaudhary73cf15b2018-08-30 10:52:51 +0530928 aom_free(cpi->tplist[0][0]);
929
930 CHECK_MEM_ERROR(cm, cpi->tplist[0][0],
931 aom_calloc(sb_rows * MAX_TILE_ROWS * MAX_TILE_COLS,
932 sizeof(*cpi->tplist[0][0])));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700933
Yaowu Xuf883b422016-08-30 14:01:10 -0700934 av1_setup_pc_tree(&cpi->common, &cpi->td);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700935}
936
Yaowu Xuf883b422016-08-30 14:01:10 -0700937void av1_new_framerate(AV1_COMP *cpi, double framerate) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700938 cpi->framerate = framerate < 0.1 ? 30 : framerate;
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700939 av1_rc_update_framerate(cpi, cpi->common.width, cpi->common.height);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700940}
941
Yunqing Wang75e20e82018-06-16 12:10:48 -0700942static void set_tile_info(AV1_COMP *cpi) {
Dominic Symesdb5d66f2017-08-18 18:11:34 +0200943 AV1_COMMON *const cm = &cpi->common;
Dominic Symesf58f1112017-09-25 12:47:40 +0200944 int i, start_sb;
Dominic Symesdb5d66f2017-08-18 18:11:34 +0200945
946 av1_get_tile_limits(cm);
Dominic Symesdb5d66f2017-08-18 18:11:34 +0200947
948 // configure tile columns
Dominic Symes26ad0b22017-10-01 16:35:13 +0200949 if (cpi->oxcf.tile_width_count == 0 || cpi->oxcf.tile_height_count == 0) {
Dominic Symesf58f1112017-09-25 12:47:40 +0200950 cm->uniform_tile_spacing_flag = 1;
Dominic Symesdb5d66f2017-08-18 18:11:34 +0200951 cm->log2_tile_cols = AOMMAX(cpi->oxcf.tile_columns, cm->min_log2_tile_cols);
952 cm->log2_tile_cols = AOMMIN(cm->log2_tile_cols, cm->max_log2_tile_cols);
Dominic Symesf58f1112017-09-25 12:47:40 +0200953 } else {
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +0000954 int mi_cols = ALIGN_POWER_OF_TWO(cm->mi_cols, cm->seq_params.mib_size_log2);
955 int sb_cols = mi_cols >> cm->seq_params.mib_size_log2;
Dominic Symes26ad0b22017-10-01 16:35:13 +0200956 int size_sb, j = 0;
Dominic Symesf58f1112017-09-25 12:47:40 +0200957 cm->uniform_tile_spacing_flag = 0;
958 for (i = 0, start_sb = 0; start_sb < sb_cols && i < MAX_TILE_COLS; i++) {
959 cm->tile_col_start_sb[i] = start_sb;
Dominic Symes26ad0b22017-10-01 16:35:13 +0200960 size_sb = cpi->oxcf.tile_widths[j++];
961 if (j >= cpi->oxcf.tile_width_count) j = 0;
David Barker6cd5a822018-03-05 16:19:28 +0000962 start_sb += AOMMIN(size_sb, cm->max_tile_width_sb);
Dominic Symesf58f1112017-09-25 12:47:40 +0200963 }
964 cm->tile_cols = i;
965 cm->tile_col_start_sb[i] = sb_cols;
Dominic Symesdb5d66f2017-08-18 18:11:34 +0200966 }
967 av1_calculate_tile_cols(cm);
968
969 // configure tile rows
970 if (cm->uniform_tile_spacing_flag) {
971 cm->log2_tile_rows = AOMMAX(cpi->oxcf.tile_rows, cm->min_log2_tile_rows);
972 cm->log2_tile_rows = AOMMIN(cm->log2_tile_rows, cm->max_log2_tile_rows);
Dominic Symesf58f1112017-09-25 12:47:40 +0200973 } else {
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +0000974 int mi_rows = ALIGN_POWER_OF_TWO(cm->mi_rows, cm->seq_params.mib_size_log2);
975 int sb_rows = mi_rows >> cm->seq_params.mib_size_log2;
Dominic Symes26ad0b22017-10-01 16:35:13 +0200976 int size_sb, j = 0;
Dominic Symesf58f1112017-09-25 12:47:40 +0200977 for (i = 0, start_sb = 0; start_sb < sb_rows && i < MAX_TILE_ROWS; i++) {
978 cm->tile_row_start_sb[i] = start_sb;
Dominic Symes26ad0b22017-10-01 16:35:13 +0200979 size_sb = cpi->oxcf.tile_heights[j++];
980 if (j >= cpi->oxcf.tile_height_count) j = 0;
981 start_sb += AOMMIN(size_sb, cm->max_tile_height_sb);
Dominic Symesf58f1112017-09-25 12:47:40 +0200982 }
983 cm->tile_rows = i;
984 cm->tile_row_start_sb[i] = sb_rows;
Dominic Symesdb5d66f2017-08-18 18:11:34 +0200985 }
986 av1_calculate_tile_rows(cm);
987}
988
Yaowu Xuf883b422016-08-30 14:01:10 -0700989static void update_frame_size(AV1_COMP *cpi) {
990 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700991 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
992
Yaowu Xuf883b422016-08-30 14:01:10 -0700993 av1_set_mb_mi(cm, cm->width, cm->height);
994 av1_init_context_buffers(cm);
Luc Trudeau1e84af52017-11-25 15:00:28 -0500995 av1_init_macroblockd(cm, xd, NULL);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700996 memset(cpi->mbmi_ext_base, 0,
997 cm->mi_rows * cm->mi_cols * sizeof(*cpi->mbmi_ext_base));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700998 set_tile_info(cpi);
999}
1000
Yaowu Xuf883b422016-08-30 14:01:10 -07001001static void init_buffer_indices(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001002 int fb_idx;
Zoe Liu5989a722018-03-29 13:37:36 -07001003 for (fb_idx = 0; fb_idx < REF_FRAMES; ++fb_idx)
David Turnera21966b2018-12-05 14:48:49 +00001004 cpi->common.remapped_ref_idx[fb_idx] = fb_idx;
RogerZhou3b635242017-09-19 10:06:46 -07001005 cpi->rate_index = 0;
1006 cpi->rate_size = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001007}
1008
Debargha Mukherjee57498692018-05-11 13:29:31 -07001009static INLINE int does_level_match(int width, int height, double fps,
1010 int lvl_width, int lvl_height,
1011 double lvl_fps, int lvl_dim_mult) {
1012 const int64_t lvl_luma_pels = lvl_width * lvl_height;
1013 const double lvl_display_sample_rate = lvl_luma_pels * lvl_fps;
1014 const int64_t luma_pels = width * height;
1015 const double display_sample_rate = luma_pels * fps;
1016 return luma_pels <= lvl_luma_pels &&
1017 display_sample_rate <= lvl_display_sample_rate &&
1018 width <= lvl_width * lvl_dim_mult &&
1019 height <= lvl_height * lvl_dim_mult;
1020}
1021
Andrey Norkin26495512018-06-20 17:13:11 -07001022static void set_bitstream_level_tier(SequenceHeader *seq, AV1_COMMON *cm,
Andrey Norkinf481d982018-05-15 12:05:31 -07001023 const AV1EncoderConfig *oxcf) {
Debargha Mukherjee57498692018-05-11 13:29:31 -07001024 // TODO(any): This is a placeholder function that only addresses dimensions
1025 // and max display sample rates.
1026 // Need to add checks for max bit rate, max decoded luma sample rate, header
1027 // rate, etc. that are not covered by this function.
Debargha Mukherjeeea675402018-05-10 16:10:41 -07001028 (void)oxcf;
Debargha Mukherjee57498692018-05-11 13:29:31 -07001029 BitstreamLevel bl = { 9, 3 };
1030 if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate, 512,
1031 288, 30.0, 4)) {
1032 bl.major = 2;
1033 bl.minor = 0;
1034 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1035 704, 396, 30.0, 4)) {
1036 bl.major = 2;
1037 bl.minor = 1;
1038 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1039 1088, 612, 30.0, 4)) {
1040 bl.major = 3;
1041 bl.minor = 0;
1042 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1043 1376, 774, 30.0, 4)) {
1044 bl.major = 3;
1045 bl.minor = 1;
1046 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1047 2048, 1152, 30.0, 3)) {
1048 bl.major = 4;
1049 bl.minor = 0;
1050 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1051 2048, 1152, 60.0, 3)) {
1052 bl.major = 4;
1053 bl.minor = 1;
1054 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1055 4096, 2176, 30.0, 2)) {
1056 bl.major = 5;
1057 bl.minor = 0;
1058 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1059 4096, 2176, 60.0, 2)) {
1060 bl.major = 5;
1061 bl.minor = 1;
1062 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1063 4096, 2176, 120.0, 2)) {
1064 bl.major = 5;
1065 bl.minor = 2;
1066 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1067 8192, 4352, 30.0, 2)) {
1068 bl.major = 6;
1069 bl.minor = 0;
1070 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1071 8192, 4352, 60.0, 2)) {
1072 bl.major = 6;
1073 bl.minor = 1;
1074 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1075 8192, 4352, 120.0, 2)) {
1076 bl.major = 6;
1077 bl.minor = 2;
1078 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1079 16384, 8704, 30.0, 2)) {
1080 bl.major = 7;
1081 bl.minor = 0;
1082 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1083 16384, 8704, 60.0, 2)) {
1084 bl.major = 7;
1085 bl.minor = 1;
1086 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1087 16384, 8704, 120.0, 2)) {
1088 bl.major = 7;
1089 bl.minor = 2;
1090 }
Debargha Mukherjeeea675402018-05-10 16:10:41 -07001091 for (int i = 0; i < MAX_NUM_OPERATING_POINTS; ++i) {
Debargha Mukherjee57498692018-05-11 13:29:31 -07001092 seq->level[i] = bl;
Andrey Norkinf481d982018-05-15 12:05:31 -07001093 seq->tier[i] = 0; // setting main tier by default
Andrey Norkin26495512018-06-20 17:13:11 -07001094 // Set the maximum parameters for bitrate and buffer size for this profile,
1095 // level, and tier
1096 cm->op_params[i].bitrate = max_level_bitrate(
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001097 cm->seq_params.profile, major_minor_to_seq_level_idx(seq->level[i]),
1098 seq->tier[i]);
Andrey Norkinc7511de2018-06-22 12:31:06 -07001099    // A level with seq_level_idx == 31 yields a high "dummy" bitrate so that
1100    // this check passes.
Andrey Norkin26495512018-06-20 17:13:11 -07001101 if (cm->op_params[i].bitrate == 0)
1102 aom_internal_error(
1103 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
1104 "AV1 does not support this combination of profile, level, and tier.");
Andrey Norkinc7511de2018-06-22 12:31:06 -07001105    // Buffer size in bits is bitrate in bits/s * 1 s
Andrey Norkin26495512018-06-20 17:13:11 -07001106 cm->op_params[i].buffer_size = cm->op_params[i].bitrate;
Debargha Mukherjeeea675402018-05-10 16:10:41 -07001107 }
1108}
1109
Andrey Norkin26495512018-06-20 17:13:11 -07001110static void init_seq_coding_tools(SequenceHeader *seq, AV1_COMMON *cm,
1111 const AV1EncoderConfig *oxcf) {
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001112 seq->still_picture = (oxcf->limit == 1);
1113 seq->reduced_still_picture_hdr = seq->still_picture;
Debargha Mukherjee9713ccb2018-04-08 19:09:17 -07001114 seq->reduced_still_picture_hdr &= !oxcf->full_still_picture_hdr;
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001115 seq->force_screen_content_tools = 2;
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001116 seq->force_integer_mv = 2;
David Turnerebf96f42018-11-14 16:57:57 +00001117 seq->order_hint_info.enable_order_hint = oxcf->enable_order_hint;
David Turner936235c2018-11-28 13:42:01 +00001118 seq->frame_id_numbers_present_flag =
1119 !(seq->still_picture && seq->reduced_still_picture_hdr) &&
1120 !oxcf->large_scale_tile && oxcf->error_resilient_mode;
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001121 if (seq->still_picture && seq->reduced_still_picture_hdr) {
David Turnerebf96f42018-11-14 16:57:57 +00001122 seq->order_hint_info.enable_order_hint = 0;
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001123 seq->force_screen_content_tools = 2;
1124 seq->force_integer_mv = 2;
1125 }
David Turnerebf96f42018-11-14 16:57:57 +00001126 seq->order_hint_info.order_hint_bits_minus_1 =
1127 seq->order_hint_info.enable_order_hint
1128 ? DEFAULT_EXPLICIT_ORDER_HINT_BITS - 1
1129 : -1;
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001130
David Turner760a2f42018-12-07 15:25:36 +00001131 seq->max_frame_width =
1132 oxcf->forced_max_frame_width ? oxcf->forced_max_frame_width : oxcf->width;
1133 seq->max_frame_height = oxcf->forced_max_frame_height
1134 ? oxcf->forced_max_frame_height
1135 : oxcf->height;
1136 seq->num_bits_width =
1137 (seq->max_frame_width > 1) ? get_msb(seq->max_frame_width - 1) + 1 : 1;
1138 seq->num_bits_height =
1139 (seq->max_frame_height > 1) ? get_msb(seq->max_frame_height - 1) + 1 : 1;
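  // Added worked example (not in the original source): assuming get_msb()
  // returns the index of the highest set bit, a 1920x1080 maximum frame size
  // gives num_bits_width = get_msb(1919) + 1 = 11, i.e. frame_width_minus_1
  // values up to 1919 fit in 11 bits; the asserts below cap both counts at 16.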
1140 assert(seq->num_bits_width <= 16);
1141 assert(seq->num_bits_height <= 16);
1142
1143 seq->frame_id_length = FRAME_ID_LENGTH;
1144 seq->delta_frame_id_length = DELTA_FRAME_ID_LENGTH;
1145
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001146 seq->enable_dual_filter = oxcf->enable_dual_filter;
Debargha Mukherjee7ac3eb12018-12-12 10:26:50 -08001147 seq->order_hint_info.enable_dist_wtd_comp = oxcf->enable_dist_wtd_comp;
1148 seq->order_hint_info.enable_dist_wtd_comp &=
David Turnerebf96f42018-11-14 16:57:57 +00001149 seq->order_hint_info.enable_order_hint;
1150 seq->order_hint_info.enable_ref_frame_mvs = oxcf->enable_ref_frame_mvs;
1151 seq->order_hint_info.enable_ref_frame_mvs &=
1152 seq->order_hint_info.enable_order_hint;
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001153 seq->enable_superres = oxcf->enable_superres;
1154 seq->enable_cdef = oxcf->enable_cdef;
1155 seq->enable_restoration = oxcf->enable_restoration;
Debargha Mukherjee37df9162018-03-25 12:48:24 -07001156 seq->enable_warped_motion = oxcf->enable_warped_motion;
Debargha Mukherjee16ea6ba2018-12-10 12:01:38 -08001157 seq->enable_interintra_compound = oxcf->enable_interintra_comp;
1158 seq->enable_masked_compound = oxcf->enable_masked_comp;
Debargha Mukherjee03c43ba2018-12-14 13:08:08 -08001159 seq->enable_intra_edge_filter = oxcf->enable_intra_edge_filter;
Yue Chen8f9ca582018-12-12 15:11:47 -08001160 seq->enable_filter_intra = oxcf->enable_filter_intra;
Debargha Mukherjee57498692018-05-11 13:29:31 -07001161
Andrey Norkin26495512018-06-20 17:13:11 -07001162 set_bitstream_level_tier(seq, cm, oxcf);
Adrian Grangec56f6ec2018-05-31 14:19:32 -07001163
1164 if (seq->operating_points_cnt_minus_1 == 0) {
1165 seq->operating_point_idc[0] = 0;
1166 } else {
1167 // Set operating_point_idc[] such that for the i-th operating point the
1168 // first (operating_points_cnt-i) spatial layers and the first temporal
1169    // layer are decoded. Note that the highest quality operating point should
1170    // come first.
1171 for (int i = 0; i < seq->operating_points_cnt_minus_1 + 1; i++)
1172 seq->operating_point_idc[i] =
1173 (~(~0u << (seq->operating_points_cnt_minus_1 + 1 - i)) << 8) | 1;
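    // Added illustrative note (not in the original source): with three
    // operating points (operating_points_cnt_minus_1 == 2) the expression
    // above produces
    //   i == 0: ~(~0u << 3) == 0x7 -> (0x7 << 8) | 1 == 0x701
    //   i == 1: ~(~0u << 2) == 0x3 -> (0x3 << 8) | 1 == 0x301
    //   i == 2: ~(~0u << 1) == 0x1 -> (0x1 << 8) | 1 == 0x101
    // i.e. bit 0 always enables temporal layer 0 and the bits from 8 upward
    // enable the leading spatial layers, assuming the usual
    // operating_point_idc layout (low 8 bits = temporal layers, next 4 bits =
    // spatial layers).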
1174 }
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001175}
1176
Yaowu Xuf883b422016-08-30 14:01:10 -07001177static void init_config(struct AV1_COMP *cpi, AV1EncoderConfig *oxcf) {
1178 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001179
1180 cpi->oxcf = *oxcf;
1181 cpi->framerate = oxcf->init_framerate;
1182
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001183 cm->seq_params.profile = oxcf->profile;
1184 cm->seq_params.bit_depth = oxcf->bit_depth;
1185 cm->seq_params.use_highbitdepth = oxcf->use_highbitdepth;
1186 cm->seq_params.color_primaries = oxcf->color_primaries;
1187 cm->seq_params.transfer_characteristics = oxcf->transfer_characteristics;
1188 cm->seq_params.matrix_coefficients = oxcf->matrix_coefficients;
Debargha Mukherjeef340fec2018-01-10 18:12:22 -08001189 cm->seq_params.monochrome = oxcf->monochrome;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001190 cm->seq_params.chroma_sample_position = oxcf->chroma_sample_position;
1191 cm->seq_params.color_range = oxcf->color_range;
Andrey Norkin28e9ce22018-01-08 10:11:21 -08001192 cm->timing_info_present = oxcf->timing_info_present;
Andrey Norkin795ba872018-03-06 13:24:14 -08001193 cm->timing_info.num_units_in_display_tick =
1194 oxcf->timing_info.num_units_in_display_tick;
1195 cm->timing_info.time_scale = oxcf->timing_info.time_scale;
1196 cm->timing_info.equal_picture_interval =
1197 oxcf->timing_info.equal_picture_interval;
1198 cm->timing_info.num_ticks_per_picture =
1199 oxcf->timing_info.num_ticks_per_picture;
1200
Andrey Norkin26495512018-06-20 17:13:11 -07001201 cm->seq_params.display_model_info_present_flag =
1202 oxcf->display_model_info_present_flag;
Adrian Grangec56f6ec2018-05-31 14:19:32 -07001203 cm->seq_params.decoder_model_info_present_flag =
1204 oxcf->decoder_model_info_present_flag;
Andrey Norkin795ba872018-03-06 13:24:14 -08001205 if (oxcf->decoder_model_info_present_flag) {
Andrey Norkin26495512018-06-20 17:13:11 -07001206 // set the decoder model parameters in schedule mode
Andrey Norkin795ba872018-03-06 13:24:14 -08001207 cm->buffer_model.num_units_in_decoding_tick =
1208 oxcf->buffer_model.num_units_in_decoding_tick;
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07001209 cm->buffer_removal_time_present = 1;
Andrey Norkin795ba872018-03-06 13:24:14 -08001210 set_aom_dec_model_info(&cm->buffer_model);
Andrey Norkin26495512018-06-20 17:13:11 -07001211 set_dec_model_op_parameters(&cm->op_params[0]);
1212 } else if (cm->timing_info_present &&
1213 cm->timing_info.equal_picture_interval &&
1214 !cm->seq_params.decoder_model_info_present_flag) {
1215 // set the decoder model parameters in resource availability mode
1216 set_resource_availability_parameters(&cm->op_params[0]);
Andrey Norkinc7511de2018-06-22 12:31:06 -07001217 } else {
1218 cm->op_params[0].initial_display_delay =
1219 10; // Default value (not signaled)
Andrey Norkin795ba872018-03-06 13:24:14 -08001220 }
Andrey Norkinc7511de2018-06-22 12:31:06 -07001221
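  // Added note (not in the original source): the block below appears to derive
  // the chroma subsampling implied by the configuration: monochrome and
  // profile 0 use 4:2:0 (x = 1, y = 1), sRGB/identity content and profile 1
  // use 4:4:4 (x = 0, y = 0), and profile 2 takes the configured subsampling
  // at 12 bits but otherwise uses 4:2:2 (x = 1, y = 0).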
Tom Fineganf8d6a162018-08-21 10:47:55 -07001222 if (cm->seq_params.monochrome) {
1223 cm->seq_params.subsampling_x = 1;
1224 cm->seq_params.subsampling_y = 1;
1225 } else if (cm->seq_params.color_primaries == AOM_CICP_CP_BT_709 &&
1226 cm->seq_params.transfer_characteristics == AOM_CICP_TC_SRGB &&
1227 cm->seq_params.matrix_coefficients == AOM_CICP_MC_IDENTITY) {
1228 cm->seq_params.subsampling_x = 0;
1229 cm->seq_params.subsampling_y = 0;
1230 } else {
1231 if (cm->seq_params.profile == 0) {
1232 cm->seq_params.subsampling_x = 1;
1233 cm->seq_params.subsampling_y = 1;
1234 } else if (cm->seq_params.profile == 1) {
1235 cm->seq_params.subsampling_x = 0;
1236 cm->seq_params.subsampling_y = 0;
1237 } else {
1238 if (cm->seq_params.bit_depth == AOM_BITS_12) {
1239 cm->seq_params.subsampling_x = oxcf->chroma_subsampling_x;
1240 cm->seq_params.subsampling_y = oxcf->chroma_subsampling_y;
1241 } else {
1242 cm->seq_params.subsampling_x = 1;
1243 cm->seq_params.subsampling_y = 0;
1244 }
1245 }
Tom Finegan02b2a842018-08-24 13:50:00 -07001246 }
1247
Yaowu Xuc27fc142016-08-22 16:08:15 -07001248 cm->width = oxcf->width;
1249 cm->height = oxcf->height;
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001250 set_sb_size(&cm->seq_params,
1251 select_sb_size(cpi)); // set sb size before allocations
Cheng Chen46f30c72017-09-07 11:13:33 -07001252 alloc_compressor_data(cpi);
Yaowu Xuc7119a72018-03-29 09:59:37 -07001253
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08001254 update_film_grain_parameters(cpi, oxcf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001255
1256 // Single thread case: use counts in common.
Yue Chencc6a6ef2018-05-21 16:21:05 -07001257 cpi->td.counts = &cpi->counts;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001258
1259 // change includes all joint functionality
Yaowu Xuf883b422016-08-30 14:01:10 -07001260 av1_change_config(cpi, oxcf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001261
1262 cpi->static_mb_pct = 0;
1263 cpi->ref_frame_flags = 0;
1264
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07001265 // Reset resize pending flags
1266 cpi->resize_pending_width = 0;
1267 cpi->resize_pending_height = 0;
1268
Yaowu Xuc27fc142016-08-22 16:08:15 -07001269 init_buffer_indices(cpi);
1270}
1271
1272static void set_rc_buffer_sizes(RATE_CONTROL *rc,
Yaowu Xuf883b422016-08-30 14:01:10 -07001273 const AV1EncoderConfig *oxcf) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001274 const int64_t bandwidth = oxcf->target_bandwidth;
1275 const int64_t starting = oxcf->starting_buffer_level_ms;
1276 const int64_t optimal = oxcf->optimal_buffer_level_ms;
1277 const int64_t maximum = oxcf->maximum_buffer_size_ms;
1278
1279 rc->starting_buffer_level = starting * bandwidth / 1000;
1280 rc->optimal_buffer_level =
1281 (optimal == 0) ? bandwidth / 8 : optimal * bandwidth / 1000;
1282 rc->maximum_buffer_size =
1283 (maximum == 0) ? bandwidth / 8 : maximum * bandwidth / 1000;
1284}
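// Added illustrative sketch (not in the original source): it exercises the
// conversions above for a hypothetical 1 Mbps configuration. The struct and
// field names are the ones already used in set_rc_buffer_sizes(); the block
// is kept out of the build.
#if 0
static void rc_buffer_sizes_example(void) {
  AV1EncoderConfig oxcf_example = { 0 };
  RATE_CONTROL rc_example;
  oxcf_example.target_bandwidth = 1000000;      // 1 Mbps
  oxcf_example.starting_buffer_level_ms = 600;  // 600 ms of data at the target
  // optimal_buffer_level_ms and maximum_buffer_size_ms are left at 0, so both
  // fall back to target_bandwidth / 8.
  set_rc_buffer_sizes(&rc_example, &oxcf_example);
  // rc_example.starting_buffer_level == 600 * 1000000 / 1000 == 600000 bits
  // rc_example.optimal_buffer_level  == 1000000 / 8          == 125000 bits
  // rc_example.maximum_buffer_size   == 1000000 / 8          == 125000 bits
}
#endif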
1285
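// Added note (not in the original source): the fn_ptr table filled in by the
// macros below appears to hold, per block size, the SAD (sdf), SAD against an
// averaged second predictor (sdaf), variance (vf), sub-pixel variance (svf),
// sub-pixel average variance (svaf), 4-reference SAD (sdx4df) and the
// distance-weighted compound counterparts (jsdaf, jsvaf).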
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001286#define HIGHBD_BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX4DF, JSDAF, JSVAF) \
1287 cpi->fn_ptr[BT].sdf = SDF; \
1288 cpi->fn_ptr[BT].sdaf = SDAF; \
1289 cpi->fn_ptr[BT].vf = VF; \
1290 cpi->fn_ptr[BT].svf = SVF; \
1291 cpi->fn_ptr[BT].svaf = SVAF; \
1292 cpi->fn_ptr[BT].sdx4df = SDX4DF; \
1293 cpi->fn_ptr[BT].jsdaf = JSDAF; \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001294 cpi->fn_ptr[BT].jsvaf = JSVAF;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001295
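// Added note (not in the original source): the _bits10 and _bits12 wrappers
// generated below shift the SAD right by 2 and 4 respectively, presumably so
// that distortion computed from 10- and 12-bit samples stays on the same
// 8-bit scale as the rest of the rate-distortion code; e.g. a per-pixel error
// of 4 in 10-bit samples corresponds to an error of 1 at 8 bits, hence >> 2.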
1296#define MAKE_BFP_SAD_WRAPPER(fnname) \
1297 static unsigned int fnname##_bits8(const uint8_t *src_ptr, \
1298 int source_stride, \
1299 const uint8_t *ref_ptr, int ref_stride) { \
1300 return fnname(src_ptr, source_stride, ref_ptr, ref_stride); \
1301 } \
1302 static unsigned int fnname##_bits10( \
1303 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1304 int ref_stride) { \
1305 return fnname(src_ptr, source_stride, ref_ptr, ref_stride) >> 2; \
1306 } \
1307 static unsigned int fnname##_bits12( \
1308 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1309 int ref_stride) { \
1310 return fnname(src_ptr, source_stride, ref_ptr, ref_stride) >> 4; \
1311 }
1312
1313#define MAKE_BFP_SADAVG_WRAPPER(fnname) \
1314 static unsigned int fnname##_bits8( \
1315 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1316 int ref_stride, const uint8_t *second_pred) { \
1317 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred); \
1318 } \
1319 static unsigned int fnname##_bits10( \
1320 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1321 int ref_stride, const uint8_t *second_pred) { \
1322 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred) >> \
1323 2; \
1324 } \
1325 static unsigned int fnname##_bits12( \
1326 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1327 int ref_stride, const uint8_t *second_pred) { \
1328 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred) >> \
1329 4; \
1330 }
1331
Yaowu Xuc27fc142016-08-22 16:08:15 -07001332#define MAKE_BFP_SAD4D_WRAPPER(fnname) \
1333 static void fnname##_bits8(const uint8_t *src_ptr, int source_stride, \
1334 const uint8_t *const ref_ptr[], int ref_stride, \
1335 unsigned int *sad_array) { \
1336 fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
1337 } \
1338 static void fnname##_bits10(const uint8_t *src_ptr, int source_stride, \
1339 const uint8_t *const ref_ptr[], int ref_stride, \
1340 unsigned int *sad_array) { \
1341 int i; \
1342 fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
1343 for (i = 0; i < 4; i++) sad_array[i] >>= 2; \
1344 } \
1345 static void fnname##_bits12(const uint8_t *src_ptr, int source_stride, \
1346 const uint8_t *const ref_ptr[], int ref_stride, \
1347 unsigned int *sad_array) { \
1348 int i; \
1349 fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
1350 for (i = 0; i < 4; i++) sad_array[i] >>= 4; \
1351 }
1352
Cheng Chenbf3d4962017-11-01 14:48:52 -07001353#define MAKE_BFP_JSADAVG_WRAPPER(fnname) \
1354 static unsigned int fnname##_bits8( \
1355 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1356 int ref_stride, const uint8_t *second_pred, \
Debargha Mukherjeef90004a2018-12-20 13:35:06 -08001357 const DIST_WTD_COMP_PARAMS *jcp_param) { \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001358 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred, \
1359 jcp_param); \
1360 } \
1361 static unsigned int fnname##_bits10( \
1362 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1363 int ref_stride, const uint8_t *second_pred, \
Debargha Mukherjeef90004a2018-12-20 13:35:06 -08001364 const DIST_WTD_COMP_PARAMS *jcp_param) { \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001365 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred, \
1366 jcp_param) >> \
1367 2; \
1368 } \
1369 static unsigned int fnname##_bits12( \
1370 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1371 int ref_stride, const uint8_t *second_pred, \
Debargha Mukherjeef90004a2018-12-20 13:35:06 -08001372 const DIST_WTD_COMP_PARAMS *jcp_param) { \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001373 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred, \
1374 jcp_param) >> \
1375 4; \
1376 }
Cheng Chenbf3d4962017-11-01 14:48:52 -07001377
Yaowu Xuf883b422016-08-30 14:01:10 -07001378MAKE_BFP_SAD_WRAPPER(aom_highbd_sad128x128)
1379MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad128x128_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001380MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad128x128x4d)
1381MAKE_BFP_SAD_WRAPPER(aom_highbd_sad128x64)
1382MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad128x64_avg)
1383MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad128x64x4d)
1384MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x128)
1385MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x128_avg)
1386MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x128x4d)
Yaowu Xuf883b422016-08-30 14:01:10 -07001387MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x16)
1388MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x16_avg)
1389MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x16x4d)
1390MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x32)
1391MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x32_avg)
1392MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x32x4d)
1393MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x32)
1394MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x32_avg)
1395MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x32x4d)
1396MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x64)
1397MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x64_avg)
1398MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x64x4d)
1399MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x32)
1400MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x32_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001401MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x32x4d)
1402MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x64)
1403MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x64_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001404MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x64x4d)
1405MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x16)
1406MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x16_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001407MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x16x4d)
1408MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x8)
1409MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x8_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001410MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x8x4d)
1411MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x16)
1412MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x16_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001413MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x16x4d)
1414MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x8)
1415MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x8_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001416MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x8x4d)
1417MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x4)
1418MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x4_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001419MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x4x4d)
1420MAKE_BFP_SAD_WRAPPER(aom_highbd_sad4x8)
1421MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad4x8_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001422MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad4x8x4d)
1423MAKE_BFP_SAD_WRAPPER(aom_highbd_sad4x4)
1424MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad4x4_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001425MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad4x4x4d)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001426
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001427MAKE_BFP_SAD_WRAPPER(aom_highbd_sad4x16)
1428MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad4x16_avg)
1429MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad4x16x4d)
1430MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x4)
1431MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x4_avg)
1432MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x4x4d)
1433MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x32)
1434MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x32_avg)
1435MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x32x4d)
1436MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x8)
1437MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x8_avg)
1438MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x8x4d)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001439MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x64)
1440MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x64_avg)
1441MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x64x4d)
1442MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x16)
1443MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x16_avg)
1444MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x16x4d)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001445
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001446MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad128x128_avg)
1447MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad128x64_avg)
1448MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x128_avg)
1449MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x16_avg)
1450MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x32_avg)
1451MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x32_avg)
1452MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x64_avg)
1453MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x32_avg)
1454MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x64_avg)
1455MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x16_avg)
1456MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x8_avg)
1457MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x16_avg)
1458MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x8_avg)
1459MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x4_avg)
1460MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad4x8_avg)
1461MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad4x4_avg)
1462MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad4x16_avg)
1463MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x4_avg)
1464MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x32_avg)
1465MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x8_avg)
1466MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x64_avg)
1467MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x16_avg)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001468
David Barker0f3c94e2017-05-16 15:21:50 +01001469#define HIGHBD_MBFP(BT, MCSDF, MCSVF) \
David Barkerf19f35f2017-05-22 16:33:22 +01001470 cpi->fn_ptr[BT].msdf = MCSDF; \
1471 cpi->fn_ptr[BT].msvf = MCSVF;
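// Added note (not in the original source): msdf/msvf appear to be the masked
// (wedge / difference-weighted) compound SAD and sub-pixel variance used when
// searching masked compound predictions; the wrappers below rescale them for
// 10- and 12-bit input in the same way as the plain SAD wrappers above.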
Yaowu Xuc27fc142016-08-22 16:08:15 -07001472
David Barkerc155e012017-05-11 13:54:54 +01001473#define MAKE_MBFP_COMPOUND_SAD_WRAPPER(fnname) \
1474 static unsigned int fnname##_bits8( \
1475 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1476 int ref_stride, const uint8_t *second_pred_ptr, const uint8_t *m, \
1477 int m_stride, int invert_mask) { \
1478 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
1479 second_pred_ptr, m, m_stride, invert_mask); \
1480 } \
1481 static unsigned int fnname##_bits10( \
1482 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1483 int ref_stride, const uint8_t *second_pred_ptr, const uint8_t *m, \
1484 int m_stride, int invert_mask) { \
1485 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
1486 second_pred_ptr, m, m_stride, invert_mask) >> \
1487 2; \
1488 } \
1489 static unsigned int fnname##_bits12( \
1490 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1491 int ref_stride, const uint8_t *second_pred_ptr, const uint8_t *m, \
1492 int m_stride, int invert_mask) { \
1493 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
1494 second_pred_ptr, m, m_stride, invert_mask) >> \
1495 4; \
1496 }
1497
David Barkerf19f35f2017-05-22 16:33:22 +01001498MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad128x128)
1499MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad128x64)
1500MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01001501MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x64)
1502MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x32)
1503MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x64)
1504MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x32)
1505MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x16)
1506MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x32)
1507MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x16)
1508MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x8)
1509MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x16)
1510MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x8)
1511MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x4)
1512MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad4x8)
1513MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001514MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad4x16)
1515MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x4)
1516MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x32)
1517MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001518MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x64)
1519MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001520
Yaowu Xuc27fc142016-08-22 16:08:15 -07001521#define HIGHBD_OBFP(BT, OSDF, OVF, OSVF) \
1522 cpi->fn_ptr[BT].osdf = OSDF; \
1523 cpi->fn_ptr[BT].ovf = OVF; \
1524 cpi->fn_ptr[BT].osvf = OSVF;
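// Added note (not in the original source): osdf/ovf/osvf appear to be the
// OBMC (overlapped block motion compensation) SAD, variance and sub-pixel
// variance; as the MAKE_OBFP_SAD_WRAPPER signatures below show, they take a
// 32-bit weighted source (wsrc) and mask (msk) buffer rather than an 8-bit
// second predictor.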
1525
1526#define MAKE_OBFP_SAD_WRAPPER(fnname) \
1527 static unsigned int fnname##_bits8(const uint8_t *ref, int ref_stride, \
1528 const int32_t *wsrc, \
1529 const int32_t *msk) { \
1530 return fnname(ref, ref_stride, wsrc, msk); \
1531 } \
1532 static unsigned int fnname##_bits10(const uint8_t *ref, int ref_stride, \
1533 const int32_t *wsrc, \
1534 const int32_t *msk) { \
1535 return fnname(ref, ref_stride, wsrc, msk) >> 2; \
1536 } \
1537 static unsigned int fnname##_bits12(const uint8_t *ref, int ref_stride, \
1538 const int32_t *wsrc, \
1539 const int32_t *msk) { \
1540 return fnname(ref, ref_stride, wsrc, msk) >> 4; \
1541 }
1542
Yaowu Xuf883b422016-08-30 14:01:10 -07001543MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad128x128)
1544MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad128x64)
1545MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07001546MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x64)
1547MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x32)
1548MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x64)
1549MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x32)
1550MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x16)
1551MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x32)
1552MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x16)
1553MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x8)
1554MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x16)
1555MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x8)
1556MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x4)
1557MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad4x8)
1558MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001559MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad4x16)
1560MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x4)
1561MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x32)
1562MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001563MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x64)
1564MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001565
Yaowu Xuf883b422016-08-30 14:01:10 -07001566static void highbd_set_var_fns(AV1_COMP *const cpi) {
1567 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001568 if (cm->seq_params.use_highbitdepth) {
1569 switch (cm->seq_params.bit_depth) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001570 case AOM_BITS_8:
Cheng Chenbf3d4962017-11-01 14:48:52 -07001571 HIGHBD_BFP(BLOCK_64X16, aom_highbd_sad64x16_bits8,
1572 aom_highbd_sad64x16_avg_bits8, aom_highbd_8_variance64x16,
1573 aom_highbd_8_sub_pixel_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001574 aom_highbd_8_sub_pixel_avg_variance64x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001575 aom_highbd_sad64x16x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001576 aom_highbd_dist_wtd_sad64x16_avg_bits8,
1577 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001578
1579 HIGHBD_BFP(BLOCK_16X64, aom_highbd_sad16x64_bits8,
1580 aom_highbd_sad16x64_avg_bits8, aom_highbd_8_variance16x64,
1581 aom_highbd_8_sub_pixel_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001582 aom_highbd_8_sub_pixel_avg_variance16x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001583 aom_highbd_sad16x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001584 aom_highbd_dist_wtd_sad16x64_avg_bits8,
1585 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001586
1587 HIGHBD_BFP(
1588 BLOCK_32X8, aom_highbd_sad32x8_bits8, aom_highbd_sad32x8_avg_bits8,
1589 aom_highbd_8_variance32x8, aom_highbd_8_sub_pixel_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001590 aom_highbd_8_sub_pixel_avg_variance32x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001591 aom_highbd_sad32x8x4d_bits8, aom_highbd_dist_wtd_sad32x8_avg_bits8,
1592 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x8)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001593
1594 HIGHBD_BFP(
1595 BLOCK_8X32, aom_highbd_sad8x32_bits8, aom_highbd_sad8x32_avg_bits8,
1596 aom_highbd_8_variance8x32, aom_highbd_8_sub_pixel_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001597 aom_highbd_8_sub_pixel_avg_variance8x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001598 aom_highbd_sad8x32x4d_bits8, aom_highbd_dist_wtd_sad8x32_avg_bits8,
1599 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001600
1601 HIGHBD_BFP(
1602 BLOCK_16X4, aom_highbd_sad16x4_bits8, aom_highbd_sad16x4_avg_bits8,
1603 aom_highbd_8_variance16x4, aom_highbd_8_sub_pixel_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001604 aom_highbd_8_sub_pixel_avg_variance16x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001605 aom_highbd_sad16x4x4d_bits8, aom_highbd_dist_wtd_sad16x4_avg_bits8,
1606 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x4)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001607
1608 HIGHBD_BFP(
1609 BLOCK_4X16, aom_highbd_sad4x16_bits8, aom_highbd_sad4x16_avg_bits8,
1610 aom_highbd_8_variance4x16, aom_highbd_8_sub_pixel_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001611 aom_highbd_8_sub_pixel_avg_variance4x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001612 aom_highbd_sad4x16x4d_bits8, aom_highbd_dist_wtd_sad4x16_avg_bits8,
1613 aom_highbd_8_dist_wtd_sub_pixel_avg_variance4x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001614
1615 HIGHBD_BFP(BLOCK_32X16, aom_highbd_sad32x16_bits8,
1616 aom_highbd_sad32x16_avg_bits8, aom_highbd_8_variance32x16,
1617 aom_highbd_8_sub_pixel_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001618 aom_highbd_8_sub_pixel_avg_variance32x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001619 aom_highbd_sad32x16x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001620 aom_highbd_dist_wtd_sad32x16_avg_bits8,
1621 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001622
1623 HIGHBD_BFP(BLOCK_16X32, aom_highbd_sad16x32_bits8,
1624 aom_highbd_sad16x32_avg_bits8, aom_highbd_8_variance16x32,
1625 aom_highbd_8_sub_pixel_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001626 aom_highbd_8_sub_pixel_avg_variance16x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001627 aom_highbd_sad16x32x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001628 aom_highbd_dist_wtd_sad16x32_avg_bits8,
1629 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001630
1631 HIGHBD_BFP(BLOCK_64X32, aom_highbd_sad64x32_bits8,
1632 aom_highbd_sad64x32_avg_bits8, aom_highbd_8_variance64x32,
1633 aom_highbd_8_sub_pixel_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001634 aom_highbd_8_sub_pixel_avg_variance64x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001635 aom_highbd_sad64x32x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001636 aom_highbd_dist_wtd_sad64x32_avg_bits8,
1637 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001638
1639 HIGHBD_BFP(BLOCK_32X64, aom_highbd_sad32x64_bits8,
1640 aom_highbd_sad32x64_avg_bits8, aom_highbd_8_variance32x64,
1641 aom_highbd_8_sub_pixel_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001642 aom_highbd_8_sub_pixel_avg_variance32x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001643 aom_highbd_sad32x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001644 aom_highbd_dist_wtd_sad32x64_avg_bits8,
1645 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001646
1647 HIGHBD_BFP(BLOCK_32X32, aom_highbd_sad32x32_bits8,
1648 aom_highbd_sad32x32_avg_bits8, aom_highbd_8_variance32x32,
1649 aom_highbd_8_sub_pixel_variance32x32,
1650 aom_highbd_8_sub_pixel_avg_variance32x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001651 aom_highbd_sad32x32x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001652 aom_highbd_dist_wtd_sad32x32_avg_bits8,
1653 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001654
1655 HIGHBD_BFP(BLOCK_64X64, aom_highbd_sad64x64_bits8,
1656 aom_highbd_sad64x64_avg_bits8, aom_highbd_8_variance64x64,
1657 aom_highbd_8_sub_pixel_variance64x64,
1658 aom_highbd_8_sub_pixel_avg_variance64x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001659 aom_highbd_sad64x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001660 aom_highbd_dist_wtd_sad64x64_avg_bits8,
1661 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001662
1663 HIGHBD_BFP(BLOCK_16X16, aom_highbd_sad16x16_bits8,
1664 aom_highbd_sad16x16_avg_bits8, aom_highbd_8_variance16x16,
1665 aom_highbd_8_sub_pixel_variance16x16,
1666 aom_highbd_8_sub_pixel_avg_variance16x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001667 aom_highbd_sad16x16x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001668 aom_highbd_dist_wtd_sad16x16_avg_bits8,
1669 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001670
1671 HIGHBD_BFP(
1672 BLOCK_16X8, aom_highbd_sad16x8_bits8, aom_highbd_sad16x8_avg_bits8,
1673 aom_highbd_8_variance16x8, aom_highbd_8_sub_pixel_variance16x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001674 aom_highbd_8_sub_pixel_avg_variance16x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001675 aom_highbd_sad16x8x4d_bits8, aom_highbd_dist_wtd_sad16x8_avg_bits8,
1676 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x8)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001677
1678 HIGHBD_BFP(
1679 BLOCK_8X16, aom_highbd_sad8x16_bits8, aom_highbd_sad8x16_avg_bits8,
1680 aom_highbd_8_variance8x16, aom_highbd_8_sub_pixel_variance8x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001681 aom_highbd_8_sub_pixel_avg_variance8x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001682 aom_highbd_sad8x16x4d_bits8, aom_highbd_dist_wtd_sad8x16_avg_bits8,
1683 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001684
Cheng Chenbf3d4962017-11-01 14:48:52 -07001685 HIGHBD_BFP(
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001686 BLOCK_8X8, aom_highbd_sad8x8_bits8, aom_highbd_sad8x8_avg_bits8,
1687 aom_highbd_8_variance8x8, aom_highbd_8_sub_pixel_variance8x8,
1688 aom_highbd_8_sub_pixel_avg_variance8x8, aom_highbd_sad8x8x4d_bits8,
1689 aom_highbd_dist_wtd_sad8x8_avg_bits8,
1690 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x8)
1691
1692 HIGHBD_BFP(
1693 BLOCK_8X4, aom_highbd_sad8x4_bits8, aom_highbd_sad8x4_avg_bits8,
1694 aom_highbd_8_variance8x4, aom_highbd_8_sub_pixel_variance8x4,
1695 aom_highbd_8_sub_pixel_avg_variance8x4, aom_highbd_sad8x4x4d_bits8,
1696 aom_highbd_dist_wtd_sad8x4_avg_bits8,
1697 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x4)
1698
1699 HIGHBD_BFP(
1700 BLOCK_4X8, aom_highbd_sad4x8_bits8, aom_highbd_sad4x8_avg_bits8,
1701 aom_highbd_8_variance4x8, aom_highbd_8_sub_pixel_variance4x8,
1702 aom_highbd_8_sub_pixel_avg_variance4x8, aom_highbd_sad4x8x4d_bits8,
1703 aom_highbd_dist_wtd_sad4x8_avg_bits8,
1704 aom_highbd_8_dist_wtd_sub_pixel_avg_variance4x8)
1705
1706 HIGHBD_BFP(
1707 BLOCK_4X4, aom_highbd_sad4x4_bits8, aom_highbd_sad4x4_avg_bits8,
1708 aom_highbd_8_variance4x4, aom_highbd_8_sub_pixel_variance4x4,
1709 aom_highbd_8_sub_pixel_avg_variance4x4, aom_highbd_sad4x4x4d_bits8,
1710 aom_highbd_dist_wtd_sad4x4_avg_bits8,
1711 aom_highbd_8_dist_wtd_sub_pixel_avg_variance4x4)
1712
1713 HIGHBD_BFP(BLOCK_128X128, aom_highbd_sad128x128_bits8,
1714 aom_highbd_sad128x128_avg_bits8,
1715 aom_highbd_8_variance128x128,
1716 aom_highbd_8_sub_pixel_variance128x128,
1717 aom_highbd_8_sub_pixel_avg_variance128x128,
1718 aom_highbd_sad128x128x4d_bits8,
1719 aom_highbd_dist_wtd_sad128x128_avg_bits8,
1720 aom_highbd_8_dist_wtd_sub_pixel_avg_variance128x128)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001721
1722 HIGHBD_BFP(BLOCK_128X64, aom_highbd_sad128x64_bits8,
1723 aom_highbd_sad128x64_avg_bits8, aom_highbd_8_variance128x64,
1724 aom_highbd_8_sub_pixel_variance128x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001725 aom_highbd_8_sub_pixel_avg_variance128x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001726 aom_highbd_sad128x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001727 aom_highbd_dist_wtd_sad128x64_avg_bits8,
1728 aom_highbd_8_dist_wtd_sub_pixel_avg_variance128x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001729
1730 HIGHBD_BFP(BLOCK_64X128, aom_highbd_sad64x128_bits8,
1731 aom_highbd_sad64x128_avg_bits8, aom_highbd_8_variance64x128,
1732 aom_highbd_8_sub_pixel_variance64x128,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001733 aom_highbd_8_sub_pixel_avg_variance64x128,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001734 aom_highbd_sad64x128x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001735 aom_highbd_dist_wtd_sad64x128_avg_bits8,
1736 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x128)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001737
David Barkerf19f35f2017-05-22 16:33:22 +01001738 HIGHBD_MBFP(BLOCK_128X128, aom_highbd_masked_sad128x128_bits8,
1739 aom_highbd_8_masked_sub_pixel_variance128x128)
1740 HIGHBD_MBFP(BLOCK_128X64, aom_highbd_masked_sad128x64_bits8,
1741 aom_highbd_8_masked_sub_pixel_variance128x64)
1742 HIGHBD_MBFP(BLOCK_64X128, aom_highbd_masked_sad64x128_bits8,
1743 aom_highbd_8_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01001744 HIGHBD_MBFP(BLOCK_64X64, aom_highbd_masked_sad64x64_bits8,
1745 aom_highbd_8_masked_sub_pixel_variance64x64)
1746 HIGHBD_MBFP(BLOCK_64X32, aom_highbd_masked_sad64x32_bits8,
1747 aom_highbd_8_masked_sub_pixel_variance64x32)
1748 HIGHBD_MBFP(BLOCK_32X64, aom_highbd_masked_sad32x64_bits8,
1749 aom_highbd_8_masked_sub_pixel_variance32x64)
1750 HIGHBD_MBFP(BLOCK_32X32, aom_highbd_masked_sad32x32_bits8,
1751 aom_highbd_8_masked_sub_pixel_variance32x32)
1752 HIGHBD_MBFP(BLOCK_32X16, aom_highbd_masked_sad32x16_bits8,
1753 aom_highbd_8_masked_sub_pixel_variance32x16)
1754 HIGHBD_MBFP(BLOCK_16X32, aom_highbd_masked_sad16x32_bits8,
1755 aom_highbd_8_masked_sub_pixel_variance16x32)
1756 HIGHBD_MBFP(BLOCK_16X16, aom_highbd_masked_sad16x16_bits8,
1757 aom_highbd_8_masked_sub_pixel_variance16x16)
1758 HIGHBD_MBFP(BLOCK_8X16, aom_highbd_masked_sad8x16_bits8,
1759 aom_highbd_8_masked_sub_pixel_variance8x16)
1760 HIGHBD_MBFP(BLOCK_16X8, aom_highbd_masked_sad16x8_bits8,
1761 aom_highbd_8_masked_sub_pixel_variance16x8)
1762 HIGHBD_MBFP(BLOCK_8X8, aom_highbd_masked_sad8x8_bits8,
1763 aom_highbd_8_masked_sub_pixel_variance8x8)
1764 HIGHBD_MBFP(BLOCK_4X8, aom_highbd_masked_sad4x8_bits8,
1765 aom_highbd_8_masked_sub_pixel_variance4x8)
1766 HIGHBD_MBFP(BLOCK_8X4, aom_highbd_masked_sad8x4_bits8,
1767 aom_highbd_8_masked_sub_pixel_variance8x4)
1768 HIGHBD_MBFP(BLOCK_4X4, aom_highbd_masked_sad4x4_bits8,
1769 aom_highbd_8_masked_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001770 HIGHBD_MBFP(BLOCK_64X16, aom_highbd_masked_sad64x16_bits8,
1771 aom_highbd_8_masked_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001772 HIGHBD_MBFP(BLOCK_16X64, aom_highbd_masked_sad16x64_bits8,
1773 aom_highbd_8_masked_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001774 HIGHBD_MBFP(BLOCK_32X8, aom_highbd_masked_sad32x8_bits8,
1775 aom_highbd_8_masked_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001776 HIGHBD_MBFP(BLOCK_8X32, aom_highbd_masked_sad8x32_bits8,
1777 aom_highbd_8_masked_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001778 HIGHBD_MBFP(BLOCK_16X4, aom_highbd_masked_sad16x4_bits8,
1779 aom_highbd_8_masked_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001780 HIGHBD_MBFP(BLOCK_4X16, aom_highbd_masked_sad4x16_bits8,
1781 aom_highbd_8_masked_sub_pixel_variance4x16)
Yaowu Xuf883b422016-08-30 14:01:10 -07001782 HIGHBD_OBFP(BLOCK_128X128, aom_highbd_obmc_sad128x128_bits8,
1783 aom_highbd_obmc_variance128x128,
1784 aom_highbd_obmc_sub_pixel_variance128x128)
1785 HIGHBD_OBFP(BLOCK_128X64, aom_highbd_obmc_sad128x64_bits8,
1786 aom_highbd_obmc_variance128x64,
1787 aom_highbd_obmc_sub_pixel_variance128x64)
1788 HIGHBD_OBFP(BLOCK_64X128, aom_highbd_obmc_sad64x128_bits8,
1789 aom_highbd_obmc_variance64x128,
1790 aom_highbd_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07001791 HIGHBD_OBFP(BLOCK_64X64, aom_highbd_obmc_sad64x64_bits8,
1792 aom_highbd_obmc_variance64x64,
1793 aom_highbd_obmc_sub_pixel_variance64x64)
1794 HIGHBD_OBFP(BLOCK_64X32, aom_highbd_obmc_sad64x32_bits8,
1795 aom_highbd_obmc_variance64x32,
1796 aom_highbd_obmc_sub_pixel_variance64x32)
1797 HIGHBD_OBFP(BLOCK_32X64, aom_highbd_obmc_sad32x64_bits8,
1798 aom_highbd_obmc_variance32x64,
1799 aom_highbd_obmc_sub_pixel_variance32x64)
1800 HIGHBD_OBFP(BLOCK_32X32, aom_highbd_obmc_sad32x32_bits8,
1801 aom_highbd_obmc_variance32x32,
1802 aom_highbd_obmc_sub_pixel_variance32x32)
1803 HIGHBD_OBFP(BLOCK_32X16, aom_highbd_obmc_sad32x16_bits8,
1804 aom_highbd_obmc_variance32x16,
1805 aom_highbd_obmc_sub_pixel_variance32x16)
1806 HIGHBD_OBFP(BLOCK_16X32, aom_highbd_obmc_sad16x32_bits8,
1807 aom_highbd_obmc_variance16x32,
1808 aom_highbd_obmc_sub_pixel_variance16x32)
1809 HIGHBD_OBFP(BLOCK_16X16, aom_highbd_obmc_sad16x16_bits8,
1810 aom_highbd_obmc_variance16x16,
1811 aom_highbd_obmc_sub_pixel_variance16x16)
1812 HIGHBD_OBFP(BLOCK_8X16, aom_highbd_obmc_sad8x16_bits8,
1813 aom_highbd_obmc_variance8x16,
1814 aom_highbd_obmc_sub_pixel_variance8x16)
1815 HIGHBD_OBFP(BLOCK_16X8, aom_highbd_obmc_sad16x8_bits8,
1816 aom_highbd_obmc_variance16x8,
1817 aom_highbd_obmc_sub_pixel_variance16x8)
1818 HIGHBD_OBFP(BLOCK_8X8, aom_highbd_obmc_sad8x8_bits8,
1819 aom_highbd_obmc_variance8x8,
1820 aom_highbd_obmc_sub_pixel_variance8x8)
1821 HIGHBD_OBFP(BLOCK_4X8, aom_highbd_obmc_sad4x8_bits8,
1822 aom_highbd_obmc_variance4x8,
1823 aom_highbd_obmc_sub_pixel_variance4x8)
1824 HIGHBD_OBFP(BLOCK_8X4, aom_highbd_obmc_sad8x4_bits8,
1825 aom_highbd_obmc_variance8x4,
1826 aom_highbd_obmc_sub_pixel_variance8x4)
1827 HIGHBD_OBFP(BLOCK_4X4, aom_highbd_obmc_sad4x4_bits8,
1828 aom_highbd_obmc_variance4x4,
1829 aom_highbd_obmc_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001830 HIGHBD_OBFP(BLOCK_64X16, aom_highbd_obmc_sad64x16_bits8,
1831 aom_highbd_obmc_variance64x16,
1832 aom_highbd_obmc_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001833 HIGHBD_OBFP(BLOCK_16X64, aom_highbd_obmc_sad16x64_bits8,
1834 aom_highbd_obmc_variance16x64,
1835 aom_highbd_obmc_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001836 HIGHBD_OBFP(BLOCK_32X8, aom_highbd_obmc_sad32x8_bits8,
1837 aom_highbd_obmc_variance32x8,
1838 aom_highbd_obmc_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001839 HIGHBD_OBFP(BLOCK_8X32, aom_highbd_obmc_sad8x32_bits8,
1840 aom_highbd_obmc_variance8x32,
1841 aom_highbd_obmc_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001842 HIGHBD_OBFP(BLOCK_16X4, aom_highbd_obmc_sad16x4_bits8,
1843 aom_highbd_obmc_variance16x4,
1844 aom_highbd_obmc_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001845 HIGHBD_OBFP(BLOCK_4X16, aom_highbd_obmc_sad4x16_bits8,
1846 aom_highbd_obmc_variance4x16,
1847 aom_highbd_obmc_sub_pixel_variance4x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001848 break;
1849
Yaowu Xuf883b422016-08-30 14:01:10 -07001850 case AOM_BITS_10:
Cheng Chenbf3d4962017-11-01 14:48:52 -07001851 HIGHBD_BFP(BLOCK_64X16, aom_highbd_sad64x16_bits10,
1852 aom_highbd_sad64x16_avg_bits10, aom_highbd_10_variance64x16,
1853 aom_highbd_10_sub_pixel_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001854 aom_highbd_10_sub_pixel_avg_variance64x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001855 aom_highbd_sad64x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001856 aom_highbd_dist_wtd_sad64x16_avg_bits10,
1857 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001858
1859 HIGHBD_BFP(BLOCK_16X64, aom_highbd_sad16x64_bits10,
1860 aom_highbd_sad16x64_avg_bits10, aom_highbd_10_variance16x64,
1861 aom_highbd_10_sub_pixel_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001862 aom_highbd_10_sub_pixel_avg_variance16x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001863 aom_highbd_sad16x64x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001864 aom_highbd_dist_wtd_sad16x64_avg_bits10,
1865 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001866
1867 HIGHBD_BFP(BLOCK_32X8, aom_highbd_sad32x8_bits10,
1868 aom_highbd_sad32x8_avg_bits10, aom_highbd_10_variance32x8,
1869 aom_highbd_10_sub_pixel_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001870 aom_highbd_10_sub_pixel_avg_variance32x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001871 aom_highbd_sad32x8x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001872 aom_highbd_dist_wtd_sad32x8_avg_bits10,
1873 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001874
1875 HIGHBD_BFP(BLOCK_8X32, aom_highbd_sad8x32_bits10,
1876 aom_highbd_sad8x32_avg_bits10, aom_highbd_10_variance8x32,
1877 aom_highbd_10_sub_pixel_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001878 aom_highbd_10_sub_pixel_avg_variance8x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001879 aom_highbd_sad8x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001880 aom_highbd_dist_wtd_sad8x32_avg_bits10,
1881 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001882
1883 HIGHBD_BFP(BLOCK_16X4, aom_highbd_sad16x4_bits10,
1884 aom_highbd_sad16x4_avg_bits10, aom_highbd_10_variance16x4,
1885 aom_highbd_10_sub_pixel_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001886 aom_highbd_10_sub_pixel_avg_variance16x4,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001887 aom_highbd_sad16x4x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001888 aom_highbd_dist_wtd_sad16x4_avg_bits10,
1889 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001890
1891 HIGHBD_BFP(BLOCK_4X16, aom_highbd_sad4x16_bits10,
1892 aom_highbd_sad4x16_avg_bits10, aom_highbd_10_variance4x16,
1893 aom_highbd_10_sub_pixel_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001894 aom_highbd_10_sub_pixel_avg_variance4x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001895 aom_highbd_sad4x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001896 aom_highbd_dist_wtd_sad4x16_avg_bits10,
1897 aom_highbd_10_dist_wtd_sub_pixel_avg_variance4x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001898
1899 HIGHBD_BFP(BLOCK_32X16, aom_highbd_sad32x16_bits10,
1900 aom_highbd_sad32x16_avg_bits10, aom_highbd_10_variance32x16,
1901 aom_highbd_10_sub_pixel_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001902 aom_highbd_10_sub_pixel_avg_variance32x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001903 aom_highbd_sad32x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001904 aom_highbd_dist_wtd_sad32x16_avg_bits10,
1905 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001906
1907 HIGHBD_BFP(BLOCK_16X32, aom_highbd_sad16x32_bits10,
1908 aom_highbd_sad16x32_avg_bits10, aom_highbd_10_variance16x32,
1909 aom_highbd_10_sub_pixel_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001910 aom_highbd_10_sub_pixel_avg_variance16x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001911 aom_highbd_sad16x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001912 aom_highbd_dist_wtd_sad16x32_avg_bits10,
1913 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001914
1915 HIGHBD_BFP(BLOCK_64X32, aom_highbd_sad64x32_bits10,
1916 aom_highbd_sad64x32_avg_bits10, aom_highbd_10_variance64x32,
1917 aom_highbd_10_sub_pixel_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001918 aom_highbd_10_sub_pixel_avg_variance64x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001919 aom_highbd_sad64x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001920 aom_highbd_dist_wtd_sad64x32_avg_bits10,
1921 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001922
1923 HIGHBD_BFP(BLOCK_32X64, aom_highbd_sad32x64_bits10,
1924 aom_highbd_sad32x64_avg_bits10, aom_highbd_10_variance32x64,
1925 aom_highbd_10_sub_pixel_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001926 aom_highbd_10_sub_pixel_avg_variance32x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001927 aom_highbd_sad32x64x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001928 aom_highbd_dist_wtd_sad32x64_avg_bits10,
1929 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001930
1931 HIGHBD_BFP(BLOCK_32X32, aom_highbd_sad32x32_bits10,
1932 aom_highbd_sad32x32_avg_bits10, aom_highbd_10_variance32x32,
1933 aom_highbd_10_sub_pixel_variance32x32,
1934 aom_highbd_10_sub_pixel_avg_variance32x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001935 aom_highbd_sad32x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001936 aom_highbd_dist_wtd_sad32x32_avg_bits10,
1937 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001938
1939 HIGHBD_BFP(BLOCK_64X64, aom_highbd_sad64x64_bits10,
1940 aom_highbd_sad64x64_avg_bits10, aom_highbd_10_variance64x64,
1941 aom_highbd_10_sub_pixel_variance64x64,
1942 aom_highbd_10_sub_pixel_avg_variance64x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001943 aom_highbd_sad64x64x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001944 aom_highbd_dist_wtd_sad64x64_avg_bits10,
1945 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001946
1947 HIGHBD_BFP(BLOCK_16X16, aom_highbd_sad16x16_bits10,
1948 aom_highbd_sad16x16_avg_bits10, aom_highbd_10_variance16x16,
1949 aom_highbd_10_sub_pixel_variance16x16,
1950 aom_highbd_10_sub_pixel_avg_variance16x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001951 aom_highbd_sad16x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001952 aom_highbd_dist_wtd_sad16x16_avg_bits10,
1953 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001954
1955 HIGHBD_BFP(BLOCK_16X8, aom_highbd_sad16x8_bits10,
1956 aom_highbd_sad16x8_avg_bits10, aom_highbd_10_variance16x8,
1957 aom_highbd_10_sub_pixel_variance16x8,
1958 aom_highbd_10_sub_pixel_avg_variance16x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001959 aom_highbd_sad16x8x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001960 aom_highbd_dist_wtd_sad16x8_avg_bits10,
1961 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001962
1963 HIGHBD_BFP(BLOCK_8X16, aom_highbd_sad8x16_bits10,
1964 aom_highbd_sad8x16_avg_bits10, aom_highbd_10_variance8x16,
1965 aom_highbd_10_sub_pixel_variance8x16,
1966 aom_highbd_10_sub_pixel_avg_variance8x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001967 aom_highbd_sad8x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001968 aom_highbd_dist_wtd_sad8x16_avg_bits10,
1969 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001970
1971 HIGHBD_BFP(
1972 BLOCK_8X8, aom_highbd_sad8x8_bits10, aom_highbd_sad8x8_avg_bits10,
1973 aom_highbd_10_variance8x8, aom_highbd_10_sub_pixel_variance8x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001974 aom_highbd_10_sub_pixel_avg_variance8x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001975 aom_highbd_sad8x8x4d_bits10, aom_highbd_dist_wtd_sad8x8_avg_bits10,
1976 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001977
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001978 HIGHBD_BFP(
1979 BLOCK_8X4, aom_highbd_sad8x4_bits10, aom_highbd_sad8x4_avg_bits10,
1980 aom_highbd_10_variance8x4, aom_highbd_10_sub_pixel_variance8x4,
1981 aom_highbd_10_sub_pixel_avg_variance8x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001982 aom_highbd_sad8x4x4d_bits10, aom_highbd_dist_wtd_sad8x4_avg_bits10,
1983 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001984
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001985 HIGHBD_BFP(
1986 BLOCK_4X8, aom_highbd_sad4x8_bits10, aom_highbd_sad4x8_avg_bits10,
1987 aom_highbd_10_variance4x8, aom_highbd_10_sub_pixel_variance4x8,
1988 aom_highbd_10_sub_pixel_avg_variance4x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001989 aom_highbd_sad4x8x4d_bits10, aom_highbd_dist_wtd_sad4x8_avg_bits10,
1990 aom_highbd_10_dist_wtd_sub_pixel_avg_variance4x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001991
1992 HIGHBD_BFP(
1993 BLOCK_4X4, aom_highbd_sad4x4_bits10, aom_highbd_sad4x4_avg_bits10,
1994 aom_highbd_10_variance4x4, aom_highbd_10_sub_pixel_variance4x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001995 aom_highbd_10_sub_pixel_avg_variance4x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001996 aom_highbd_sad4x4x4d_bits10, aom_highbd_dist_wtd_sad4x4_avg_bits10,
1997 aom_highbd_10_dist_wtd_sub_pixel_avg_variance4x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001998
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001999 HIGHBD_BFP(BLOCK_128X128, aom_highbd_sad128x128_bits10,
2000 aom_highbd_sad128x128_avg_bits10,
2001 aom_highbd_10_variance128x128,
2002 aom_highbd_10_sub_pixel_variance128x128,
2003 aom_highbd_10_sub_pixel_avg_variance128x128,
2004 aom_highbd_sad128x128x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002005 aom_highbd_dist_wtd_sad128x128_avg_bits10,
2006 aom_highbd_10_dist_wtd_sub_pixel_avg_variance128x128);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002007
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002008 HIGHBD_BFP(BLOCK_128X64, aom_highbd_sad128x64_bits10,
2009 aom_highbd_sad128x64_avg_bits10,
2010 aom_highbd_10_variance128x64,
2011 aom_highbd_10_sub_pixel_variance128x64,
2012 aom_highbd_10_sub_pixel_avg_variance128x64,
2013 aom_highbd_sad128x64x4d_bits10,
2014 aom_highbd_dist_wtd_sad128x64_avg_bits10,
2015 aom_highbd_10_dist_wtd_sub_pixel_avg_variance128x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002016
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002017 HIGHBD_BFP(BLOCK_64X128, aom_highbd_sad64x128_bits10,
2018 aom_highbd_sad64x128_avg_bits10,
2019 aom_highbd_10_variance64x128,
2020 aom_highbd_10_sub_pixel_variance64x128,
2021 aom_highbd_10_sub_pixel_avg_variance64x128,
2022 aom_highbd_sad64x128x4d_bits10,
2023 aom_highbd_dist_wtd_sad64x128_avg_bits10,
2024 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x128);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002025
David Barkerf19f35f2017-05-22 16:33:22 +01002026 HIGHBD_MBFP(BLOCK_128X128, aom_highbd_masked_sad128x128_bits10,
2027 aom_highbd_10_masked_sub_pixel_variance128x128)
2028 HIGHBD_MBFP(BLOCK_128X64, aom_highbd_masked_sad128x64_bits10,
2029 aom_highbd_10_masked_sub_pixel_variance128x64)
2030 HIGHBD_MBFP(BLOCK_64X128, aom_highbd_masked_sad64x128_bits10,
2031 aom_highbd_10_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01002032 HIGHBD_MBFP(BLOCK_64X64, aom_highbd_masked_sad64x64_bits10,
2033 aom_highbd_10_masked_sub_pixel_variance64x64)
2034 HIGHBD_MBFP(BLOCK_64X32, aom_highbd_masked_sad64x32_bits10,
2035 aom_highbd_10_masked_sub_pixel_variance64x32)
2036 HIGHBD_MBFP(BLOCK_32X64, aom_highbd_masked_sad32x64_bits10,
2037 aom_highbd_10_masked_sub_pixel_variance32x64)
2038 HIGHBD_MBFP(BLOCK_32X32, aom_highbd_masked_sad32x32_bits10,
2039 aom_highbd_10_masked_sub_pixel_variance32x32)
2040 HIGHBD_MBFP(BLOCK_32X16, aom_highbd_masked_sad32x16_bits10,
2041 aom_highbd_10_masked_sub_pixel_variance32x16)
2042 HIGHBD_MBFP(BLOCK_16X32, aom_highbd_masked_sad16x32_bits10,
2043 aom_highbd_10_masked_sub_pixel_variance16x32)
2044 HIGHBD_MBFP(BLOCK_16X16, aom_highbd_masked_sad16x16_bits10,
2045 aom_highbd_10_masked_sub_pixel_variance16x16)
2046 HIGHBD_MBFP(BLOCK_8X16, aom_highbd_masked_sad8x16_bits10,
2047 aom_highbd_10_masked_sub_pixel_variance8x16)
2048 HIGHBD_MBFP(BLOCK_16X8, aom_highbd_masked_sad16x8_bits10,
2049 aom_highbd_10_masked_sub_pixel_variance16x8)
2050 HIGHBD_MBFP(BLOCK_8X8, aom_highbd_masked_sad8x8_bits10,
2051 aom_highbd_10_masked_sub_pixel_variance8x8)
2052 HIGHBD_MBFP(BLOCK_4X8, aom_highbd_masked_sad4x8_bits10,
2053 aom_highbd_10_masked_sub_pixel_variance4x8)
2054 HIGHBD_MBFP(BLOCK_8X4, aom_highbd_masked_sad8x4_bits10,
2055 aom_highbd_10_masked_sub_pixel_variance8x4)
2056 HIGHBD_MBFP(BLOCK_4X4, aom_highbd_masked_sad4x4_bits10,
2057 aom_highbd_10_masked_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002058 HIGHBD_MBFP(BLOCK_64X16, aom_highbd_masked_sad64x16_bits10,
2059 aom_highbd_10_masked_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002060 HIGHBD_MBFP(BLOCK_16X64, aom_highbd_masked_sad16x64_bits10,
2061 aom_highbd_10_masked_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002062 HIGHBD_MBFP(BLOCK_32X8, aom_highbd_masked_sad32x8_bits10,
2063 aom_highbd_10_masked_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002064 HIGHBD_MBFP(BLOCK_8X32, aom_highbd_masked_sad8x32_bits10,
2065 aom_highbd_10_masked_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002066 HIGHBD_MBFP(BLOCK_16X4, aom_highbd_masked_sad16x4_bits10,
2067 aom_highbd_10_masked_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002068 HIGHBD_MBFP(BLOCK_4X16, aom_highbd_masked_sad4x16_bits10,
2069 aom_highbd_10_masked_sub_pixel_variance4x16)
Yaowu Xuf883b422016-08-30 14:01:10 -07002070 HIGHBD_OBFP(BLOCK_128X128, aom_highbd_obmc_sad128x128_bits10,
2071 aom_highbd_10_obmc_variance128x128,
2072 aom_highbd_10_obmc_sub_pixel_variance128x128)
2073 HIGHBD_OBFP(BLOCK_128X64, aom_highbd_obmc_sad128x64_bits10,
2074 aom_highbd_10_obmc_variance128x64,
2075 aom_highbd_10_obmc_sub_pixel_variance128x64)
2076 HIGHBD_OBFP(BLOCK_64X128, aom_highbd_obmc_sad64x128_bits10,
2077 aom_highbd_10_obmc_variance64x128,
2078 aom_highbd_10_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07002079 HIGHBD_OBFP(BLOCK_64X64, aom_highbd_obmc_sad64x64_bits10,
2080 aom_highbd_10_obmc_variance64x64,
2081 aom_highbd_10_obmc_sub_pixel_variance64x64)
2082 HIGHBD_OBFP(BLOCK_64X32, aom_highbd_obmc_sad64x32_bits10,
2083 aom_highbd_10_obmc_variance64x32,
2084 aom_highbd_10_obmc_sub_pixel_variance64x32)
2085 HIGHBD_OBFP(BLOCK_32X64, aom_highbd_obmc_sad32x64_bits10,
2086 aom_highbd_10_obmc_variance32x64,
2087 aom_highbd_10_obmc_sub_pixel_variance32x64)
2088 HIGHBD_OBFP(BLOCK_32X32, aom_highbd_obmc_sad32x32_bits10,
2089 aom_highbd_10_obmc_variance32x32,
2090 aom_highbd_10_obmc_sub_pixel_variance32x32)
2091 HIGHBD_OBFP(BLOCK_32X16, aom_highbd_obmc_sad32x16_bits10,
2092 aom_highbd_10_obmc_variance32x16,
2093 aom_highbd_10_obmc_sub_pixel_variance32x16)
2094 HIGHBD_OBFP(BLOCK_16X32, aom_highbd_obmc_sad16x32_bits10,
2095 aom_highbd_10_obmc_variance16x32,
2096 aom_highbd_10_obmc_sub_pixel_variance16x32)
2097 HIGHBD_OBFP(BLOCK_16X16, aom_highbd_obmc_sad16x16_bits10,
2098 aom_highbd_10_obmc_variance16x16,
2099 aom_highbd_10_obmc_sub_pixel_variance16x16)
2100 HIGHBD_OBFP(BLOCK_8X16, aom_highbd_obmc_sad8x16_bits10,
2101 aom_highbd_10_obmc_variance8x16,
2102 aom_highbd_10_obmc_sub_pixel_variance8x16)
2103 HIGHBD_OBFP(BLOCK_16X8, aom_highbd_obmc_sad16x8_bits10,
2104 aom_highbd_10_obmc_variance16x8,
2105 aom_highbd_10_obmc_sub_pixel_variance16x8)
2106 HIGHBD_OBFP(BLOCK_8X8, aom_highbd_obmc_sad8x8_bits10,
2107 aom_highbd_10_obmc_variance8x8,
2108 aom_highbd_10_obmc_sub_pixel_variance8x8)
2109 HIGHBD_OBFP(BLOCK_4X8, aom_highbd_obmc_sad4x8_bits10,
2110 aom_highbd_10_obmc_variance4x8,
2111 aom_highbd_10_obmc_sub_pixel_variance4x8)
2112 HIGHBD_OBFP(BLOCK_8X4, aom_highbd_obmc_sad8x4_bits10,
2113 aom_highbd_10_obmc_variance8x4,
2114 aom_highbd_10_obmc_sub_pixel_variance8x4)
2115 HIGHBD_OBFP(BLOCK_4X4, aom_highbd_obmc_sad4x4_bits10,
2116 aom_highbd_10_obmc_variance4x4,
2117 aom_highbd_10_obmc_sub_pixel_variance4x4)
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01002118
Rupert Swarbrick72678572017-08-02 12:05:26 +01002119 HIGHBD_OBFP(BLOCK_64X16, aom_highbd_obmc_sad64x16_bits10,
2120 aom_highbd_10_obmc_variance64x16,
2121 aom_highbd_10_obmc_sub_pixel_variance64x16)
2122
2123 HIGHBD_OBFP(BLOCK_16X64, aom_highbd_obmc_sad16x64_bits10,
2124 aom_highbd_10_obmc_variance16x64,
2125 aom_highbd_10_obmc_sub_pixel_variance16x64)
2126
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002127 HIGHBD_OBFP(BLOCK_32X8, aom_highbd_obmc_sad32x8_bits10,
2128 aom_highbd_10_obmc_variance32x8,
2129 aom_highbd_10_obmc_sub_pixel_variance32x8)
2130
2131 HIGHBD_OBFP(BLOCK_8X32, aom_highbd_obmc_sad8x32_bits10,
2132 aom_highbd_10_obmc_variance8x32,
2133 aom_highbd_10_obmc_sub_pixel_variance8x32)
2134
2135 HIGHBD_OBFP(BLOCK_16X4, aom_highbd_obmc_sad16x4_bits10,
2136 aom_highbd_10_obmc_variance16x4,
2137 aom_highbd_10_obmc_sub_pixel_variance16x4)
2138
2139 HIGHBD_OBFP(BLOCK_4X16, aom_highbd_obmc_sad4x16_bits10,
2140 aom_highbd_10_obmc_variance4x16,
2141 aom_highbd_10_obmc_sub_pixel_variance4x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002142 break;
2143
Yaowu Xuf883b422016-08-30 14:01:10 -07002144 case AOM_BITS_12:
Cheng Chenbf3d4962017-11-01 14:48:52 -07002145 HIGHBD_BFP(BLOCK_64X16, aom_highbd_sad64x16_bits12,
2146 aom_highbd_sad64x16_avg_bits12, aom_highbd_12_variance64x16,
2147 aom_highbd_12_sub_pixel_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002148 aom_highbd_12_sub_pixel_avg_variance64x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002149 aom_highbd_sad64x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002150 aom_highbd_dist_wtd_sad64x16_avg_bits12,
2151 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002152
2153 HIGHBD_BFP(BLOCK_16X64, aom_highbd_sad16x64_bits12,
2154 aom_highbd_sad16x64_avg_bits12, aom_highbd_12_variance16x64,
2155 aom_highbd_12_sub_pixel_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002156 aom_highbd_12_sub_pixel_avg_variance16x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002157 aom_highbd_sad16x64x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002158 aom_highbd_dist_wtd_sad16x64_avg_bits12,
2159 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002160
2161 HIGHBD_BFP(BLOCK_32X8, aom_highbd_sad32x8_bits12,
2162 aom_highbd_sad32x8_avg_bits12, aom_highbd_12_variance32x8,
2163 aom_highbd_12_sub_pixel_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002164 aom_highbd_12_sub_pixel_avg_variance32x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002165 aom_highbd_sad32x8x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002166 aom_highbd_dist_wtd_sad32x8_avg_bits12,
2167 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002168
2169 HIGHBD_BFP(BLOCK_8X32, aom_highbd_sad8x32_bits12,
2170 aom_highbd_sad8x32_avg_bits12, aom_highbd_12_variance8x32,
2171 aom_highbd_12_sub_pixel_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002172 aom_highbd_12_sub_pixel_avg_variance8x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002173 aom_highbd_sad8x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002174 aom_highbd_dist_wtd_sad8x32_avg_bits12,
2175 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002176
2177 HIGHBD_BFP(BLOCK_16X4, aom_highbd_sad16x4_bits12,
2178 aom_highbd_sad16x4_avg_bits12, aom_highbd_12_variance16x4,
2179 aom_highbd_12_sub_pixel_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002180 aom_highbd_12_sub_pixel_avg_variance16x4,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002181 aom_highbd_sad16x4x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002182 aom_highbd_dist_wtd_sad16x4_avg_bits12,
2183 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002184
2185 HIGHBD_BFP(BLOCK_4X16, aom_highbd_sad4x16_bits12,
2186 aom_highbd_sad4x16_avg_bits12, aom_highbd_12_variance4x16,
2187 aom_highbd_12_sub_pixel_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002188 aom_highbd_12_sub_pixel_avg_variance4x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002189 aom_highbd_sad4x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002190 aom_highbd_dist_wtd_sad4x16_avg_bits12,
2191 aom_highbd_12_dist_wtd_sub_pixel_avg_variance4x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002192
2193 HIGHBD_BFP(BLOCK_32X16, aom_highbd_sad32x16_bits12,
2194 aom_highbd_sad32x16_avg_bits12, aom_highbd_12_variance32x16,
2195 aom_highbd_12_sub_pixel_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002196 aom_highbd_12_sub_pixel_avg_variance32x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002197 aom_highbd_sad32x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002198 aom_highbd_dist_wtd_sad32x16_avg_bits12,
2199 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002200
2201 HIGHBD_BFP(BLOCK_16X32, aom_highbd_sad16x32_bits12,
2202 aom_highbd_sad16x32_avg_bits12, aom_highbd_12_variance16x32,
2203 aom_highbd_12_sub_pixel_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002204 aom_highbd_12_sub_pixel_avg_variance16x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002205 aom_highbd_sad16x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002206 aom_highbd_dist_wtd_sad16x32_avg_bits12,
2207 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002208
2209 HIGHBD_BFP(BLOCK_64X32, aom_highbd_sad64x32_bits12,
2210 aom_highbd_sad64x32_avg_bits12, aom_highbd_12_variance64x32,
2211 aom_highbd_12_sub_pixel_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002212 aom_highbd_12_sub_pixel_avg_variance64x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002213 aom_highbd_sad64x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002214 aom_highbd_dist_wtd_sad64x32_avg_bits12,
2215 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002216
2217 HIGHBD_BFP(BLOCK_32X64, aom_highbd_sad32x64_bits12,
2218 aom_highbd_sad32x64_avg_bits12, aom_highbd_12_variance32x64,
2219 aom_highbd_12_sub_pixel_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002220 aom_highbd_12_sub_pixel_avg_variance32x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002221 aom_highbd_sad32x64x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002222 aom_highbd_dist_wtd_sad32x64_avg_bits12,
2223 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002224
2225 HIGHBD_BFP(BLOCK_32X32, aom_highbd_sad32x32_bits12,
2226 aom_highbd_sad32x32_avg_bits12, aom_highbd_12_variance32x32,
2227 aom_highbd_12_sub_pixel_variance32x32,
2228 aom_highbd_12_sub_pixel_avg_variance32x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002229 aom_highbd_sad32x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002230 aom_highbd_dist_wtd_sad32x32_avg_bits12,
2231 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002232
2233 HIGHBD_BFP(BLOCK_64X64, aom_highbd_sad64x64_bits12,
2234 aom_highbd_sad64x64_avg_bits12, aom_highbd_12_variance64x64,
2235 aom_highbd_12_sub_pixel_variance64x64,
2236 aom_highbd_12_sub_pixel_avg_variance64x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002237 aom_highbd_sad64x64x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002238 aom_highbd_dist_wtd_sad64x64_avg_bits12,
2239 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002240
2241 HIGHBD_BFP(BLOCK_16X16, aom_highbd_sad16x16_bits12,
2242 aom_highbd_sad16x16_avg_bits12, aom_highbd_12_variance16x16,
2243 aom_highbd_12_sub_pixel_variance16x16,
2244 aom_highbd_12_sub_pixel_avg_variance16x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002245 aom_highbd_sad16x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002246 aom_highbd_dist_wtd_sad16x16_avg_bits12,
2247 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002248
2249 HIGHBD_BFP(BLOCK_16X8, aom_highbd_sad16x8_bits12,
2250 aom_highbd_sad16x8_avg_bits12, aom_highbd_12_variance16x8,
2251 aom_highbd_12_sub_pixel_variance16x8,
2252 aom_highbd_12_sub_pixel_avg_variance16x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002253 aom_highbd_sad16x8x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002254 aom_highbd_dist_wtd_sad16x8_avg_bits12,
2255 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002256
2257 HIGHBD_BFP(BLOCK_8X16, aom_highbd_sad8x16_bits12,
2258 aom_highbd_sad8x16_avg_bits12, aom_highbd_12_variance8x16,
2259 aom_highbd_12_sub_pixel_variance8x16,
2260 aom_highbd_12_sub_pixel_avg_variance8x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002261 aom_highbd_sad8x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002262 aom_highbd_dist_wtd_sad8x16_avg_bits12,
2263 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002264
2265 HIGHBD_BFP(
2266 BLOCK_8X8, aom_highbd_sad8x8_bits12, aom_highbd_sad8x8_avg_bits12,
2267 aom_highbd_12_variance8x8, aom_highbd_12_sub_pixel_variance8x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002268 aom_highbd_12_sub_pixel_avg_variance8x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002269 aom_highbd_sad8x8x4d_bits12, aom_highbd_dist_wtd_sad8x8_avg_bits12,
2270 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002271
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002272 HIGHBD_BFP(
2273 BLOCK_8X4, aom_highbd_sad8x4_bits12, aom_highbd_sad8x4_avg_bits12,
2274 aom_highbd_12_variance8x4, aom_highbd_12_sub_pixel_variance8x4,
2275 aom_highbd_12_sub_pixel_avg_variance8x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002276 aom_highbd_sad8x4x4d_bits12, aom_highbd_dist_wtd_sad8x4_avg_bits12,
2277 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002278
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002279 HIGHBD_BFP(
2280 BLOCK_4X8, aom_highbd_sad4x8_bits12, aom_highbd_sad4x8_avg_bits12,
2281 aom_highbd_12_variance4x8, aom_highbd_12_sub_pixel_variance4x8,
2282 aom_highbd_12_sub_pixel_avg_variance4x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002283 aom_highbd_sad4x8x4d_bits12, aom_highbd_dist_wtd_sad4x8_avg_bits12,
2284 aom_highbd_12_dist_wtd_sub_pixel_avg_variance4x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002285
2286 HIGHBD_BFP(
2287 BLOCK_4X4, aom_highbd_sad4x4_bits12, aom_highbd_sad4x4_avg_bits12,
2288 aom_highbd_12_variance4x4, aom_highbd_12_sub_pixel_variance4x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002289 aom_highbd_12_sub_pixel_avg_variance4x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002290 aom_highbd_sad4x4x4d_bits12, aom_highbd_dist_wtd_sad4x4_avg_bits12,
2291 aom_highbd_12_dist_wtd_sub_pixel_avg_variance4x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002292
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002293 HIGHBD_BFP(BLOCK_128X128, aom_highbd_sad128x128_bits12,
2294 aom_highbd_sad128x128_avg_bits12,
2295 aom_highbd_12_variance128x128,
2296 aom_highbd_12_sub_pixel_variance128x128,
2297 aom_highbd_12_sub_pixel_avg_variance128x128,
2298 aom_highbd_sad128x128x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002299 aom_highbd_dist_wtd_sad128x128_avg_bits12,
2300 aom_highbd_12_dist_wtd_sub_pixel_avg_variance128x128);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002301
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002302 HIGHBD_BFP(BLOCK_128X64, aom_highbd_sad128x64_bits12,
2303 aom_highbd_sad128x64_avg_bits12,
2304 aom_highbd_12_variance128x64,
2305 aom_highbd_12_sub_pixel_variance128x64,
2306 aom_highbd_12_sub_pixel_avg_variance128x64,
2307 aom_highbd_sad128x64x4d_bits12,
2308 aom_highbd_dist_wtd_sad128x64_avg_bits12,
2309 aom_highbd_12_dist_wtd_sub_pixel_avg_variance128x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002310
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002311 HIGHBD_BFP(BLOCK_64X128, aom_highbd_sad64x128_bits12,
2312 aom_highbd_sad64x128_avg_bits12,
2313 aom_highbd_12_variance64x128,
2314 aom_highbd_12_sub_pixel_variance64x128,
2315 aom_highbd_12_sub_pixel_avg_variance64x128,
2316 aom_highbd_sad64x128x4d_bits12,
2317 aom_highbd_dist_wtd_sad64x128_avg_bits12,
2318 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x128);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002319
David Barkerf19f35f2017-05-22 16:33:22 +01002320 HIGHBD_MBFP(BLOCK_128X128, aom_highbd_masked_sad128x128_bits12,
2321 aom_highbd_12_masked_sub_pixel_variance128x128)
2322 HIGHBD_MBFP(BLOCK_128X64, aom_highbd_masked_sad128x64_bits12,
2323 aom_highbd_12_masked_sub_pixel_variance128x64)
2324 HIGHBD_MBFP(BLOCK_64X128, aom_highbd_masked_sad64x128_bits12,
2325 aom_highbd_12_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01002326 HIGHBD_MBFP(BLOCK_64X64, aom_highbd_masked_sad64x64_bits12,
2327 aom_highbd_12_masked_sub_pixel_variance64x64)
2328 HIGHBD_MBFP(BLOCK_64X32, aom_highbd_masked_sad64x32_bits12,
2329 aom_highbd_12_masked_sub_pixel_variance64x32)
2330 HIGHBD_MBFP(BLOCK_32X64, aom_highbd_masked_sad32x64_bits12,
2331 aom_highbd_12_masked_sub_pixel_variance32x64)
2332 HIGHBD_MBFP(BLOCK_32X32, aom_highbd_masked_sad32x32_bits12,
2333 aom_highbd_12_masked_sub_pixel_variance32x32)
2334 HIGHBD_MBFP(BLOCK_32X16, aom_highbd_masked_sad32x16_bits12,
2335 aom_highbd_12_masked_sub_pixel_variance32x16)
2336 HIGHBD_MBFP(BLOCK_16X32, aom_highbd_masked_sad16x32_bits12,
2337 aom_highbd_12_masked_sub_pixel_variance16x32)
2338 HIGHBD_MBFP(BLOCK_16X16, aom_highbd_masked_sad16x16_bits12,
2339 aom_highbd_12_masked_sub_pixel_variance16x16)
2340 HIGHBD_MBFP(BLOCK_8X16, aom_highbd_masked_sad8x16_bits12,
2341 aom_highbd_12_masked_sub_pixel_variance8x16)
2342 HIGHBD_MBFP(BLOCK_16X8, aom_highbd_masked_sad16x8_bits12,
2343 aom_highbd_12_masked_sub_pixel_variance16x8)
2344 HIGHBD_MBFP(BLOCK_8X8, aom_highbd_masked_sad8x8_bits12,
2345 aom_highbd_12_masked_sub_pixel_variance8x8)
2346 HIGHBD_MBFP(BLOCK_4X8, aom_highbd_masked_sad4x8_bits12,
2347 aom_highbd_12_masked_sub_pixel_variance4x8)
2348 HIGHBD_MBFP(BLOCK_8X4, aom_highbd_masked_sad8x4_bits12,
2349 aom_highbd_12_masked_sub_pixel_variance8x4)
2350 HIGHBD_MBFP(BLOCK_4X4, aom_highbd_masked_sad4x4_bits12,
2351 aom_highbd_12_masked_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002352 HIGHBD_MBFP(BLOCK_64X16, aom_highbd_masked_sad64x16_bits12,
2353 aom_highbd_12_masked_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002354 HIGHBD_MBFP(BLOCK_16X64, aom_highbd_masked_sad16x64_bits12,
2355 aom_highbd_12_masked_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002356 HIGHBD_MBFP(BLOCK_32X8, aom_highbd_masked_sad32x8_bits12,
2357 aom_highbd_12_masked_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002358 HIGHBD_MBFP(BLOCK_8X32, aom_highbd_masked_sad8x32_bits12,
2359 aom_highbd_12_masked_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002360 HIGHBD_MBFP(BLOCK_16X4, aom_highbd_masked_sad16x4_bits12,
2361 aom_highbd_12_masked_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002362 HIGHBD_MBFP(BLOCK_4X16, aom_highbd_masked_sad4x16_bits12,
2363 aom_highbd_12_masked_sub_pixel_variance4x16)
Yaowu Xuf883b422016-08-30 14:01:10 -07002364 HIGHBD_OBFP(BLOCK_128X128, aom_highbd_obmc_sad128x128_bits12,
2365 aom_highbd_12_obmc_variance128x128,
2366 aom_highbd_12_obmc_sub_pixel_variance128x128)
2367 HIGHBD_OBFP(BLOCK_128X64, aom_highbd_obmc_sad128x64_bits12,
2368 aom_highbd_12_obmc_variance128x64,
2369 aom_highbd_12_obmc_sub_pixel_variance128x64)
2370 HIGHBD_OBFP(BLOCK_64X128, aom_highbd_obmc_sad64x128_bits12,
2371 aom_highbd_12_obmc_variance64x128,
2372 aom_highbd_12_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07002373 HIGHBD_OBFP(BLOCK_64X64, aom_highbd_obmc_sad64x64_bits12,
2374 aom_highbd_12_obmc_variance64x64,
2375 aom_highbd_12_obmc_sub_pixel_variance64x64)
2376 HIGHBD_OBFP(BLOCK_64X32, aom_highbd_obmc_sad64x32_bits12,
2377 aom_highbd_12_obmc_variance64x32,
2378 aom_highbd_12_obmc_sub_pixel_variance64x32)
2379 HIGHBD_OBFP(BLOCK_32X64, aom_highbd_obmc_sad32x64_bits12,
2380 aom_highbd_12_obmc_variance32x64,
2381 aom_highbd_12_obmc_sub_pixel_variance32x64)
2382 HIGHBD_OBFP(BLOCK_32X32, aom_highbd_obmc_sad32x32_bits12,
2383 aom_highbd_12_obmc_variance32x32,
2384 aom_highbd_12_obmc_sub_pixel_variance32x32)
2385 HIGHBD_OBFP(BLOCK_32X16, aom_highbd_obmc_sad32x16_bits12,
2386 aom_highbd_12_obmc_variance32x16,
2387 aom_highbd_12_obmc_sub_pixel_variance32x16)
2388 HIGHBD_OBFP(BLOCK_16X32, aom_highbd_obmc_sad16x32_bits12,
2389 aom_highbd_12_obmc_variance16x32,
2390 aom_highbd_12_obmc_sub_pixel_variance16x32)
2391 HIGHBD_OBFP(BLOCK_16X16, aom_highbd_obmc_sad16x16_bits12,
2392 aom_highbd_12_obmc_variance16x16,
2393 aom_highbd_12_obmc_sub_pixel_variance16x16)
2394 HIGHBD_OBFP(BLOCK_8X16, aom_highbd_obmc_sad8x16_bits12,
2395 aom_highbd_12_obmc_variance8x16,
2396 aom_highbd_12_obmc_sub_pixel_variance8x16)
2397 HIGHBD_OBFP(BLOCK_16X8, aom_highbd_obmc_sad16x8_bits12,
2398 aom_highbd_12_obmc_variance16x8,
2399 aom_highbd_12_obmc_sub_pixel_variance16x8)
2400 HIGHBD_OBFP(BLOCK_8X8, aom_highbd_obmc_sad8x8_bits12,
2401 aom_highbd_12_obmc_variance8x8,
2402 aom_highbd_12_obmc_sub_pixel_variance8x8)
2403 HIGHBD_OBFP(BLOCK_4X8, aom_highbd_obmc_sad4x8_bits12,
2404 aom_highbd_12_obmc_variance4x8,
2405 aom_highbd_12_obmc_sub_pixel_variance4x8)
2406 HIGHBD_OBFP(BLOCK_8X4, aom_highbd_obmc_sad8x4_bits12,
2407 aom_highbd_12_obmc_variance8x4,
2408 aom_highbd_12_obmc_sub_pixel_variance8x4)
2409 HIGHBD_OBFP(BLOCK_4X4, aom_highbd_obmc_sad4x4_bits12,
2410 aom_highbd_12_obmc_variance4x4,
2411 aom_highbd_12_obmc_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002412 HIGHBD_OBFP(BLOCK_64X16, aom_highbd_obmc_sad64x16_bits12,
2413 aom_highbd_12_obmc_variance64x16,
2414 aom_highbd_12_obmc_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002415 HIGHBD_OBFP(BLOCK_16X64, aom_highbd_obmc_sad16x64_bits12,
2416 aom_highbd_12_obmc_variance16x64,
2417 aom_highbd_12_obmc_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002418 HIGHBD_OBFP(BLOCK_32X8, aom_highbd_obmc_sad32x8_bits12,
2419 aom_highbd_12_obmc_variance32x8,
2420 aom_highbd_12_obmc_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002421 HIGHBD_OBFP(BLOCK_8X32, aom_highbd_obmc_sad8x32_bits12,
2422 aom_highbd_12_obmc_variance8x32,
2423 aom_highbd_12_obmc_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002424 HIGHBD_OBFP(BLOCK_16X4, aom_highbd_obmc_sad16x4_bits12,
2425 aom_highbd_12_obmc_variance16x4,
2426 aom_highbd_12_obmc_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002427 HIGHBD_OBFP(BLOCK_4X16, aom_highbd_obmc_sad4x16_bits12,
2428 aom_highbd_12_obmc_variance4x16,
2429 aom_highbd_12_obmc_sub_pixel_variance4x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002430 break;
2431
2432 default:
2433 assert(0 &&
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002434 "cm->seq_params.bit_depth should be AOM_BITS_8, "
Yaowu Xuf883b422016-08-30 14:01:10 -07002435 "AOM_BITS_10 or AOM_BITS_12");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002436 }
2437 }
2438}
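/* The *_bits10 and *_bits12 SAD entry points installed above are assumed to
 * be thin wrappers (defined earlier in this file) around the corresponding
 * aom_highbd_sad* functions, rescaling the result to an 8-bit-equivalent
 * range so that SAD-based costs stay comparable across bit depths. A minimal
 * sketch of such a wrapper, assuming the standard SAD signature:
 *
 *   static unsigned int sad8x8_bits10_sketch(const uint8_t *src,
 *                                            int src_stride,
 *                                            const uint8_t *ref,
 *                                            int ref_stride) {
 *     // Shift the 10-bit SAD down by 2 to match the 8-bit scale.
 *     return aom_highbd_sad8x8(src, src_stride, ref, ref_stride) >> 2;
 *   }
 */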
Yaowu Xuc27fc142016-08-22 16:08:15 -07002439
Yaowu Xuf883b422016-08-30 14:01:10 -07002440static void realloc_segmentation_maps(AV1_COMP *cpi) {
2441 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002442
2443 // Create the encoder segmentation map and set all entries to 0
Yaowu Xuf883b422016-08-30 14:01:10 -07002444 aom_free(cpi->segmentation_map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002445 CHECK_MEM_ERROR(cm, cpi->segmentation_map,
Yaowu Xuf883b422016-08-30 14:01:10 -07002446 aom_calloc(cm->mi_rows * cm->mi_cols, 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002447
2448 // Create a map used for cyclic background refresh.
Yaowu Xuf883b422016-08-30 14:01:10 -07002449 if (cpi->cyclic_refresh) av1_cyclic_refresh_free(cpi->cyclic_refresh);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002450 CHECK_MEM_ERROR(cm, cpi->cyclic_refresh,
Yaowu Xuf883b422016-08-30 14:01:10 -07002451 av1_cyclic_refresh_alloc(cm->mi_rows, cm->mi_cols));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002452
2453 // Create a map used to mark inactive areas.
Yaowu Xuf883b422016-08-30 14:01:10 -07002454 aom_free(cpi->active_map.map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002455 CHECK_MEM_ERROR(cm, cpi->active_map.map,
Yaowu Xuf883b422016-08-30 14:01:10 -07002456 aom_calloc(cm->mi_rows * cm->mi_cols, 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002457}
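/* All three maps above are allocated with one byte per mode-info unit
 * (cm->mi_rows * cm->mi_cols entries), so they must be re-created whenever
 * the coded frame size, and hence the mode-info grid, changes. */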
2458
Yaowu Xuf883b422016-08-30 14:01:10 -07002459void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
2460 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002461 SequenceHeader *const seq_params = &cm->seq_params;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002462 const int num_planes = av1_num_planes(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002463 RATE_CONTROL *const rc = &cpi->rc;
hui sud9a812b2017-07-06 14:34:37 -07002464 MACROBLOCK *const x = &cpi->td.mb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002465
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002466 if (seq_params->profile != oxcf->profile) seq_params->profile = oxcf->profile;
2467 seq_params->bit_depth = oxcf->bit_depth;
2468 seq_params->color_primaries = oxcf->color_primaries;
2469 seq_params->transfer_characteristics = oxcf->transfer_characteristics;
2470 seq_params->matrix_coefficients = oxcf->matrix_coefficients;
2471 seq_params->monochrome = oxcf->monochrome;
2472 seq_params->chroma_sample_position = oxcf->chroma_sample_position;
2473 seq_params->color_range = oxcf->color_range;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002474
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002475 assert(IMPLIES(seq_params->profile <= PROFILE_1,
2476 seq_params->bit_depth <= AOM_BITS_10));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002477
Andrey Norkin28e9ce22018-01-08 10:11:21 -08002478 cm->timing_info_present = oxcf->timing_info_present;
Andrey Norkin795ba872018-03-06 13:24:14 -08002479 cm->timing_info.num_units_in_display_tick =
2480 oxcf->timing_info.num_units_in_display_tick;
2481 cm->timing_info.time_scale = oxcf->timing_info.time_scale;
2482 cm->timing_info.equal_picture_interval =
2483 oxcf->timing_info.equal_picture_interval;
2484 cm->timing_info.num_ticks_per_picture =
2485 oxcf->timing_info.num_ticks_per_picture;
2486
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002487 seq_params->display_model_info_present_flag =
Andrey Norkin26495512018-06-20 17:13:11 -07002488 oxcf->display_model_info_present_flag;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002489 seq_params->decoder_model_info_present_flag =
Adrian Grangec56f6ec2018-05-31 14:19:32 -07002490 oxcf->decoder_model_info_present_flag;
Andrey Norkin795ba872018-03-06 13:24:14 -08002491 if (oxcf->decoder_model_info_present_flag) {
Andrey Norkin26495512018-06-20 17:13:11 -07002492 // set the decoder model parameters in schedule mode
Andrey Norkin795ba872018-03-06 13:24:14 -08002493 cm->buffer_model.num_units_in_decoding_tick =
2494 oxcf->buffer_model.num_units_in_decoding_tick;
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07002495 cm->buffer_removal_time_present = 1;
Andrey Norkin795ba872018-03-06 13:24:14 -08002496 set_aom_dec_model_info(&cm->buffer_model);
Andrey Norkin26495512018-06-20 17:13:11 -07002497 set_dec_model_op_parameters(&cm->op_params[0]);
2498 } else if (cm->timing_info_present &&
2499 cm->timing_info.equal_picture_interval &&
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002500 !seq_params->decoder_model_info_present_flag) {
Andrey Norkin26495512018-06-20 17:13:11 -07002501 // set the decoder model parameters in resource availability mode
2502 set_resource_availability_parameters(&cm->op_params[0]);
Andrey Norkinc7511de2018-06-22 12:31:06 -07002503 } else {
2504 cm->op_params[0].initial_display_delay =
2505 10; // Default value (not signaled)
Andrey Norkin795ba872018-03-06 13:24:14 -08002506 }
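/* Summary of the three decoder-model configurations handled above: a
 * signaled decoder model (buffer_removal_time present), resource-availability
 * mode (equal picture interval with no decoder model), or neither, in which
 * case op_params[0].initial_display_delay falls back to the default of 10. */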
Andrey Norkin28e9ce22018-01-08 10:11:21 -08002507
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002508 update_film_grain_parameters(cpi, oxcf);
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002509
Yaowu Xuc27fc142016-08-22 16:08:15 -07002510 cpi->oxcf = *oxcf;
Maxym Dmytrychenkocc6e0e12018-02-05 16:35:37 +01002511 cpi->common.options = oxcf->cfg;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002512 x->e_mbd.bd = (int)seq_params->bit_depth;
hui sud9a812b2017-07-06 14:34:37 -07002513 x->e_mbd.global_motion = cm->global_motion;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002514
Yaowu Xuf883b422016-08-30 14:01:10 -07002515 if ((oxcf->pass == 0) && (oxcf->rc_mode == AOM_Q)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002516 rc->baseline_gf_interval = FIXED_GF_INTERVAL;
2517 } else {
2518 rc->baseline_gf_interval = (MIN_GF_INTERVAL + MAX_GF_INTERVAL) / 2;
2519 }
2520
2521 cpi->refresh_last_frame = 1;
2522 cpi->refresh_golden_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002523 cpi->refresh_bwd_ref_frame = 0;
Zoe Liue9b15e22017-07-19 15:53:01 -07002524 cpi->refresh_alt2_ref_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002525
Debargha Mukherjee229fdc82018-03-10 07:45:33 -08002526 cm->refresh_frame_context = (oxcf->frame_parallel_decoding_mode)
2527 ? REFRESH_FRAME_CONTEXT_DISABLED
2528 : REFRESH_FRAME_CONTEXT_BACKWARD;
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01002529 if (oxcf->large_scale_tile)
James Zernf34dfc82018-02-23 16:53:33 -08002530 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_DISABLED;
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01002531
Alex Converse74ad0912017-07-18 10:22:58 -07002532 if (x->palette_buffer == NULL) {
hui sud9a812b2017-07-06 14:34:37 -07002533 CHECK_MEM_ERROR(cm, x->palette_buffer,
2534 aom_memalign(16, sizeof(*x->palette_buffer)));
2535 }
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002536
2537 if (x->tmp_conv_dst == NULL) {
2538 CHECK_MEM_ERROR(
2539 cm, x->tmp_conv_dst,
2540 aom_memalign(32, MAX_SB_SIZE * MAX_SB_SIZE * sizeof(*x->tmp_conv_dst)));
Urvang Joshie58f6ec2018-09-10 15:10:12 -07002541 x->e_mbd.tmp_conv_dst = x->tmp_conv_dst;
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002542 }
2543 for (int i = 0; i < 2; ++i) {
2544 if (x->tmp_obmc_bufs[i] == NULL) {
2545 CHECK_MEM_ERROR(cm, x->tmp_obmc_bufs[i],
wenyao.liu22d8ab32018-10-16 09:11:29 +08002546 aom_memalign(32, 2 * MAX_MB_PLANE * MAX_SB_SQUARE *
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002547 sizeof(*x->tmp_obmc_bufs[i])));
Urvang Joshie58f6ec2018-09-10 15:10:12 -07002548 x->e_mbd.tmp_obmc_bufs[i] = x->tmp_obmc_bufs[i];
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002549 }
2550 }
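/* tmp_conv_dst and tmp_obmc_bufs are scratch buffers: a MAX_SB_SIZE x
 * MAX_SB_SIZE convolve destination and two OBMC prediction buffers of
 * 2 * MAX_MB_PLANE * MAX_SB_SQUARE elements each. They are allocated once
 * and aliased into x->e_mbd so the shared prediction code can reach them. */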
2551
Yaowu Xuf883b422016-08-30 14:01:10 -07002552 av1_reset_segment_features(cm);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07002553 set_high_precision_mv(cpi, 1, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002554
Yaowu Xuc27fc142016-08-22 16:08:15 -07002555 set_rc_buffer_sizes(rc, &cpi->oxcf);
2556
2557 // Under a configuration change, where maximum_buffer_size may change,
2558 // keep buffer level clipped to the maximum allowed buffer size.
Yaowu Xuf883b422016-08-30 14:01:10 -07002559 rc->bits_off_target = AOMMIN(rc->bits_off_target, rc->maximum_buffer_size);
2560 rc->buffer_level = AOMMIN(rc->buffer_level, rc->maximum_buffer_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002561
2562  // Set up frame rate and related rate control parameters.
Yaowu Xuf883b422016-08-30 14:01:10 -07002563 av1_new_framerate(cpi, cpi->framerate);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002564
2565 // Set absolute upper and lower quality limits
2566 rc->worst_quality = cpi->oxcf.worst_allowed_q;
2567 rc->best_quality = cpi->oxcf.best_allowed_q;
2568
Urvang Joshib55cb5e2018-09-12 14:50:21 -07002569 cm->interp_filter = oxcf->large_scale_tile ? EIGHTTAP_REGULAR : SWITCHABLE;
Yue Chen5380cb52018-02-23 15:33:21 -08002570 cm->switchable_motion_mode = 1;
2571
Yaowu Xuc27fc142016-08-22 16:08:15 -07002572 if (cpi->oxcf.render_width > 0 && cpi->oxcf.render_height > 0) {
2573 cm->render_width = cpi->oxcf.render_width;
2574 cm->render_height = cpi->oxcf.render_height;
2575 } else {
2576 cm->render_width = cpi->oxcf.width;
2577 cm->render_height = cpi->oxcf.height;
2578 }
2579 cm->width = cpi->oxcf.width;
2580 cm->height = cpi->oxcf.height;
2581
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002582 int sb_size = seq_params->sb_size;
Urvang Joshie4530f82018-01-09 11:43:37 -08002583 // Superblock size should not be updated after the first key frame.
2584 if (!cpi->seq_params_locked) {
2585 set_sb_size(&cm->seq_params, select_sb_size(cpi));
2586 }
Dominic Symes917d6c02017-10-11 18:00:52 +02002587
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002588 if (cpi->initial_width || sb_size != seq_params->sb_size) {
Dominic Symes917d6c02017-10-11 18:00:52 +02002589 if (cm->width > cpi->initial_width || cm->height > cpi->initial_height ||
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002590 seq_params->sb_size != sb_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002591 av1_free_context_buffers(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002592 av1_free_pc_tree(&cpi->td, num_planes);
Cheng Chen46f30c72017-09-07 11:13:33 -07002593 alloc_compressor_data(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002594 realloc_segmentation_maps(cpi);
2595 cpi->initial_width = cpi->initial_height = 0;
2596 }
2597 }
2598 update_frame_size(cpi);
2599
2600 cpi->alt_ref_source = NULL;
2601 rc->is_src_frame_alt_ref = 0;
2602
Yaowu Xuc27fc142016-08-22 16:08:15 -07002603 rc->is_bwd_ref_frame = 0;
2604 rc->is_last_bipred_frame = 0;
2605 rc->is_bipred_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002606
Yaowu Xuc27fc142016-08-22 16:08:15 -07002607 set_tile_info(cpi);
2608
2609 cpi->ext_refresh_frame_flags_pending = 0;
2610 cpi->ext_refresh_frame_context_pending = 0;
2611
Yaowu Xuc27fc142016-08-22 16:08:15 -07002612 highbd_set_var_fns(cpi);
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00002613
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07002614 // Init sequence level coding tools
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07002615 // This should not be called after the first key frame.
2616 if (!cpi->seq_params_locked) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002617 seq_params->operating_points_cnt_minus_1 =
Adrian Grangec56f6ec2018-05-31 14:19:32 -07002618 cm->number_spatial_layers > 1 ? cm->number_spatial_layers - 1 : 0;
Andrey Norkin26495512018-06-20 17:13:11 -07002619 init_seq_coding_tools(&cm->seq_params, cm, oxcf);
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07002620 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002621}
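/* av1_change_config() may be applied to a live encoder between frames. A
 * minimal usage sketch (the calling wrapper and its error handling are
 * assumed):
 *
 *   AV1EncoderConfig new_oxcf = cpi->oxcf;  // start from the active config
 *   new_oxcf.target_bandwidth *= 2;         // e.g. double the target rate
 *   av1_change_config(cpi, &new_oxcf);      // re-derives RC, tiles and sizes
 */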
2622
Yaowu Xuf883b422016-08-30 14:01:10 -07002623AV1_COMP *av1_create_compressor(AV1EncoderConfig *oxcf,
2624 BufferPool *const pool) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002625 unsigned int i;
Yaowu Xuf883b422016-08-30 14:01:10 -07002626 AV1_COMP *volatile const cpi = aom_memalign(32, sizeof(AV1_COMP));
2627 AV1_COMMON *volatile const cm = cpi != NULL ? &cpi->common : NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002628
2629 if (!cm) return NULL;
2630
Yaowu Xuf883b422016-08-30 14:01:10 -07002631 av1_zero(*cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002632
Wan-Teh Changa2fad3e2018-07-19 16:55:19 -07002633 // The jmp_buf is valid only for the duration of the function that calls
2634 // setjmp(). Therefore, this function must reset the 'setjmp' field to 0
2635 // before it returns.
Yaowu Xuc27fc142016-08-22 16:08:15 -07002636 if (setjmp(cm->error.jmp)) {
2637 cm->error.setjmp = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002638 av1_remove_compressor(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002639    return NULL;
2640 }
2641
2642 cm->error.setjmp = 1;
Cheng Chen46f30c72017-09-07 11:13:33 -07002643 cm->alloc_mi = enc_alloc_mi;
2644 cm->free_mi = enc_free_mi;
2645 cm->setup_mi = enc_setup_mi;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002646
Angie Chianga5d96c42016-10-21 16:16:56 -07002647 CHECK_MEM_ERROR(cm, cm->fc,
2648 (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->fc)));
David Turner1bcefb32018-11-19 17:54:00 +00002649 CHECK_MEM_ERROR(
2650 cm, cm->default_frame_context,
2651 (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->default_frame_context)));
Angie Chianga5d96c42016-10-21 16:16:56 -07002652 memset(cm->fc, 0, sizeof(*cm->fc));
David Turner1bcefb32018-11-19 17:54:00 +00002653 memset(cm->default_frame_context, 0, sizeof(*cm->default_frame_context));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002654
2655 cpi->resize_state = 0;
2656 cpi->resize_avg_qp = 0;
2657 cpi->resize_buffer_underflow = 0;
Fergus Simpsonddc846e2017-04-24 18:09:13 -07002658
Yaowu Xuc27fc142016-08-22 16:08:15 -07002659 cpi->common.buffer_pool = pool;
2660
2661 init_config(cpi, oxcf);
Yaowu Xuf883b422016-08-30 14:01:10 -07002662 av1_rc_init(&cpi->oxcf, oxcf->pass, &cpi->rc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002663
David Turnerd2a592e2018-11-16 14:59:31 +00002664 cm->current_frame.frame_number = 0;
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07002665 cpi->seq_params_locked = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002666 cpi->partition_search_skippable_frame = 0;
2667 cpi->tile_data = NULL;
David Turnere7ebf902018-12-04 14:04:55 +00002668 cpi->last_show_frame_buf = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002669 realloc_segmentation_maps(cpi);
2670
Jingning Hanf050fc12018-03-09 14:53:33 -08002671 memset(cpi->nmv_costs, 0, sizeof(cpi->nmv_costs));
2672 memset(cpi->nmv_costs_hp, 0, sizeof(cpi->nmv_costs_hp));
James Zern01a9d702017-08-25 19:09:33 +00002673
Yaowu Xuc27fc142016-08-22 16:08:15 -07002674 for (i = 0; i < (sizeof(cpi->mbgraph_stats) / sizeof(cpi->mbgraph_stats[0]));
2675 i++) {
2676 CHECK_MEM_ERROR(
2677 cm, cpi->mbgraph_stats[i].mb_stats,
Yaowu Xuf883b422016-08-30 14:01:10 -07002678 aom_calloc(cm->MBs * sizeof(*cpi->mbgraph_stats[i].mb_stats), 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002679 }
2680
2681#if CONFIG_FP_MB_STATS
2682 cpi->use_fp_mb_stats = 0;
2683 if (cpi->use_fp_mb_stats) {
2684 // a place holder used to store the first pass mb stats in the first pass
2685      // A placeholder buffer used to store the macroblock stats gathered in the first pass
Yaowu Xuf883b422016-08-30 14:01:10 -07002686 aom_calloc(cm->MBs * sizeof(uint8_t), 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002687 } else {
2688 cpi->twopass.frame_mb_stats_buf = NULL;
2689 }
2690#endif
2691
2692 cpi->refresh_alt_ref_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002693
2694 cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
2695#if CONFIG_INTERNAL_STATS
2696 cpi->b_calculate_blockiness = 1;
2697 cpi->b_calculate_consistency = 1;
2698 cpi->total_inconsistency = 0;
2699 cpi->psnr.worst = 100.0;
2700 cpi->worst_ssim = 100.0;
2701
2702 cpi->count = 0;
2703 cpi->bytes = 0;
Debargha Mukherjee0857e662019-01-04 16:22:09 -08002704#if CONFIG_SPEED_STATS
2705 cpi->tx_search_count = 0;
2706#endif // CONFIG_SPEED_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002707
2708 if (cpi->b_calculate_psnr) {
2709 cpi->total_sq_error = 0;
2710 cpi->total_samples = 0;
2711 cpi->tot_recode_hits = 0;
2712 cpi->summed_quality = 0;
2713 cpi->summed_weights = 0;
2714 }
2715
2716 cpi->fastssim.worst = 100.0;
2717 cpi->psnrhvs.worst = 100.0;
2718
2719 if (cpi->b_calculate_blockiness) {
2720 cpi->total_blockiness = 0;
2721 cpi->worst_blockiness = 0.0;
2722 }
2723
2724 if (cpi->b_calculate_consistency) {
2725 CHECK_MEM_ERROR(cm, cpi->ssim_vars,
Yaowu Xuf883b422016-08-30 14:01:10 -07002726 aom_malloc(sizeof(*cpi->ssim_vars) * 4 *
Yaowu Xuc27fc142016-08-22 16:08:15 -07002727 cpi->common.mi_rows * cpi->common.mi_cols));
2728 cpi->worst_consistency = 100.0;
2729 }
2730#endif
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08002731#if CONFIG_ENTROPY_STATS
2732 av1_zero(aggregate_fc);
2733#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002734
2735 cpi->first_time_stamp_ever = INT64_MAX;
2736
Jingning Hanf050fc12018-03-09 14:53:33 -08002737 cpi->td.mb.nmvcost[0] = &cpi->nmv_costs[0][MV_MAX];
2738 cpi->td.mb.nmvcost[1] = &cpi->nmv_costs[1][MV_MAX];
2739 cpi->td.mb.nmvcost_hp[0] = &cpi->nmv_costs_hp[0][MV_MAX];
2740 cpi->td.mb.nmvcost_hp[1] = &cpi->nmv_costs_hp[1][MV_MAX];
James Zern01a9d702017-08-25 19:09:33 +00002741
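/* The nmvcost pointers are offset by MV_MAX so that the cost tables can be
 * indexed directly with signed motion-vector component differences in the
 * range [-MV_MAX, MV_MAX]. */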
Yaowu Xuc27fc142016-08-22 16:08:15 -07002742#ifdef OUTPUT_YUV_SKINMAP
2743 yuv_skinmap_file = fopen("skinmap.yuv", "ab");
2744#endif
2745#ifdef OUTPUT_YUV_REC
2746 yuv_rec_file = fopen("rec.yuv", "wb");
2747#endif
2748
Yaowu Xuc27fc142016-08-22 16:08:15 -07002749 if (oxcf->pass == 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002750 av1_init_first_pass(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002751 } else if (oxcf->pass == 2) {
2752 const size_t packet_sz = sizeof(FIRSTPASS_STATS);
2753 const int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
2754
2755#if CONFIG_FP_MB_STATS
2756 if (cpi->use_fp_mb_stats) {
2757 const size_t psz = cpi->common.MBs * sizeof(uint8_t);
2758 const int ps = (int)(oxcf->firstpass_mb_stats_in.sz / psz);
2759
2760 cpi->twopass.firstpass_mb_stats.mb_stats_start =
2761 oxcf->firstpass_mb_stats_in.buf;
2762 cpi->twopass.firstpass_mb_stats.mb_stats_end =
2763 cpi->twopass.firstpass_mb_stats.mb_stats_start +
2764 (ps - 1) * cpi->common.MBs * sizeof(uint8_t);
2765 }
2766#endif
2767
2768 cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
2769 cpi->twopass.stats_in = cpi->twopass.stats_in_start;
2770 cpi->twopass.stats_in_end = &cpi->twopass.stats_in[packets - 1];
2771
Yaowu Xuf883b422016-08-30 14:01:10 -07002772 av1_init_second_pass(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002773 }
2774
Jingning Hand064cf02017-06-01 10:00:39 -07002775 CHECK_MEM_ERROR(
2776 cm, cpi->td.mb.above_pred_buf,
Yue Chen1a799252018-03-01 16:47:41 -08002777 (uint8_t *)aom_memalign(16, MAX_MB_PLANE * MAX_SB_SQUARE *
Johannb0ef6ff2018-02-08 14:32:21 -08002778 sizeof(*cpi->td.mb.above_pred_buf)));
Jingning Hand064cf02017-06-01 10:00:39 -07002779 CHECK_MEM_ERROR(
2780 cm, cpi->td.mb.left_pred_buf,
Yue Chen1a799252018-03-01 16:47:41 -08002781 (uint8_t *)aom_memalign(16, MAX_MB_PLANE * MAX_SB_SQUARE *
Johannb0ef6ff2018-02-08 14:32:21 -08002782 sizeof(*cpi->td.mb.left_pred_buf)));
Jingning Hand064cf02017-06-01 10:00:39 -07002783
2784 CHECK_MEM_ERROR(cm, cpi->td.mb.wsrc_buf,
2785 (int32_t *)aom_memalign(
2786 16, MAX_SB_SQUARE * sizeof(*cpi->td.mb.wsrc_buf)));
2787
Ravi Chaudhary5d970f42018-09-25 11:25:32 +05302788#if CONFIG_COLLECT_INTER_MODE_RD_STATS
2789 CHECK_MEM_ERROR(
2790 cm, cpi->td.mb.inter_modes_info,
2791 (InterModesInfo *)aom_malloc(sizeof(*cpi->td.mb.inter_modes_info)));
2792#endif
2793
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05302794 for (int x = 0; x < 2; x++)
2795 for (int y = 0; y < 2; y++)
2796 CHECK_MEM_ERROR(
2797 cm, cpi->td.mb.hash_value_buffer[x][y],
2798 (uint32_t *)aom_malloc(AOM_BUFFER_SIZE_FOR_BLOCK_HASH *
2799 sizeof(*cpi->td.mb.hash_value_buffer[0][0])));
2800
2801 cpi->td.mb.g_crc_initialized = 0;
2802
Jingning Hand064cf02017-06-01 10:00:39 -07002803 CHECK_MEM_ERROR(cm, cpi->td.mb.mask_buf,
2804 (int32_t *)aom_memalign(
2805 16, MAX_SB_SQUARE * sizeof(*cpi->td.mb.mask_buf)));
2806
Yaowu Xuf883b422016-08-30 14:01:10 -07002807 av1_set_speed_features_framesize_independent(cpi);
2808 av1_set_speed_features_framesize_dependent(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002809
Yue Chen7cae98f2018-08-24 10:43:16 -07002810 for (int frame = 0; frame < MAX_LAG_BUFFERS; ++frame) {
2811 int mi_cols = ALIGN_POWER_OF_TWO(cm->mi_cols, MAX_MIB_SIZE_LOG2);
2812 int mi_rows = ALIGN_POWER_OF_TWO(cm->mi_rows, MAX_MIB_SIZE_LOG2);
2813
2814 CHECK_MEM_ERROR(cm, cpi->tpl_stats[frame].tpl_stats_ptr,
2815 aom_calloc(mi_rows * mi_cols,
2816 sizeof(*cpi->tpl_stats[frame].tpl_stats_ptr)));
2817 cpi->tpl_stats[frame].is_valid = 0;
2818 cpi->tpl_stats[frame].width = mi_cols;
2819 cpi->tpl_stats[frame].height = mi_rows;
2820 cpi->tpl_stats[frame].stride = mi_cols;
2821 cpi->tpl_stats[frame].mi_rows = cm->mi_rows;
2822 cpi->tpl_stats[frame].mi_cols = cm->mi_cols;
2823 }
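/* Each TPL stats buffer covers the mode-info grid rounded up to whole
 * superblocks (the ALIGN_POWER_OF_TWO above), with one tpl_stats entry per
 * mi unit; width, height and stride are stored alongside so later passes can
 * index the buffer without recomputing the alignment. */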
2824
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002825#define BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX4DF, JSDAF, JSVAF) \
2826 cpi->fn_ptr[BT].sdf = SDF; \
2827 cpi->fn_ptr[BT].sdaf = SDAF; \
2828 cpi->fn_ptr[BT].vf = VF; \
2829 cpi->fn_ptr[BT].svf = SVF; \
2830 cpi->fn_ptr[BT].svaf = SVAF; \
2831 cpi->fn_ptr[BT].sdx4df = SDX4DF; \
2832 cpi->fn_ptr[BT].jsdaf = JSDAF; \
Cheng Chenf78632e2017-10-20 15:30:51 -07002833 cpi->fn_ptr[BT].jsvaf = JSVAF;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002834
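/* BFP fills one cpi->fn_ptr[] entry per block size. Based on the functions
 * wired in below: sdf/sdaf are SAD and SAD-with-second-predictor, vf/svf/svaf
 * are full-pel, sub-pel and sub-pel-average variance, sdx4df is the
 * 4-reference SAD, and jsdaf/jsvaf are the dist_wtd (distance-weighted
 * compound) averaging variants. */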
Cheng Chenf78632e2017-10-20 15:30:51 -07002835 BFP(BLOCK_4X16, aom_sad4x16, aom_sad4x16_avg, aom_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002836 aom_sub_pixel_variance4x16, aom_sub_pixel_avg_variance4x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002837 aom_sad4x16x4d, aom_dist_wtd_sad4x16_avg,
2838 aom_dist_wtd_sub_pixel_avg_variance4x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002839
2840 BFP(BLOCK_16X4, aom_sad16x4, aom_sad16x4_avg, aom_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002841 aom_sub_pixel_variance16x4, aom_sub_pixel_avg_variance16x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002842 aom_sad16x4x4d, aom_dist_wtd_sad16x4_avg,
2843 aom_dist_wtd_sub_pixel_avg_variance16x4)
Cheng Chenf78632e2017-10-20 15:30:51 -07002844
2845 BFP(BLOCK_8X32, aom_sad8x32, aom_sad8x32_avg, aom_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002846 aom_sub_pixel_variance8x32, aom_sub_pixel_avg_variance8x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002847 aom_sad8x32x4d, aom_dist_wtd_sad8x32_avg,
2848 aom_dist_wtd_sub_pixel_avg_variance8x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002849
2850 BFP(BLOCK_32X8, aom_sad32x8, aom_sad32x8_avg, aom_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002851 aom_sub_pixel_variance32x8, aom_sub_pixel_avg_variance32x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002852 aom_sad32x8x4d, aom_dist_wtd_sad32x8_avg,
2853 aom_dist_wtd_sub_pixel_avg_variance32x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002854
2855 BFP(BLOCK_16X64, aom_sad16x64, aom_sad16x64_avg, aom_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002856 aom_sub_pixel_variance16x64, aom_sub_pixel_avg_variance16x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002857 aom_sad16x64x4d, aom_dist_wtd_sad16x64_avg,
2858 aom_dist_wtd_sub_pixel_avg_variance16x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002859
2860 BFP(BLOCK_64X16, aom_sad64x16, aom_sad64x16_avg, aom_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002861 aom_sub_pixel_variance64x16, aom_sub_pixel_avg_variance64x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002862 aom_sad64x16x4d, aom_dist_wtd_sad64x16_avg,
2863 aom_dist_wtd_sub_pixel_avg_variance64x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002864
Cheng Chenf78632e2017-10-20 15:30:51 -07002865 BFP(BLOCK_128X128, aom_sad128x128, aom_sad128x128_avg, aom_variance128x128,
2866 aom_sub_pixel_variance128x128, aom_sub_pixel_avg_variance128x128,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002867 aom_sad128x128x4d, aom_dist_wtd_sad128x128_avg,
2868 aom_dist_wtd_sub_pixel_avg_variance128x128)
Cheng Chenf78632e2017-10-20 15:30:51 -07002869
2870 BFP(BLOCK_128X64, aom_sad128x64, aom_sad128x64_avg, aom_variance128x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002871 aom_sub_pixel_variance128x64, aom_sub_pixel_avg_variance128x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002872 aom_sad128x64x4d, aom_dist_wtd_sad128x64_avg,
2873 aom_dist_wtd_sub_pixel_avg_variance128x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002874
2875 BFP(BLOCK_64X128, aom_sad64x128, aom_sad64x128_avg, aom_variance64x128,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002876 aom_sub_pixel_variance64x128, aom_sub_pixel_avg_variance64x128,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002877 aom_sad64x128x4d, aom_dist_wtd_sad64x128_avg,
2878 aom_dist_wtd_sub_pixel_avg_variance64x128)
Cheng Chenf78632e2017-10-20 15:30:51 -07002879
2880 BFP(BLOCK_32X16, aom_sad32x16, aom_sad32x16_avg, aom_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002881 aom_sub_pixel_variance32x16, aom_sub_pixel_avg_variance32x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002882 aom_sad32x16x4d, aom_dist_wtd_sad32x16_avg,
2883 aom_dist_wtd_sub_pixel_avg_variance32x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002884
2885 BFP(BLOCK_16X32, aom_sad16x32, aom_sad16x32_avg, aom_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002886 aom_sub_pixel_variance16x32, aom_sub_pixel_avg_variance16x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002887 aom_sad16x32x4d, aom_dist_wtd_sad16x32_avg,
2888 aom_dist_wtd_sub_pixel_avg_variance16x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002889
2890 BFP(BLOCK_64X32, aom_sad64x32, aom_sad64x32_avg, aom_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002891 aom_sub_pixel_variance64x32, aom_sub_pixel_avg_variance64x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002892 aom_sad64x32x4d, aom_dist_wtd_sad64x32_avg,
2893 aom_dist_wtd_sub_pixel_avg_variance64x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002894
2895 BFP(BLOCK_32X64, aom_sad32x64, aom_sad32x64_avg, aom_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002896 aom_sub_pixel_variance32x64, aom_sub_pixel_avg_variance32x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002897 aom_sad32x64x4d, aom_dist_wtd_sad32x64_avg,
2898 aom_dist_wtd_sub_pixel_avg_variance32x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002899
2900 BFP(BLOCK_32X32, aom_sad32x32, aom_sad32x32_avg, aom_variance32x32,
2901 aom_sub_pixel_variance32x32, aom_sub_pixel_avg_variance32x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002902 aom_sad32x32x4d, aom_dist_wtd_sad32x32_avg,
2903 aom_dist_wtd_sub_pixel_avg_variance32x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002904
2905 BFP(BLOCK_64X64, aom_sad64x64, aom_sad64x64_avg, aom_variance64x64,
2906 aom_sub_pixel_variance64x64, aom_sub_pixel_avg_variance64x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002907 aom_sad64x64x4d, aom_dist_wtd_sad64x64_avg,
2908 aom_dist_wtd_sub_pixel_avg_variance64x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002909
2910 BFP(BLOCK_16X16, aom_sad16x16, aom_sad16x16_avg, aom_variance16x16,
2911 aom_sub_pixel_variance16x16, aom_sub_pixel_avg_variance16x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002912 aom_sad16x16x4d, aom_dist_wtd_sad16x16_avg,
2913 aom_dist_wtd_sub_pixel_avg_variance16x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002914
2915 BFP(BLOCK_16X8, aom_sad16x8, aom_sad16x8_avg, aom_variance16x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002916 aom_sub_pixel_variance16x8, aom_sub_pixel_avg_variance16x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002917 aom_sad16x8x4d, aom_dist_wtd_sad16x8_avg,
2918 aom_dist_wtd_sub_pixel_avg_variance16x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002919
2920 BFP(BLOCK_8X16, aom_sad8x16, aom_sad8x16_avg, aom_variance8x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002921 aom_sub_pixel_variance8x16, aom_sub_pixel_avg_variance8x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002922 aom_sad8x16x4d, aom_dist_wtd_sad8x16_avg,
2923 aom_dist_wtd_sub_pixel_avg_variance8x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002924
2925 BFP(BLOCK_8X8, aom_sad8x8, aom_sad8x8_avg, aom_variance8x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002926 aom_sub_pixel_variance8x8, aom_sub_pixel_avg_variance8x8, aom_sad8x8x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002927 aom_dist_wtd_sad8x8_avg, aom_dist_wtd_sub_pixel_avg_variance8x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002928
2929 BFP(BLOCK_8X4, aom_sad8x4, aom_sad8x4_avg, aom_variance8x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002930 aom_sub_pixel_variance8x4, aom_sub_pixel_avg_variance8x4, aom_sad8x4x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002931 aom_dist_wtd_sad8x4_avg, aom_dist_wtd_sub_pixel_avg_variance8x4)
Cheng Chenf78632e2017-10-20 15:30:51 -07002932
2933 BFP(BLOCK_4X8, aom_sad4x8, aom_sad4x8_avg, aom_variance4x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002934 aom_sub_pixel_variance4x8, aom_sub_pixel_avg_variance4x8, aom_sad4x8x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002935 aom_dist_wtd_sad4x8_avg, aom_dist_wtd_sub_pixel_avg_variance4x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002936
2937 BFP(BLOCK_4X4, aom_sad4x4, aom_sad4x4_avg, aom_variance4x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002938 aom_sub_pixel_variance4x4, aom_sub_pixel_avg_variance4x4, aom_sad4x4x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002939 aom_dist_wtd_sad4x4_avg, aom_dist_wtd_sub_pixel_avg_variance4x4)
Cheng Chenf78632e2017-10-20 15:30:51 -07002940
Yaowu Xuc27fc142016-08-22 16:08:15 -07002941#define OBFP(BT, OSDF, OVF, OSVF) \
2942 cpi->fn_ptr[BT].osdf = OSDF; \
2943 cpi->fn_ptr[BT].ovf = OVF; \
2944 cpi->fn_ptr[BT].osvf = OSVF;
2945
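/* OBFP installs the OBMC (overlapped block motion compensation) counterparts:
 * an OBMC SAD, OBMC variance and OBMC sub-pixel variance per block size, used
 * when evaluating OBMC-predicted motion candidates. */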
Yaowu Xuf883b422016-08-30 14:01:10 -07002946 OBFP(BLOCK_128X128, aom_obmc_sad128x128, aom_obmc_variance128x128,
2947 aom_obmc_sub_pixel_variance128x128)
2948 OBFP(BLOCK_128X64, aom_obmc_sad128x64, aom_obmc_variance128x64,
2949 aom_obmc_sub_pixel_variance128x64)
2950 OBFP(BLOCK_64X128, aom_obmc_sad64x128, aom_obmc_variance64x128,
2951 aom_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07002952 OBFP(BLOCK_64X64, aom_obmc_sad64x64, aom_obmc_variance64x64,
2953 aom_obmc_sub_pixel_variance64x64)
2954 OBFP(BLOCK_64X32, aom_obmc_sad64x32, aom_obmc_variance64x32,
2955 aom_obmc_sub_pixel_variance64x32)
2956 OBFP(BLOCK_32X64, aom_obmc_sad32x64, aom_obmc_variance32x64,
2957 aom_obmc_sub_pixel_variance32x64)
2958 OBFP(BLOCK_32X32, aom_obmc_sad32x32, aom_obmc_variance32x32,
2959 aom_obmc_sub_pixel_variance32x32)
2960 OBFP(BLOCK_32X16, aom_obmc_sad32x16, aom_obmc_variance32x16,
2961 aom_obmc_sub_pixel_variance32x16)
2962 OBFP(BLOCK_16X32, aom_obmc_sad16x32, aom_obmc_variance16x32,
2963 aom_obmc_sub_pixel_variance16x32)
2964 OBFP(BLOCK_16X16, aom_obmc_sad16x16, aom_obmc_variance16x16,
2965 aom_obmc_sub_pixel_variance16x16)
2966 OBFP(BLOCK_16X8, aom_obmc_sad16x8, aom_obmc_variance16x8,
2967 aom_obmc_sub_pixel_variance16x8)
2968 OBFP(BLOCK_8X16, aom_obmc_sad8x16, aom_obmc_variance8x16,
2969 aom_obmc_sub_pixel_variance8x16)
2970 OBFP(BLOCK_8X8, aom_obmc_sad8x8, aom_obmc_variance8x8,
2971 aom_obmc_sub_pixel_variance8x8)
2972 OBFP(BLOCK_4X8, aom_obmc_sad4x8, aom_obmc_variance4x8,
2973 aom_obmc_sub_pixel_variance4x8)
2974 OBFP(BLOCK_8X4, aom_obmc_sad8x4, aom_obmc_variance8x4,
2975 aom_obmc_sub_pixel_variance8x4)
2976 OBFP(BLOCK_4X4, aom_obmc_sad4x4, aom_obmc_variance4x4,
2977 aom_obmc_sub_pixel_variance4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002978 OBFP(BLOCK_4X16, aom_obmc_sad4x16, aom_obmc_variance4x16,
2979 aom_obmc_sub_pixel_variance4x16)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002980 OBFP(BLOCK_16X4, aom_obmc_sad16x4, aom_obmc_variance16x4,
2981 aom_obmc_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002982 OBFP(BLOCK_8X32, aom_obmc_sad8x32, aom_obmc_variance8x32,
2983 aom_obmc_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002984 OBFP(BLOCK_32X8, aom_obmc_sad32x8, aom_obmc_variance32x8,
2985 aom_obmc_sub_pixel_variance32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002986 OBFP(BLOCK_16X64, aom_obmc_sad16x64, aom_obmc_variance16x64,
2987 aom_obmc_sub_pixel_variance16x64)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002988 OBFP(BLOCK_64X16, aom_obmc_sad64x16, aom_obmc_variance64x16,
2989 aom_obmc_sub_pixel_variance64x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002990
David Barkerf19f35f2017-05-22 16:33:22 +01002991#define MBFP(BT, MCSDF, MCSVF) \
2992 cpi->fn_ptr[BT].msdf = MCSDF; \
2993 cpi->fn_ptr[BT].msvf = MCSVF;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002994
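/* MBFP installs the masked (compound-mask) SAD and masked sub-pixel variance
 * used when searching masked compound predictions. */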
David Barkerf19f35f2017-05-22 16:33:22 +01002995 MBFP(BLOCK_128X128, aom_masked_sad128x128,
2996 aom_masked_sub_pixel_variance128x128)
2997 MBFP(BLOCK_128X64, aom_masked_sad128x64, aom_masked_sub_pixel_variance128x64)
2998 MBFP(BLOCK_64X128, aom_masked_sad64x128, aom_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01002999 MBFP(BLOCK_64X64, aom_masked_sad64x64, aom_masked_sub_pixel_variance64x64)
3000 MBFP(BLOCK_64X32, aom_masked_sad64x32, aom_masked_sub_pixel_variance64x32)
3001 MBFP(BLOCK_32X64, aom_masked_sad32x64, aom_masked_sub_pixel_variance32x64)
3002 MBFP(BLOCK_32X32, aom_masked_sad32x32, aom_masked_sub_pixel_variance32x32)
3003 MBFP(BLOCK_32X16, aom_masked_sad32x16, aom_masked_sub_pixel_variance32x16)
3004 MBFP(BLOCK_16X32, aom_masked_sad16x32, aom_masked_sub_pixel_variance16x32)
3005 MBFP(BLOCK_16X16, aom_masked_sad16x16, aom_masked_sub_pixel_variance16x16)
3006 MBFP(BLOCK_16X8, aom_masked_sad16x8, aom_masked_sub_pixel_variance16x8)
3007 MBFP(BLOCK_8X16, aom_masked_sad8x16, aom_masked_sub_pixel_variance8x16)
3008 MBFP(BLOCK_8X8, aom_masked_sad8x8, aom_masked_sub_pixel_variance8x8)
3009 MBFP(BLOCK_4X8, aom_masked_sad4x8, aom_masked_sub_pixel_variance4x8)
3010 MBFP(BLOCK_8X4, aom_masked_sad8x4, aom_masked_sub_pixel_variance8x4)
3011 MBFP(BLOCK_4X4, aom_masked_sad4x4, aom_masked_sub_pixel_variance4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01003012
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01003013 MBFP(BLOCK_4X16, aom_masked_sad4x16, aom_masked_sub_pixel_variance4x16)
3014
3015 MBFP(BLOCK_16X4, aom_masked_sad16x4, aom_masked_sub_pixel_variance16x4)
3016
3017 MBFP(BLOCK_8X32, aom_masked_sad8x32, aom_masked_sub_pixel_variance8x32)
3018
3019 MBFP(BLOCK_32X8, aom_masked_sad32x8, aom_masked_sub_pixel_variance32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01003020
3021 MBFP(BLOCK_16X64, aom_masked_sad16x64, aom_masked_sub_pixel_variance16x64)
3022
3023 MBFP(BLOCK_64X16, aom_masked_sad64x16, aom_masked_sub_pixel_variance64x16)
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01003024
Yaowu Xuc27fc142016-08-22 16:08:15 -07003025 highbd_set_var_fns(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003026
Yaowu Xuf883b422016-08-30 14:01:10 -07003027 /* av1_init_quantizer() is first called here. Add check in
3028 * av1_frame_init_quantizer() so that av1_init_quantizer is only
Yaowu Xuc27fc142016-08-22 16:08:15 -07003029 * called later when needed. This will avoid unnecessary calls of
Yaowu Xuf883b422016-08-30 14:01:10 -07003030 * av1_init_quantizer() for every frame.
Yaowu Xuc27fc142016-08-22 16:08:15 -07003031 */
Yaowu Xuf883b422016-08-30 14:01:10 -07003032 av1_init_quantizer(cpi);
Zoe Liud902b742018-02-19 17:02:41 -08003033 av1_qm_init(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003034
Yaowu Xuf883b422016-08-30 14:01:10 -07003035 av1_loop_filter_init(cm);
Urvang Joshide71d142017-10-05 12:12:15 -07003036 cm->superres_scale_denominator = SCALE_NUMERATOR;
Debargha Mukherjee29e40a62017-06-14 09:37:12 -07003037 cm->superres_upscaled_width = oxcf->width;
3038 cm->superres_upscaled_height = oxcf->height;
Yaowu Xuf883b422016-08-30 14:01:10 -07003039 av1_loop_restoration_precal();
Yaowu Xuc27fc142016-08-22 16:08:15 -07003040
3041 cm->error.setjmp = 0;
3042
3043 return cpi;
3044}
3045
Urvang Joshiee2c8112018-05-04 14:53:15 -07003046#if CONFIG_INTERNAL_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003047#define SNPRINT(H, T) snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T))
3048
3049#define SNPRINT2(H, T, V) \
3050 snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T), (V))
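// Usage sketch (illustrative only): with a local buffer such as the
// headings[512] / results[512] arrays used below,
//   SNPRINT2(results, "\t%7.3f", value);
// appends the formatted value to whatever is already in 'results'. Note that
// sizeof(H) only yields the true buffer size because H is a char array, not a
// pointer, so the remaining space is sizeof(H) - strlen(H).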
Urvang Joshiee2c8112018-05-04 14:53:15 -07003051#endif // CONFIG_INTERNAL_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003052
Yaowu Xuf883b422016-08-30 14:01:10 -07003053void av1_remove_compressor(AV1_COMP *cpi) {
3054 AV1_COMMON *cm;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003055 unsigned int i;
3056 int t;
3057
3058 if (!cpi) return;
3059
3060 cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003061 const int num_planes = av1_num_planes(cm);
3062
David Turnerd2a592e2018-11-16 14:59:31 +00003063 if (cm->current_frame.frame_number > 0) {
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08003064#if CONFIG_ENTROPY_STATS
3065 if (cpi->oxcf.pass != 1) {
3066 fprintf(stderr, "Writing counts.stt\n");
3067 FILE *f = fopen("counts.stt", "wb");
3068 fwrite(&aggregate_fc, sizeof(aggregate_fc), 1, f);
3069 fclose(f);
3070 }
3071#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003072#if CONFIG_INTERNAL_STATS
Yaowu Xuf883b422016-08-30 14:01:10 -07003073 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07003074
3075 if (cpi->oxcf.pass != 1) {
3076 char headings[512] = { 0 };
3077 char results[512] = { 0 };
3078 FILE *f = fopen("opsnr.stt", "a");
3079 double time_encoded =
3080 (cpi->last_end_time_stamp_seen - cpi->first_time_stamp_ever) /
3081 10000000.000;
3082 double total_encode_time =
3083 (cpi->time_receive_data + cpi->time_compress_data) / 1000.000;
3084 const double dr =
3085 (double)cpi->bytes * (double)8 / (double)1000 / time_encoded;
3086 const double peak = (double)((1 << cpi->oxcf.input_bit_depth) - 1);
3087 const double target_rate = (double)cpi->oxcf.target_bandwidth / 1000;
3088 const double rate_err = ((100.0 * (dr - target_rate)) / target_rate);
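      // Illustrative numbers: encoding 1,500,000 bytes over 10 seconds gives
      // dr = 1,500,000 * 8 / 1000 / 10 = 1200 kbps; against a 1000 kbps
      // target this reports rate_err = +20.0 (percent).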
3089
3090 if (cpi->b_calculate_psnr) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003091 const double total_psnr = aom_sse_to_psnr(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003092 (double)cpi->total_samples, peak, (double)cpi->total_sq_error);
3093 const double total_ssim =
3094 100 * pow(cpi->summed_quality / cpi->summed_weights, 8.0);
3095 snprintf(headings, sizeof(headings),
Jingning Han87651b22017-11-28 20:02:26 -08003096 "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\tGLPsnrP\t"
Yaowu Xuf883b422016-08-30 14:01:10 -07003097 "AOMSSIM\tVPSSIMP\tFASTSIM\tPSNRHVS\t"
Jingning Hanbe1ae3f2017-11-27 10:27:56 -08003098 "WstPsnr\tWstSsim\tWstFast\tWstHVS\t"
Jingning Han87651b22017-11-28 20:02:26 -08003099                "AVPsnrY\tAPsnrCb\tAPsnrCr");
Yaowu Xuc27fc142016-08-22 16:08:15 -07003100 snprintf(results, sizeof(results),
3101 "%7.2f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
3102 "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
Jingning Hanbe1ae3f2017-11-27 10:27:56 -08003103 "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
Jingning Han87651b22017-11-28 20:02:26 -08003104 "%7.3f\t%7.3f\t%7.3f",
Wan-Teh Changc25c92a2018-04-23 15:04:14 -07003105 dr, cpi->psnr.stat[STAT_ALL] / cpi->count, total_psnr,
3106 cpi->psnr.stat[STAT_ALL] / cpi->count, total_psnr, total_ssim,
3107 total_ssim, cpi->fastssim.stat[STAT_ALL] / cpi->count,
3108 cpi->psnrhvs.stat[STAT_ALL] / cpi->count, cpi->psnr.worst,
Jingning Hanbe1ae3f2017-11-27 10:27:56 -08003109 cpi->worst_ssim, cpi->fastssim.worst, cpi->psnrhvs.worst,
Wan-Teh Changc25c92a2018-04-23 15:04:14 -07003110 cpi->psnr.stat[STAT_Y] / cpi->count,
3111 cpi->psnr.stat[STAT_U] / cpi->count,
3112 cpi->psnr.stat[STAT_V] / cpi->count);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003113
3114 if (cpi->b_calculate_blockiness) {
3115 SNPRINT(headings, "\t Block\tWstBlck");
3116 SNPRINT2(results, "\t%7.3f", cpi->total_blockiness / cpi->count);
3117 SNPRINT2(results, "\t%7.3f", cpi->worst_blockiness);
3118 }
3119
3120 if (cpi->b_calculate_consistency) {
3121 double consistency =
Yaowu Xuf883b422016-08-30 14:01:10 -07003122 aom_sse_to_psnr((double)cpi->total_samples, peak,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003123 (double)cpi->total_inconsistency);
3124
3125 SNPRINT(headings, "\tConsist\tWstCons");
3126 SNPRINT2(results, "\t%7.3f", consistency);
3127 SNPRINT2(results, "\t%7.3f", cpi->worst_consistency);
3128 }
Sarah Parkerf97b7862016-08-25 17:42:57 -07003129 fprintf(f, "%s\t Time\tRcErr\tAbsErr\n", headings);
3130 fprintf(f, "%s\t%8.0f\t%7.2f\t%7.2f\n", results, total_encode_time,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003131 rate_err, fabs(rate_err));
3132 }
3133
3134 fclose(f);
3135 }
Urvang Joshiee2c8112018-05-04 14:53:15 -07003136#endif // CONFIG_INTERNAL_STATS
Debargha Mukherjee0857e662019-01-04 16:22:09 -08003137#if CONFIG_SPEED_STATS
3138 if (cpi->oxcf.pass != 1) {
3139 fprintf(stdout, "tx_search_count = %d\n", cpi->tx_search_count);
3140 }
3141#endif // CONFIG_SPEED_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003142 }
3143
Yue Chen7cae98f2018-08-24 10:43:16 -07003144 for (int frame = 0; frame < MAX_LAG_BUFFERS; ++frame) {
3145 aom_free(cpi->tpl_stats[frame].tpl_stats_ptr);
3146 cpi->tpl_stats[frame].is_valid = 0;
3147 }
3148
Ravi Chaudhary1f58dd82018-12-07 17:24:15 +05303149 for (t = cpi->num_workers - 1; t >= 0; --t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003150 AVxWorker *const worker = &cpi->workers[t];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003151 EncWorkerData *const thread_data = &cpi->tile_thr_data[t];
3152
3153 // Deallocate allocated threads.
Yaowu Xuf883b422016-08-30 14:01:10 -07003154 aom_get_worker_interface()->end(worker);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003155
3156 // Deallocate allocated thread data.
Ravi Chaudhary1cf7d162018-10-09 17:00:43 +05303157 if (cpi->row_mt == 1) aom_free(thread_data->td->tctx);
Ravi Chaudhary1f58dd82018-12-07 17:24:15 +05303158 if (t > 0) {
hui sud9a812b2017-07-06 14:34:37 -07003159 aom_free(thread_data->td->palette_buffer);
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07003160 aom_free(thread_data->td->tmp_conv_dst);
3161 for (int j = 0; j < 2; ++j) {
3162 aom_free(thread_data->td->tmp_obmc_bufs[j]);
3163 }
Jingning Hand064cf02017-06-01 10:00:39 -07003164 aom_free(thread_data->td->above_pred_buf);
3165 aom_free(thread_data->td->left_pred_buf);
3166 aom_free(thread_data->td->wsrc_buf);
wenyao.liu22d8ab32018-10-16 09:11:29 +08003167
Ravi Chaudhary5d970f42018-09-25 11:25:32 +05303168#if CONFIG_COLLECT_INTER_MODE_RD_STATS
3169 aom_free(thread_data->td->inter_modes_info);
3170#endif
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07003171 for (int x = 0; x < 2; x++) {
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05303172 for (int y = 0; y < 2; y++) {
3173 aom_free(thread_data->td->hash_value_buffer[x][y]);
3174 thread_data->td->hash_value_buffer[x][y] = NULL;
3175 }
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07003176 }
Jingning Hand064cf02017-06-01 10:00:39 -07003177 aom_free(thread_data->td->mask_buf);
Yaowu Xuf883b422016-08-30 14:01:10 -07003178 aom_free(thread_data->td->counts);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003179 av1_free_pc_tree(thread_data->td, num_planes);
Yaowu Xuf883b422016-08-30 14:01:10 -07003180 aom_free(thread_data->td);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003181 }
3182 }
Ravi Chaudhary90a15f42018-10-11 18:56:35 +05303183#if CONFIG_MULTITHREAD
3184 if (cpi->row_mt == 1) {
3185 if (cpi->row_mt_mutex_ != NULL) {
3186 pthread_mutex_destroy(cpi->row_mt_mutex_);
3187 aom_free(cpi->row_mt_mutex_);
3188 }
3189 }
3190#endif
Ravi Chaudharyc5e74692018-10-08 16:05:38 +05303191 av1_row_mt_mem_dealloc(cpi);
Yaowu Xuf883b422016-08-30 14:01:10 -07003192 aom_free(cpi->tile_thr_data);
3193 aom_free(cpi->workers);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003194
Deepa K G964e72e2018-05-16 16:56:01 +05303195 if (cpi->num_workers > 1) {
3196 av1_loop_filter_dealloc(&cpi->lf_row_sync);
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05303197 av1_loop_restoration_dealloc(&cpi->lr_row_sync, cpi->num_workers);
Deepa K G964e72e2018-05-16 16:56:01 +05303198 }
3199
Yaowu Xuc27fc142016-08-22 16:08:15 -07003200 dealloc_compressor_data(cpi);
3201
3202 for (i = 0; i < sizeof(cpi->mbgraph_stats) / sizeof(cpi->mbgraph_stats[0]);
3203 ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003204 aom_free(cpi->mbgraph_stats[i].mb_stats);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003205 }
3206
3207#if CONFIG_FP_MB_STATS
3208 if (cpi->use_fp_mb_stats) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003209 aom_free(cpi->twopass.frame_mb_stats_buf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003210 cpi->twopass.frame_mb_stats_buf = NULL;
3211 }
3212#endif
Debargha Mukherjee5d157212017-01-10 14:44:47 -08003213#if CONFIG_INTERNAL_STATS
3214 aom_free(cpi->ssim_vars);
3215 cpi->ssim_vars = NULL;
3216#endif // CONFIG_INTERNAL_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003217
Yaowu Xuf883b422016-08-30 14:01:10 -07003218 av1_remove_common(cm);
RogerZhou80d52342017-11-20 10:56:26 -08003219 for (i = 0; i < FRAME_BUFFERS; ++i) {
3220 av1_hash_table_destroy(&cm->buffer_pool->frame_bufs[i].hash_table);
3221 }
Michelle Findlay-Olynykdea531d2017-12-13 14:10:56 -08003222 if (cpi->sf.use_hash_based_trellis) hbt_destroy();
Yaowu Xuf883b422016-08-30 14:01:10 -07003223 av1_free_ref_frame_buffers(cm->buffer_pool);
3224 aom_free(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003225
3226#ifdef OUTPUT_YUV_SKINMAP
3227 fclose(yuv_skinmap_file);
3228#endif
3229#ifdef OUTPUT_YUV_REC
3230 fclose(yuv_rec_file);
3231#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003232}
3233
Yaowu Xuf883b422016-08-30 14:01:10 -07003234static void generate_psnr_packet(AV1_COMP *cpi) {
3235 struct aom_codec_cx_pkt pkt;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003236 int i;
3237 PSNR_STATS psnr;
David Turnerc29e1a92018-12-06 14:10:14 +00003238 aom_calc_highbd_psnr(cpi->source, &cpi->common.cur_frame->buf, &psnr,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003239 cpi->td.mb.e_mbd.bd, cpi->oxcf.input_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003240
3241 for (i = 0; i < 4; ++i) {
3242 pkt.data.psnr.samples[i] = psnr.samples[i];
3243 pkt.data.psnr.sse[i] = psnr.sse[i];
3244 pkt.data.psnr.psnr[i] = psnr.psnr[i];
3245 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003246 pkt.kind = AOM_CODEC_PSNR_PKT;
3247 aom_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003248}
3249
Yaowu Xuf883b422016-08-30 14:01:10 -07003250int av1_use_as_reference(AV1_COMP *cpi, int ref_frame_flags) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003251 if (ref_frame_flags > ((1 << INTER_REFS_PER_FRAME) - 1)) return -1;
3252
Yunqing Wangf2e7a392017-11-08 00:27:21 -08003253 cpi->ext_ref_frame_flags = ref_frame_flags;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003254 return 0;
3255}
3256
Yunqing Wang9a50fec2017-11-02 17:02:00 -07003257void av1_update_reference(AV1_COMP *cpi, int ref_frame_upd_flags) {
3258 cpi->ext_refresh_last_frame = (ref_frame_upd_flags & AOM_LAST_FLAG) != 0;
3259 cpi->ext_refresh_golden_frame = (ref_frame_upd_flags & AOM_GOLD_FLAG) != 0;
3260 cpi->ext_refresh_alt_ref_frame = (ref_frame_upd_flags & AOM_ALT_FLAG) != 0;
3261 cpi->ext_refresh_bwd_ref_frame = (ref_frame_upd_flags & AOM_BWD_FLAG) != 0;
3262 cpi->ext_refresh_alt2_ref_frame = (ref_frame_upd_flags & AOM_ALT2_FLAG) != 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003263 cpi->ext_refresh_frame_flags_pending = 1;
3264}
3265
Thomas Daede497d1952017-08-08 17:33:06 -07003266int av1_copy_reference_enc(AV1_COMP *cpi, int idx, YV12_BUFFER_CONFIG *sd) {
3267 AV1_COMMON *const cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003268 const int num_planes = av1_num_planes(cm);
Thomas Daede497d1952017-08-08 17:33:06 -07003269 YV12_BUFFER_CONFIG *cfg = get_ref_frame(cm, idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003270 if (cfg) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003271 aom_yv12_copy_frame(cfg, sd, num_planes);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003272 return 0;
3273 } else {
3274 return -1;
3275 }
3276}
3277
Thomas Daede497d1952017-08-08 17:33:06 -07003278int av1_set_reference_enc(AV1_COMP *cpi, int idx, YV12_BUFFER_CONFIG *sd) {
3279 AV1_COMMON *const cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003280 const int num_planes = av1_num_planes(cm);
Thomas Daede497d1952017-08-08 17:33:06 -07003281 YV12_BUFFER_CONFIG *cfg = get_ref_frame(cm, idx);
Yaowu Xuf883b422016-08-30 14:01:10 -07003282 if (cfg) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003283 aom_yv12_copy_frame(sd, cfg, num_planes);
Yaowu Xuf883b422016-08-30 14:01:10 -07003284 return 0;
3285 } else {
3286 return -1;
3287 }
3288}
3289
3290int av1_update_entropy(AV1_COMP *cpi, int update) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003291 cpi->ext_refresh_frame_context = update;
3292 cpi->ext_refresh_frame_context_pending = 1;
3293 return 0;
3294}
3295
3296#if defined(OUTPUT_YUV_DENOISED) || defined(OUTPUT_YUV_SKINMAP)
3297// The denoiser buffer is allocated as a YUV 440 buffer. This function writes it
3298// as YUV 420. We simply use the top-left pixels of the UV buffers, since we do
3299// not denoise the UV channels at this time. If ever we implement UV channel
3300// denoising we will have to modify this.
Yaowu Xuf883b422016-08-30 14:01:10 -07003301void aom_write_yuv_frame_420(YV12_BUFFER_CONFIG *s, FILE *f) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003302 uint8_t *src = s->y_buffer;
3303 int h = s->y_height;
3304
3305 do {
3306 fwrite(src, s->y_width, 1, f);
3307 src += s->y_stride;
3308 } while (--h);
3309
3310 src = s->u_buffer;
3311 h = s->uv_height;
3312
3313 do {
3314 fwrite(src, s->uv_width, 1, f);
3315 src += s->uv_stride;
3316 } while (--h);
3317
3318 src = s->v_buffer;
3319 h = s->uv_height;
3320
3321 do {
3322 fwrite(src, s->uv_width, 1, f);
3323 src += s->uv_stride;
3324 } while (--h);
3325}
3326#endif
3327
Yaowu Xuf883b422016-08-30 14:01:10 -07003328static void check_show_existing_frame(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003329 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
Yaowu Xuf883b422016-08-30 14:01:10 -07003330 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003331 const FRAME_UPDATE_TYPE next_frame_update_type =
3332 gf_group->update_type[gf_group->index];
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003333#if USE_SYMM_MULTI_LAYER
3334 const int which_arf = (cpi->new_bwdref_update_rule == 1)
3335 ? gf_group->arf_update_idx[gf_group->index] > 0
3336 : gf_group->arf_update_idx[gf_group->index];
3337#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003338 const int which_arf = gf_group->arf_update_idx[gf_group->index];
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003339#endif
Zoe Liu5fca7242016-10-10 17:18:57 -07003340
3341 if (cm->show_existing_frame == 1) {
3342 cm->show_existing_frame = 0;
3343 } else if (cpi->rc.is_last_bipred_frame) {
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003344#if USE_SYMM_MULTI_LAYER
3345      // NOTE: When the new structure is used, every bwdref will have one overlay

3346 // frame. Therefore, there is no need to find out which frame to
3347 // show in advance.
3348 if (cpi->new_bwdref_update_rule == 0) {
3349#endif
3350      // NOTE: If the current frame is a last bi-predictive frame, the
3351      //       BWDREF_FRAME needs to be shown next, and it is pointed to by
3352      //       remapped_ref_idx[0] after the reference frame buffer update.
3353 cpi->rc.is_last_bipred_frame = 0;
3354 cm->show_existing_frame = 1;
David Turnera21966b2018-12-05 14:48:49 +00003355 cpi->existing_fb_idx_to_show = cm->remapped_ref_idx[0];
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003356#if USE_SYMM_MULTI_LAYER
3357 }
3358#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003359 } else if (cpi->is_arf_filter_off[which_arf] &&
3360 (next_frame_update_type == OVERLAY_UPDATE ||
3361 next_frame_update_type == INTNL_OVERLAY_UPDATE)) {
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003362#if USE_SYMM_MULTI_LAYER
3363 const int bwdref_to_show =
3364 (cpi->new_bwdref_update_rule == 1) ? BWDREF_FRAME : ALTREF2_FRAME;
3365#else
3366 const int bwdref_to_show = ALTREF2_FRAME;
3367#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003368 // Other parameters related to OVERLAY_UPDATE will be taken care of
Yaowu Xuf883b422016-08-30 14:01:10 -07003369 // in av1_rc_get_second_pass_params(cpi)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003370 cm->show_existing_frame = 1;
3371 cpi->rc.is_src_frame_alt_ref = 1;
Urvang Joshi4d9f15f2018-11-05 15:26:22 -08003372 cpi->existing_fb_idx_to_show =
3373 (next_frame_update_type == OVERLAY_UPDATE)
David Turnera21966b2018-12-05 14:48:49 +00003374 ? get_ref_frame_map_idx(cm, ALTREF_FRAME)
3375 : get_ref_frame_map_idx(cm, bwdref_to_show);
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003376#if USE_SYMM_MULTI_LAYER
3377 if (cpi->new_bwdref_update_rule == 0)
3378#endif
3379 cpi->is_arf_filter_off[which_arf] = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003380 }
3381 cpi->rc.is_src_frame_ext_arf = 0;
3382}
Yaowu Xuc27fc142016-08-22 16:08:15 -07003383
3384#ifdef OUTPUT_YUV_REC
Yaowu Xuf883b422016-08-30 14:01:10 -07003385void aom_write_one_yuv_frame(AV1_COMMON *cm, YV12_BUFFER_CONFIG *s) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003386 uint8_t *src = s->y_buffer;
3387 int h = cm->height;
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07003388 if (yuv_rec_file == NULL) return;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003389 if (s->flags & YV12_FLAG_HIGHBITDEPTH) {
3390 uint16_t *src16 = CONVERT_TO_SHORTPTR(s->y_buffer);
3391
3392 do {
3393 fwrite(src16, s->y_width, 2, yuv_rec_file);
3394 src16 += s->y_stride;
3395 } while (--h);
3396
3397 src16 = CONVERT_TO_SHORTPTR(s->u_buffer);
3398 h = s->uv_height;
3399
3400 do {
3401 fwrite(src16, s->uv_width, 2, yuv_rec_file);
3402 src16 += s->uv_stride;
3403 } while (--h);
3404
3405 src16 = CONVERT_TO_SHORTPTR(s->v_buffer);
3406 h = s->uv_height;
3407
3408 do {
3409 fwrite(src16, s->uv_width, 2, yuv_rec_file);
3410 src16 += s->uv_stride;
3411 } while (--h);
3412
3413 fflush(yuv_rec_file);
3414 return;
3415 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003416
3417 do {
3418 fwrite(src, s->y_width, 1, yuv_rec_file);
3419 src += s->y_stride;
3420 } while (--h);
3421
3422 src = s->u_buffer;
3423 h = s->uv_height;
3424
3425 do {
3426 fwrite(src, s->uv_width, 1, yuv_rec_file);
3427 src += s->uv_stride;
3428 } while (--h);
3429
3430 src = s->v_buffer;
3431 h = s->uv_height;
3432
3433 do {
3434 fwrite(src, s->uv_width, 1, yuv_rec_file);
3435 src += s->uv_stride;
3436 } while (--h);
3437
3438 fflush(yuv_rec_file);
3439}
3440#endif // OUTPUT_YUV_REC
3441
Debargha Mukherjee11f0e402017-03-29 07:42:40 -07003442#define GM_RECODE_LOOP_NUM4X4_FACTOR 192
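// Rough reading of the factor below (a sketch, not a normative rule, and
// assuming gmparams_cost[] is in the same rate units used by the RD code): a
// global motion model is kept only if at least ceil(gmparams_cost[i] / 192)
// 4x4 blocks actually used it. E.g. with a cost of 2000, 10 blocks give
// 10 * 192 = 1920 < 2000 (model is dropped and the frame recoded), while
// 11 blocks give 2112 and keep it.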
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003443static int recode_loop_test_global_motion(AV1_COMP *cpi) {
3444 int i;
3445 int recode = 0;
Debargha Mukherjeea575d232017-04-28 17:46:47 -07003446 RD_COUNTS *const rdc = &cpi->td.rd_counts;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003447 AV1_COMMON *const cm = &cpi->common;
3448 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
3449 if (cm->global_motion[i].wmtype != IDENTITY &&
Debargha Mukherjeea575d232017-04-28 17:46:47 -07003450 rdc->global_motion_used[i] * GM_RECODE_LOOP_NUM4X4_FACTOR <
Debargha Mukherjee265db6d2017-03-28 11:15:27 -07003451 cpi->gmparams_cost[i]) {
David Barkerd7c8bd52017-09-25 14:47:29 +01003452 cm->global_motion[i] = default_warp_params;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07003453 assert(cm->global_motion[i].wmtype == IDENTITY);
Debargha Mukherjee265db6d2017-03-28 11:15:27 -07003454 cpi->gmparams_cost[i] = 0;
David Barker43479c62016-11-30 10:34:20 +00003455 recode = 1;
Urvang Joshi02aade82017-12-18 17:18:16 -08003456 // TODO(sarahparker): The earlier condition for recoding here was:
3457 // "recode |= (rdc->global_motion_used[i] > 0);". Can we bring something
3458 // similar to that back to speed up global motion?
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003459 }
3460 }
3461 return recode;
3462}
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003463
Yaowu Xuc27fc142016-08-22 16:08:15 -07003464// Function to test for conditions that indicate we should loop
3465// back and recode a frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07003466static int recode_loop_test(AV1_COMP *cpi, int high_limit, int low_limit, int q,
3467 int maxq, int minq) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003468 const RATE_CONTROL *const rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -07003469 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003470 const int frame_is_kfgfarf = frame_is_kf_gf_arf(cpi);
3471 int force_recode = 0;
3472
3473 if ((rc->projected_frame_size >= rc->max_frame_bandwidth) ||
3474 (cpi->sf.recode_loop == ALLOW_RECODE) ||
3475 (frame_is_kfgfarf && (cpi->sf.recode_loop == ALLOW_RECODE_KFARFGF))) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003476 // TODO(agrange) high_limit could be greater than the scale-down threshold.
3477 if ((rc->projected_frame_size > high_limit && q < maxq) ||
3478 (rc->projected_frame_size < low_limit && q > minq)) {
3479 force_recode = 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07003480 } else if (cpi->oxcf.rc_mode == AOM_CQ) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003481 // Deal with frame undershoot and whether or not we are
3482 // below the automatically set cq level.
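      // ((x * 7) >> 3) is 7/8 (87.5%) of the frame target, so e.g. with a
      // target of 4000 a recode is only forced when the projected size comes
      // in under 3500 while q is still above the configured cq_level.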
3483 if (q > oxcf->cq_level &&
3484 rc->projected_frame_size < ((rc->this_frame_target * 7) >> 3)) {
3485 force_recode = 1;
3486 }
3487 }
3488 }
3489 return force_recode;
3490}
3491
Yaowu Xuc27fc142016-08-22 16:08:15 -07003492#define DUMP_REF_FRAME_IMAGES 0
3493
3494#if DUMP_REF_FRAME_IMAGES == 1
Yaowu Xuf883b422016-08-30 14:01:10 -07003495static int dump_one_image(AV1_COMMON *cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003496 const YV12_BUFFER_CONFIG *const ref_buf,
3497 char *file_name) {
3498 int h;
3499 FILE *f_ref = NULL;
3500
3501 if (ref_buf == NULL) {
3502 printf("Frame data buffer is NULL.\n");
Yaowu Xuf883b422016-08-30 14:01:10 -07003503 return AOM_CODEC_MEM_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003504 }
3505
3506 if ((f_ref = fopen(file_name, "wb")) == NULL) {
3507 printf("Unable to open file %s to write.\n", file_name);
Yaowu Xuf883b422016-08-30 14:01:10 -07003508 return AOM_CODEC_MEM_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003509 }
3510
3511 // --- Y ---
3512 for (h = 0; h < cm->height; ++h) {
3513 fwrite(&ref_buf->y_buffer[h * ref_buf->y_stride], 1, cm->width, f_ref);
3514 }
3515 // --- U ---
3516 for (h = 0; h < (cm->height >> 1); ++h) {
3517 fwrite(&ref_buf->u_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
3518 f_ref);
3519 }
3520 // --- V ---
3521 for (h = 0; h < (cm->height >> 1); ++h) {
3522 fwrite(&ref_buf->v_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
3523 f_ref);
3524 }
3525
3526 fclose(f_ref);
3527
Yaowu Xuf883b422016-08-30 14:01:10 -07003528 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003529}
3530
Yaowu Xuf883b422016-08-30 14:01:10 -07003531static void dump_ref_frame_images(AV1_COMP *cpi) {
3532 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003533 MV_REFERENCE_FRAME ref_frame;
3534
3535 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
3536 char file_name[256] = "";
3537 snprintf(file_name, sizeof(file_name), "/tmp/enc_F%d_ref_%d.yuv",
David Turnerd2a592e2018-11-16 14:59:31 +00003538 cm->current_frame.frame_number, ref_frame);
David Turnera21966b2018-12-05 14:48:49 +00003539 dump_one_image(cm, get_ref_frame_yv12_buf(cpi, ref_frame), file_name);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003540 }
3541}
3542#endif // DUMP_REF_FRAME_IMAGES == 1
3543
Yaowu Xuc27fc142016-08-22 16:08:15 -07003544// This function is used to shift the virtual indices of last reference frames
3545// as follows:
3546// LAST_FRAME -> LAST2_FRAME -> LAST3_FRAME
3547// when the LAST_FRAME is updated.
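// Example with hypothetical virtual indices: if remapped_ref_idx[] maps
// LAST->3, LAST2->5, LAST3->7 before the shift, it maps LAST2->3 and
// LAST3->5 afterwards; the caller (e.g. update_reference_frames()) then
// re-points LAST_FRAME, typically to the slot previously used by LAST3.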
Yaowu Xuf883b422016-08-30 14:01:10 -07003548static INLINE void shift_last_ref_frames(AV1_COMP *cpi) {
Imdad Sardharwalladadaba62018-02-23 12:06:56 +00003549 // TODO(isbs): shift the scaled indices as well
Urvang Joshia130dcc2018-11-06 10:27:35 -08003550 for (int ref_frame = LAST3_FRAME; ref_frame > LAST_FRAME; --ref_frame) {
3551 const int ref_idx = ref_frame - LAST_FRAME;
David Turnera21966b2018-12-05 14:48:49 +00003552 cpi->common.remapped_ref_idx[ref_idx] =
3553 cpi->common.remapped_ref_idx[ref_idx - 1];
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003554
3555 if (!cpi->rc.is_src_frame_alt_ref) {
3556 memcpy(cpi->interp_filter_selected[ref_frame],
3557 cpi->interp_filter_selected[ref_frame - 1],
3558 sizeof(cpi->interp_filter_selected[ref_frame - 1]));
3559 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003560 }
3561}
Yaowu Xuc27fc142016-08-22 16:08:15 -07003562
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003563#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003564// This function is used to shift the virtual indices of bwd reference
3565// frames as follows:
3566// BWD_REF -> ALT2_REF -> EXT_REF
3567// to clear a space to store the closest bwdref
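// Example with hypothetical virtual indices: if BWDREF->1, ALTREF2->2 and
// EXTREF->4 before the shift, then ALTREF2->1 and EXTREF->2 afterwards; the
// caller re-points BWDREF, e.g. to the slot previously used by EXTREF, as
// done in update_reference_frames().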
3568static INLINE void rshift_bwd_ref_frames(AV1_COMP *cpi) {
3569 // TODO(isbs): shift the scaled indices as well
Urvang Joshi03d8ebe2018-11-08 17:13:44 -08003570 static const int ordered_bwd[3] = { BWDREF_FRAME, ALTREF2_FRAME,
3571 EXTREF_FRAME };
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003572
3573 for (int i = 2; i > 0; --i) {
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003574 // [0] is allocated to the current coded frame, i.e. bwdref
3575 memcpy(cpi->interp_filter_selected[ordered_bwd[i]],
3576 cpi->interp_filter_selected[ordered_bwd[i - 1]],
3577 sizeof(cpi->interp_filter_selected[ordered_bwd[i - 1]]));
3578
David Turnera21966b2018-12-05 14:48:49 +00003579 cpi->common.remapped_ref_idx[ordered_bwd[i] - LAST_FRAME] =
3580 cpi->common.remapped_ref_idx[ordered_bwd[i - 1] - LAST_FRAME];
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003581 }
3582}
3583
3584// This function is used to shift the virtual indices of bwd reference
3585// frames as follows:
3586// BWD_REF <- ALT2_REF <- EXT_REF
3587// to update the bwd reference frame for coding the next frame.
3588static INLINE void lshift_bwd_ref_frames(AV1_COMP *cpi) {
3589 // TODO(isbs): shift the scaled indices as well
Urvang Joshi03d8ebe2018-11-08 17:13:44 -08003590 static const int ordered_bwd[3] = { BWDREF_FRAME, ALTREF2_FRAME,
3591 EXTREF_FRAME };
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003592
3593 for (int i = 0; i < 2; ++i) {
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003594 // [0] is allocated to the current coded frame, i.e. bwdref
3595 memcpy(cpi->interp_filter_selected[ordered_bwd[i]],
3596 cpi->interp_filter_selected[ordered_bwd[i + 1]],
3597 sizeof(cpi->interp_filter_selected[ordered_bwd[i + 1]]));
3598
David Turnera21966b2018-12-05 14:48:49 +00003599 cpi->common.remapped_ref_idx[ordered_bwd[i] - LAST_FRAME] =
3600 cpi->common.remapped_ref_idx[ordered_bwd[i + 1] - LAST_FRAME];
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003601 }
3602}
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003603#endif // USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003604
Zoe Liu8dd1c982017-09-11 10:14:35 -07003605static void update_reference_frames(AV1_COMP *cpi) {
3606 AV1_COMMON *const cm = &cpi->common;
3607
Yaowu Xuc27fc142016-08-22 16:08:15 -07003608 // NOTE: Save the new show frame buffer index for --test-code=warn, i.e.,
3609  // to verify that there is no mismatch between the encoder and decoder.
David Turnere7ebf902018-12-04 14:04:55 +00003610 if (cm->show_frame) cpi->last_show_frame_buf = cm->cur_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003611
Sarah Parker33005522018-07-27 14:46:25 -07003612  // In the case of a show_existing_frame, we will not send refresh flags
  3613  // to the decoder. Any change in the reference frame buffers can be done by
3614 // switching the virtual indices.
3615 if (cm->show_existing_frame) {
Sarah Parker29147cf2018-10-16 20:34:51 -07003616 // If we are not indicating to the decoder that this frame is
3617 // a show_existing_frame, which occurs in error_resilient mode,
Sarah Parkera9e19052018-10-18 17:49:26 -07003618 // we still want to refresh the LAST_FRAME when the current frame
3619 // was the source of an ext_arf.
3620 cpi->refresh_last_frame =
3621 !encode_show_existing_frame(cm) && cpi->rc.is_src_frame_ext_arf;
Sarah Parker33005522018-07-27 14:46:25 -07003622 cpi->refresh_golden_frame = 0;
3623 cpi->refresh_bwd_ref_frame = 0;
3624 cpi->refresh_alt2_ref_frame = 0;
3625 cpi->refresh_alt_ref_frame = 0;
3626
3627 cpi->rc.is_bwd_ref_frame = 0;
3628 cpi->rc.is_last_bipred_frame = 0;
3629 cpi->rc.is_bipred_frame = 0;
3630 }
3631
Yaowu Xuc27fc142016-08-22 16:08:15 -07003632 // At this point the new frame has been encoded.
3633 // If any buffer copy / swapping is signaled it should be done here.
Zoe Liubcef1e62018-04-06 20:56:11 -07003634
Sarah Parkerb9041612018-05-22 19:06:47 -07003635 // Only update all of the reference buffers if a KEY_FRAME is also a
3636 // show_frame. This ensures a fwd keyframe does not update all of the buffers
David Turnerd2a592e2018-11-16 14:59:31 +00003637 if ((cm->current_frame.frame_type == KEY_FRAME && cm->show_frame) ||
3638 frame_is_sframe(cm)) {
Zoe Liubcef1e62018-04-06 20:56:11 -07003639 for (int ref_frame = 0; ref_frame < REF_FRAMES; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00003640 assign_frame_buffer_p(&cm->ref_frame_map[cm->remapped_ref_idx[ref_frame]],
3641 cm->cur_frame);
Zoe Liubcef1e62018-04-06 20:56:11 -07003642 }
3643 return;
3644 }
3645
3646 if (av1_preserve_existing_gf(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003647 // We have decided to preserve the previously existing golden frame as our
3648 // new ARF frame. However, in the short term in function
Yaowu Xuf883b422016-08-30 14:01:10 -07003649 // av1_bitstream.c::get_refresh_mask() we left it in the GF slot and, if
Yaowu Xuc27fc142016-08-22 16:08:15 -07003650 // we're updating the GF with the current decoded frame, we save it to the
3651 // ARF slot instead.
3652 // We now have to update the ARF with the current frame and swap gld_fb_idx
3653 // and alt_fb_idx so that, overall, we've stored the old GF in the new ARF
3654 // slot and, if we're updating the GF, the current frame becomes the new GF.
3655 int tmp;
3656
Wei-Ting Lina8c02452018-08-13 11:04:06 -07003657    // ARF in general is a better reference than the overlay. We should keep
    3658    // ARF as a reference instead of replacing it with the overlay.
3659
3660 if (!cpi->preserve_arf_as_gld) {
David Turnere7ebf902018-12-04 14:04:55 +00003661 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003662 &cm->ref_frame_map[get_ref_frame_map_idx(cm, ALTREF_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003663 cm->cur_frame);
Wei-Ting Lina8c02452018-08-13 11:04:06 -07003664 }
3665
David Turnera21966b2018-12-05 14:48:49 +00003666 tmp = get_ref_frame_map_idx(cm, ALTREF_FRAME);
3667 cm->remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] =
3668 get_ref_frame_map_idx(cm, GOLDEN_FRAME);
3669 cm->remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] = tmp;
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003670
3671 // TODO(zoeliu): Do we need to copy cpi->interp_filter_selected[0] over to
3672 // cpi->interp_filter_selected[GOLDEN_FRAME]?
Sarah Parker7a9bb782018-10-11 14:52:42 -07003673 } else if (cpi->rc.is_src_frame_ext_arf && encode_show_existing_frame(cm)) {
Wei-Ting Linb72453f2018-06-26 14:05:38 -07003674#if CONFIG_DEBUG
3675 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
3676 assert(gf_group->update_type[gf_group->index] == INTNL_OVERLAY_UPDATE);
3677#endif
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003678#if USE_SYMM_MULTI_LAYER
3679 const int bwdref_to_show =
3680 (cpi->new_bwdref_update_rule == 1) ? BWDREF_FRAME : ALTREF2_FRAME;
3681#else
3682 const int bwdref_to_show = ALTREF2_FRAME;
3683#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003684 // Deal with the special case for showing existing internal ALTREF_FRAME
3685 // Refresh the LAST_FRAME with the ALTREF_FRAME and retire the LAST3_FRAME
3686 // by updating the virtual indices.
David Turnera21966b2018-12-05 14:48:49 +00003687 const int last3_remapped_idx = get_ref_frame_map_idx(cm, LAST3_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003688 shift_last_ref_frames(cpi);
Zoe Liue9b15e22017-07-19 15:53:01 -07003689
David Turnera21966b2018-12-05 14:48:49 +00003690 cm->remapped_ref_idx[LAST_FRAME - LAST_FRAME] =
3691 get_ref_frame_map_idx(cm, bwdref_to_show);
Zoe Liue9b15e22017-07-19 15:53:01 -07003692
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003693 memcpy(cpi->interp_filter_selected[LAST_FRAME],
3694 cpi->interp_filter_selected[bwdref_to_show],
3695 sizeof(cpi->interp_filter_selected[bwdref_to_show]));
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003696#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003697 if (cpi->new_bwdref_update_rule == 1) {
3698 lshift_bwd_ref_frames(cpi);
3699      // Pass the outdated forward reference frame (previous LAST3) to the
3700      // spare slot freed up by the shift.
David Turnera21966b2018-12-05 14:48:49 +00003701 cm->remapped_ref_idx[EXTREF_FRAME - LAST_FRAME] = last3_remapped_idx;
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003702 } else {
3703#endif
David Turnera21966b2018-12-05 14:48:49 +00003704 cm->remapped_ref_idx[bwdref_to_show - LAST_FRAME] = last3_remapped_idx;
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003705#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003706 }
3707#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003708 } else { /* For non key/golden frames */
Zoe Liue9b15e22017-07-19 15:53:01 -07003709 // === ALTREF_FRAME ===
Yaowu Xuc27fc142016-08-22 16:08:15 -07003710 if (cpi->refresh_alt_ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00003711 int arf_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
David Turnere7ebf902018-12-04 14:04:55 +00003712 assign_frame_buffer_p(&cm->ref_frame_map[arf_idx], cm->cur_frame);
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003713
3714 memcpy(cpi->interp_filter_selected[ALTREF_FRAME],
3715 cpi->interp_filter_selected[0],
3716 sizeof(cpi->interp_filter_selected[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003717 }
3718
Zoe Liue9b15e22017-07-19 15:53:01 -07003719 // === GOLDEN_FRAME ===
Yaowu Xuc27fc142016-08-22 16:08:15 -07003720 if (cpi->refresh_golden_frame) {
David Turnere7ebf902018-12-04 14:04:55 +00003721 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003722 &cm->ref_frame_map[get_ref_frame_map_idx(cm, GOLDEN_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003723 cm->cur_frame);
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003724
3725 memcpy(cpi->interp_filter_selected[GOLDEN_FRAME],
3726 cpi->interp_filter_selected[0],
3727 sizeof(cpi->interp_filter_selected[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003728 }
3729
Zoe Liue9b15e22017-07-19 15:53:01 -07003730 // === BWDREF_FRAME ===
Yaowu Xuc27fc142016-08-22 16:08:15 -07003731 if (cpi->refresh_bwd_ref_frame) {
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003732#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003733 if (cpi->new_bwdref_update_rule) {
3734 // We shift the backward reference frame as follows:
3735 // BWDREF -> ALTREF2 -> EXTREF
3736 // and assign the newly coded frame to BWDREF so that it always
3737 // keeps the nearest future frame
David Turnera21966b2018-12-05 14:48:49 +00003738 const int tmp = get_ref_frame_map_idx(cm, EXTREF_FRAME);
David Turnere7ebf902018-12-04 14:04:55 +00003739 assign_frame_buffer_p(&cm->ref_frame_map[tmp], cm->cur_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003740
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003741 rshift_bwd_ref_frames(cpi);
David Turnera21966b2018-12-05 14:48:49 +00003742 cm->remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = tmp;
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003743 } else {
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003744#endif // USE_SYMM_MULTI_LAYER
David Turnere7ebf902018-12-04 14:04:55 +00003745 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003746 &cm->ref_frame_map[get_ref_frame_map_idx(cm, BWDREF_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003747 cm->cur_frame);
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003748#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003749 }
3750#endif
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003751 memcpy(cpi->interp_filter_selected[BWDREF_FRAME],
3752 cpi->interp_filter_selected[0],
3753 sizeof(cpi->interp_filter_selected[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003754 }
Zoe Liue9b15e22017-07-19 15:53:01 -07003755
Zoe Liue9b15e22017-07-19 15:53:01 -07003756 // === ALTREF2_FRAME ===
3757 if (cpi->refresh_alt2_ref_frame) {
David Turnere7ebf902018-12-04 14:04:55 +00003758 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003759 &cm->ref_frame_map[get_ref_frame_map_idx(cm, ALTREF2_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003760 cm->cur_frame);
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003761
3762 memcpy(cpi->interp_filter_selected[ALTREF2_FRAME],
3763 cpi->interp_filter_selected[0],
3764 sizeof(cpi->interp_filter_selected[0]));
Zoe Liue9b15e22017-07-19 15:53:01 -07003765 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003766 }
3767
3768 if (cpi->refresh_last_frame) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003769 // NOTE(zoeliu): We have two layers of mapping (1) from the per-frame
3770 // reference to the reference frame buffer virtual index; and then (2) from
David Turnere7ebf902018-12-04 14:04:55 +00003771 // the virtual index to the reference frame buffer (RefCntBuffer):
Yaowu Xuc27fc142016-08-22 16:08:15 -07003772 //
Urvang Joshi4d9f15f2018-11-05 15:26:22 -08003773 // LAST_FRAME, ..., EXTREF_FRAME
3774 // | |
3775 // v v
3776 // remapped_ref_idx[LAST_FRAME - 1], ..., remapped_ref_idx[EXTREF_FRAME - 1]
3777 // | |
3778 // v v
3779 // ref_frame_map[], ..., ref_frame_map[]
Yaowu Xuc27fc142016-08-22 16:08:15 -07003780 //
3781 // When refresh_last_frame is set, it is intended to retire LAST3_FRAME,
3782 // have the other 2 LAST reference frames shifted as follows:
3783 // LAST_FRAME -> LAST2_FRAME -> LAST3_FRAME
3784 // , and then have LAST_FRAME refreshed by the newly coded frame.
3785 //
3786 // To fulfill it, the decoder will be notified to execute following 2 steps:
3787 //
3788 // (a) To change ref_frame_map[] and have the virtual index of LAST3_FRAME
3789 // to point to the newly coded frame, i.e.
David Turnere7ebf902018-12-04 14:04:55 +00003790    //         ref_frame_map[remapped_ref_idx[LAST3_FRAME - LAST_FRAME]] => cur_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003791 //
3792 // (b) To change the 1st layer mapping to have LAST_FRAME mapped to the
3793 // original virtual index of LAST3_FRAME and have the other mappings
3794 // shifted as follows:
Urvang Joshi4d9f15f2018-11-05 15:26:22 -08003795 // LAST_FRAME, LAST2_FRAME, LAST3_FRAME
3796 // | | |
3797 // v v v
3798 // remapped_ref_idx[2], remapped_ref_idx[0], remapped_ref_idx[1]
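    // Concretely (hypothetical indices): if remapped_ref_idx[] held {3, 5, 7}
    // for LAST/LAST2/LAST3 before this block, step (a) makes ref_frame_map[7]
    // point at cm->cur_frame and step (b) leaves the mapping as LAST->7,
    // LAST2->3, LAST3->5, which is what the code below implements.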
David Turnere7ebf902018-12-04 14:04:55 +00003799 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003800 &cm->ref_frame_map[get_ref_frame_map_idx(cm, LAST3_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003801 cm->cur_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003802
David Turnera21966b2018-12-05 14:48:49 +00003803 int last3_remapped_idx = get_ref_frame_map_idx(cm, LAST3_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003804
Zoe Liubcef1e62018-04-06 20:56:11 -07003805 shift_last_ref_frames(cpi);
David Turnera21966b2018-12-05 14:48:49 +00003806 cm->remapped_ref_idx[LAST_FRAME - LAST_FRAME] = last3_remapped_idx;
Zoe Liubcef1e62018-04-06 20:56:11 -07003807
Sarah Parker5336b9c2018-10-18 11:34:20 -07003808 assert(!encode_show_existing_frame(cm));
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003809 memcpy(cpi->interp_filter_selected[LAST_FRAME],
3810 cpi->interp_filter_selected[0],
3811 sizeof(cpi->interp_filter_selected[0]));
Zoe Liubcef1e62018-04-06 20:56:11 -07003812
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003813 // If the new structure is used, we will always have overlay frames coupled
3814 // with bwdref frames. Therefore, we won't have to perform this update
3815 // in advance (we do this update when the overlay frame shows up).
3816#if USE_SYMM_MULTI_LAYER
3817 if (cpi->new_bwdref_update_rule == 0 && cpi->rc.is_last_bipred_frame) {
3818#else
Zoe Liubcef1e62018-04-06 20:56:11 -07003819 if (cpi->rc.is_last_bipred_frame) {
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003820#endif
Zoe Liubcef1e62018-04-06 20:56:11 -07003821 // Refresh the LAST_FRAME with the BWDREF_FRAME and retire the
3822 // LAST3_FRAME by updating the virtual indices.
3823 //
3824      // NOTE: The source frame for BWDREF does not have a holding position
3825      //       like the OVERLAY frame does for ALTREF. Hence, to resolve the reference
3826 // virtual index reshuffling for BWDREF, the encoder always
3827 // specifies a LAST_BIPRED right before BWDREF and completes the
3828 // reshuffling job accordingly.
David Turnera21966b2018-12-05 14:48:49 +00003829 last3_remapped_idx = get_ref_frame_map_idx(cm, LAST3_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003830
3831 shift_last_ref_frames(cpi);
David Turnera21966b2018-12-05 14:48:49 +00003832 cm->remapped_ref_idx[LAST_FRAME - LAST_FRAME] =
3833 get_ref_frame_map_idx(cm, BWDREF_FRAME);
3834 cm->remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = last3_remapped_idx;
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003835
3836 memcpy(cpi->interp_filter_selected[LAST_FRAME],
3837 cpi->interp_filter_selected[BWDREF_FRAME],
3838 sizeof(cpi->interp_filter_selected[BWDREF_FRAME]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003839 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003840 }
3841
3842#if DUMP_REF_FRAME_IMAGES == 1
3843 // Dump out all reference frame images.
3844 dump_ref_frame_images(cpi);
3845#endif // DUMP_REF_FRAME_IMAGES
3846}
3847
David Turnere7ebf902018-12-04 14:04:55 +00003848static INLINE void alloc_frame_mvs(AV1_COMMON *const cm, RefCntBuffer *buf) {
3849 assert(buf != NULL);
3850 ensure_mv_buffer(buf, cm);
3851 buf->width = cm->width;
3852 buf->height = cm->height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003853}
3854
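// Scales each active reference frame to the coded frame size when the
// dimensions differ, caching the scaled copy in cpi->scaled_ref_buf[];
// references that already match the coded size are reused directly and only
// have their reference count incremented.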
Cheng Chen46f30c72017-09-07 11:13:33 -07003855static void scale_references(AV1_COMP *cpi) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003856 AV1_COMMON *cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003857 const int num_planes = av1_num_planes(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003858 MV_REFERENCE_FRAME ref_frame;
Yaowu Xuf883b422016-08-30 14:01:10 -07003859 const AOM_REFFRAME ref_mask[INTER_REFS_PER_FRAME] = {
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02003860 AOM_LAST_FLAG, AOM_LAST2_FLAG, AOM_LAST3_FLAG, AOM_GOLD_FLAG,
3861 AOM_BWD_FLAG, AOM_ALT2_FLAG, AOM_ALT_FLAG
Yaowu Xuc27fc142016-08-22 16:08:15 -07003862 };
3863
3864 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003865 // Need to convert from AOM_REFFRAME to index into ref_mask (subtract 1).
Yaowu Xuc27fc142016-08-22 16:08:15 -07003866 if (cpi->ref_frame_flags & ref_mask[ref_frame - 1]) {
3867 BufferPool *const pool = cm->buffer_pool;
3868 const YV12_BUFFER_CONFIG *const ref =
David Turnera21966b2018-12-05 14:48:49 +00003869 get_ref_frame_yv12_buf(cm, ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003870
3871 if (ref == NULL) {
David Turnere7ebf902018-12-04 14:04:55 +00003872 cpi->scaled_ref_buf[ref_frame - 1] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003873 continue;
3874 }
3875
Yaowu Xuc27fc142016-08-22 16:08:15 -07003876 if (ref->y_crop_width != cm->width || ref->y_crop_height != cm->height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003877 int force_scaling = 0;
David Turnere7ebf902018-12-04 14:04:55 +00003878 RefCntBuffer *new_fb = cpi->scaled_ref_buf[ref_frame - 1];
3879 if (new_fb == NULL) {
3880 const int new_fb_idx = get_free_fb(cm);
3881 if (new_fb_idx == INVALID_IDX) {
Wan-Teh Chang4a8c0042018-10-05 09:41:52 -07003882 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
3883 "Unable to find free frame buffer");
David Turnere7ebf902018-12-04 14:04:55 +00003884 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003885 force_scaling = 1;
David Turnere7ebf902018-12-04 14:04:55 +00003886 new_fb = &pool->frame_bufs[new_fb_idx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003887 }
David Turnere7ebf902018-12-04 14:04:55 +00003888
3889 if (force_scaling || new_fb->buf.y_crop_width != cm->width ||
3890 new_fb->buf.y_crop_height != cm->height) {
Yaowu Xu671f2bd2016-09-30 15:07:57 -07003891 if (aom_realloc_frame_buffer(
David Turnere7ebf902018-12-04 14:04:55 +00003892 &new_fb->buf, cm->width, cm->height,
Urvang Joshi20cf30e2018-07-19 02:33:58 -07003893 cm->seq_params.subsampling_x, cm->seq_params.subsampling_y,
Satish Kumar Suman29909962019-01-09 10:31:21 +05303894 cm->seq_params.use_highbitdepth, cpi->oxcf.border_in_pixels,
Wan-Teh Chang41d286f2018-10-03 11:43:03 -07003895 cm->byte_alignment, NULL, NULL, NULL)) {
3896 if (force_scaling) {
3897 // Release the reference acquired in the get_free_fb() call above.
David Turnere7ebf902018-12-04 14:04:55 +00003898 --new_fb->ref_count;
Wan-Teh Chang41d286f2018-10-03 11:43:03 -07003899 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003900 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003901 "Failed to allocate frame buffer");
Wan-Teh Chang41d286f2018-10-03 11:43:03 -07003902 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07003903 av1_resize_and_extend_frame(
David Turnere7ebf902018-12-04 14:04:55 +00003904 ref, &new_fb->buf, (int)cm->seq_params.bit_depth, num_planes);
3905 cpi->scaled_ref_buf[ref_frame - 1] = new_fb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003906 alloc_frame_mvs(cm, new_fb);
3907 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003908 } else {
David Turnera21966b2018-12-05 14:48:49 +00003909 RefCntBuffer *buf = get_ref_frame_buf(cm, ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003910 buf->buf.y_crop_width = ref->y_crop_width;
3911 buf->buf.y_crop_height = ref->y_crop_height;
David Turnere7ebf902018-12-04 14:04:55 +00003912 cpi->scaled_ref_buf[ref_frame - 1] = buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003913 ++buf->ref_count;
3914 }
3915 } else {
David Turnere7ebf902018-12-04 14:04:55 +00003916 if (cpi->oxcf.pass != 0) cpi->scaled_ref_buf[ref_frame - 1] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003917 }
3918 }
3919}
3920
Yaowu Xuf883b422016-08-30 14:01:10 -07003921static void release_scaled_references(AV1_COMP *cpi) {
Imdad Sardharwalladadaba62018-02-23 12:06:56 +00003922 // TODO(isbs): only refresh the necessary frames, rather than all of them
David Turnere7ebf902018-12-04 14:04:55 +00003923 for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3924 RefCntBuffer *const buf = cpi->scaled_ref_buf[i];
3925 if (buf != NULL) {
Imdad Sardharwalladadaba62018-02-23 12:06:56 +00003926 --buf->ref_count;
David Turnere7ebf902018-12-04 14:04:55 +00003927 cpi->scaled_ref_buf[i] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003928 }
3929 }
3930}
3931
Yaowu Xuf883b422016-08-30 14:01:10 -07003932static void set_mv_search_params(AV1_COMP *cpi) {
3933 const AV1_COMMON *const cm = &cpi->common;
3934 const unsigned int max_mv_def = AOMMIN(cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003935
3936 // Default based on max resolution.
Yaowu Xuf883b422016-08-30 14:01:10 -07003937 cpi->mv_step_param = av1_init_search_range(max_mv_def);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003938
3939 if (cpi->sf.mv.auto_mv_step_size) {
3940 if (frame_is_intra_only(cm)) {
3941 // Initialize max_mv_magnitude for use in the first INTER frame
3942 // after a key/intra-only frame.
3943 cpi->max_mv_magnitude = max_mv_def;
3944 } else {
3945 if (cm->show_frame) {
3946 // Allow mv_steps to correspond to twice the max mv magnitude found
3947 // in the previous frame, capped by the default max_mv_magnitude based
3948 // on resolution.
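        // For example, with a 640x480 clip max_mv_def = 480; if the previous
        // frame's max_mv_magnitude was 64, the range handed to
        // av1_init_search_range() is AOMMIN(480, 2 * 64) = 128.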
Yaowu Xuf883b422016-08-30 14:01:10 -07003949 cpi->mv_step_param = av1_init_search_range(
3950 AOMMIN(max_mv_def, 2 * cpi->max_mv_magnitude));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003951 }
3952 cpi->max_mv_magnitude = 0;
3953 }
3954 }
3955}
3956
Yaowu Xuf883b422016-08-30 14:01:10 -07003957static void set_size_independent_vars(AV1_COMP *cpi) {
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003958 int i;
Debargha Mukherjeedf713102018-10-02 12:33:32 -07003959 AV1_COMMON *cm = &cpi->common;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003960 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
Debargha Mukherjeedf713102018-10-02 12:33:32 -07003961 cm->global_motion[i] = default_warp_params;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003962 }
3963 cpi->global_motion_search_done = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07003964 av1_set_speed_features_framesize_independent(cpi);
3965 av1_set_rd_speed_thresholds(cpi);
Debargha Mukherjeedf713102018-10-02 12:33:32 -07003966 cm->interp_filter = SWITCHABLE;
3967 cm->switchable_motion_mode = 1;
3968
3969 if (frame_is_intra_only(cm)) {
3970 if (cm->seq_params.force_screen_content_tools == 2) {
3971 cm->allow_screen_content_tools =
3972 cpi->oxcf.content == AOM_CONTENT_SCREEN ||
3973 is_screen_content(cpi->source->y_buffer,
3974 cpi->source->flags & YV12_FLAG_HIGHBITDEPTH,
3975 cm->seq_params.bit_depth, cpi->source->y_stride,
3976 cpi->source->y_width, cpi->source->y_height);
3977 } else {
3978 cm->allow_screen_content_tools =
3979 cm->seq_params.force_screen_content_tools;
3980 }
3981 }
Aniket Dhokf6d7ed82019-01-04 14:05:57 +05303982 cpi->is_screen_content_type = (cm->allow_screen_content_tools != 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003983}
3984
Yaowu Xuf883b422016-08-30 14:01:10 -07003985static void set_size_dependent_vars(AV1_COMP *cpi, int *q, int *bottom_index,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003986 int *top_index) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003987 AV1_COMMON *const cm = &cpi->common;
3988 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003989
3990 // Setup variables that depend on the dimensions of the frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07003991 av1_set_speed_features_framesize_dependent(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003992
Sebastien Alaiwan41cae6a2018-01-12 12:22:29 +01003993 // Decide q and q bounds.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07003994 *q = av1_rc_pick_q_and_bounds(cpi, cm->width, cm->height, bottom_index,
3995 top_index);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003996
James Zern01a9d702017-08-25 19:09:33 +00003997 if (!frame_is_intra_only(cm)) {
RogerZhou3b635242017-09-19 10:06:46 -07003998 set_high_precision_mv(cpi, (*q) < HIGH_PRECISION_MV_QTHRESH,
RogerZhou10a03802017-10-26 11:49:48 -07003999 cpi->common.cur_frame_force_integer_mv);
James Zern01a9d702017-08-25 19:09:33 +00004000 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004001
4002 // Configure experimental use of segmentation for enhanced coding of
4003 // static regions if indicated.
4004 // Only allowed in the second pass of a two pass encode, as it requires
4005 // lagged coding, and if the relevant speed feature flag is set.
4006 if (oxcf->pass == 2 && cpi->sf.static_segmentation)
4007 configure_static_seg_features(cpi);
4008}
4009
Yaowu Xuf883b422016-08-30 14:01:10 -07004010static void init_motion_estimation(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004011 int y_stride = cpi->scaled_source.y_stride;
4012
4013 if (cpi->sf.mv.search_method == NSTEP) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004014 av1_init3smotion_compensation(&cpi->ss_cfg, y_stride);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004015 } else if (cpi->sf.mv.search_method == DIAMOND) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004016 av1_init_dsmotion_compensation(&cpi->ss_cfg, y_stride);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004017 }
4018}
4019
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07004020#define COUPLED_CHROMA_FROM_LUMA_RESTORATION 0
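// Sizing sketch for set_restoration_unit_size() (assuming
// RESTORATION_UNITSIZE_MAX is 256, as in the current headers): frames larger
// than CIF (352x288) get 256x256 luma restoration units, smaller frames get
// 128x128; with COUPLED_CHROMA_FROM_LUMA_RESTORATION disabled (s == 0) the
// chroma planes use the same unit size as luma.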
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01004021static void set_restoration_unit_size(int width, int height, int sx, int sy,
4022 RestorationInfo *rst) {
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08004023 (void)width;
4024 (void)height;
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07004025 (void)sx;
4026 (void)sy;
4027#if COUPLED_CHROMA_FROM_LUMA_RESTORATION
4028 int s = AOMMIN(sx, sy);
4029#else
4030 int s = 0;
4031#endif // !COUPLED_CHROMA_FROM_LUMA_RESTORATION
4032
Debargha Mukherjee5f7f3672017-08-12 10:22:49 -07004033 if (width * height > 352 * 288)
Urvang Joshi813186b2018-03-08 15:38:46 -08004034 rst[0].restoration_unit_size = RESTORATION_UNITSIZE_MAX;
Debargha Mukherjee5f7f3672017-08-12 10:22:49 -07004035 else
Urvang Joshi813186b2018-03-08 15:38:46 -08004036 rst[0].restoration_unit_size = (RESTORATION_UNITSIZE_MAX >> 1);
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01004037 rst[1].restoration_unit_size = rst[0].restoration_unit_size >> s;
4038 rst[2].restoration_unit_size = rst[1].restoration_unit_size;
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08004039}
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08004040
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05304041static void init_ref_frame_bufs(AV1_COMP *cpi) {
4042 AV1_COMMON *const cm = &cpi->common;
Cheng Chen46f30c72017-09-07 11:13:33 -07004043 int i;
4044 BufferPool *const pool = cm->buffer_pool;
Jack Haughtonddb80602018-11-21 16:41:49 +00004045 cm->cur_frame = NULL;
Cheng Chen46f30c72017-09-07 11:13:33 -07004046 for (i = 0; i < REF_FRAMES; ++i) {
David Turnere7ebf902018-12-04 14:04:55 +00004047 cm->ref_frame_map[i] = NULL;
Wan-Teh Changd05e0332018-10-03 12:00:43 -07004048 }
4049 for (i = 0; i < FRAME_BUFFERS; ++i) {
Cheng Chen46f30c72017-09-07 11:13:33 -07004050 pool->frame_bufs[i].ref_count = 0;
4051 }
RogerZhou86902d02018-02-28 15:29:16 -08004052 if (cm->seq_params.force_screen_content_tools) {
Hui Su2d5fd742018-02-21 18:10:37 -08004053 for (i = 0; i < FRAME_BUFFERS; ++i) {
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05304054 av1_hash_table_init(&pool->frame_bufs[i].hash_table, &cpi->td.mb);
Hui Su2d5fd742018-02-21 18:10:37 -08004055 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004056 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004057}
4058
Yaowu Xud3e7c682017-12-21 14:08:25 -08004059static void check_initial_width(AV1_COMP *cpi, int use_highbitdepth,
Cheng Chen46f30c72017-09-07 11:13:33 -07004060 int subsampling_x, int subsampling_y) {
4061 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004062 SequenceHeader *const seq_params = &cm->seq_params;
Cheng Chen46f30c72017-09-07 11:13:33 -07004063
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004064 if (!cpi->initial_width || seq_params->use_highbitdepth != use_highbitdepth ||
4065 seq_params->subsampling_x != subsampling_x ||
4066 seq_params->subsampling_y != subsampling_y) {
4067 seq_params->subsampling_x = subsampling_x;
4068 seq_params->subsampling_y = subsampling_y;
4069 seq_params->use_highbitdepth = use_highbitdepth;
Cheng Chen46f30c72017-09-07 11:13:33 -07004070
4071 alloc_raw_frame_buffers(cpi);
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05304072 init_ref_frame_bufs(cpi);
Cheng Chen46f30c72017-09-07 11:13:33 -07004073 alloc_util_frame_buffers(cpi);
4074
4075 init_motion_estimation(cpi); // TODO(agrange) This can be removed.
4076
4077 cpi->initial_width = cm->width;
4078 cpi->initial_height = cm->height;
4079 cpi->initial_mbs = cm->MBs;
4080 }
4081}
4082
4083// Returns 1 if the assigned width or height was <= 0.
4084static int set_size_literal(AV1_COMP *cpi, int width, int height) {
4085 AV1_COMMON *cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004086 const int num_planes = av1_num_planes(cm);
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004087 check_initial_width(cpi, cm->seq_params.use_highbitdepth,
4088 cm->seq_params.subsampling_x,
4089 cm->seq_params.subsampling_y);
Cheng Chen46f30c72017-09-07 11:13:33 -07004090
4091 if (width <= 0 || height <= 0) return 1;
4092
4093 cm->width = width;
Cheng Chen46f30c72017-09-07 11:13:33 -07004094 cm->height = height;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004095
4096 if (cpi->initial_width && cpi->initial_height &&
4097 (cm->width > cpi->initial_width || cm->height > cpi->initial_height)) {
4098 av1_free_context_buffers(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004099 av1_free_pc_tree(&cpi->td, num_planes);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004100 alloc_compressor_data(cpi);
4101 realloc_segmentation_maps(cpi);
4102 cpi->initial_width = cpi->initial_height = 0;
Cheng Chen46f30c72017-09-07 11:13:33 -07004103 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004104 update_frame_size(cpi);
4105
4106 return 0;
4107}
4108
Fergus Simpsonbc189932017-05-16 17:02:39 -07004109static void set_frame_size(AV1_COMP *cpi, int width, int height) {
Fergus Simpsonbc189932017-05-16 17:02:39 -07004110 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004111 const SequenceHeader *const seq_params = &cm->seq_params;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004112 const int num_planes = av1_num_planes(cm);
Fergus Simpsonbc189932017-05-16 17:02:39 -07004113 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004114 int ref_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004115
Fergus Simpsonbc189932017-05-16 17:02:39 -07004116 if (width != cm->width || height != cm->height) {
Fergus Simpson3502d082017-04-10 12:25:07 -07004117 // There has been a change in the encoded frame size
Cheng Chen46f30c72017-09-07 11:13:33 -07004118 set_size_literal(cpi, width, height);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004119 set_mv_search_params(cpi);
Urvang Joshic8b52d52018-03-23 13:16:51 -07004120 // Recalculate 'all_lossless' in case super-resolution was (un)selected.
Cheng Chen09c83a52018-06-05 12:27:36 -07004121 cm->all_lossless = cm->coded_lossless && !av1_superres_scaled(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004122 }
4123
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004124 if (cpi->oxcf.pass == 2) {
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004125 av1_set_target_rate(cpi, cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004126 }
4127
David Turnere7ebf902018-12-04 14:04:55 +00004128 alloc_frame_mvs(cm, cm->cur_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004129
Cherma Rajan A71d20db2018-04-27 11:15:32 +05304130 // Allocate above context buffers
Cherma Rajan Af1479082018-05-09 14:26:34 +05304131 if (cm->num_allocated_above_context_planes < av1_num_planes(cm) ||
4132 cm->num_allocated_above_context_mi_col < cm->mi_cols ||
Cherma Rajan A71d20db2018-04-27 11:15:32 +05304133 cm->num_allocated_above_contexts < cm->tile_rows) {
4134 av1_free_above_context_buffers(cm, cm->num_allocated_above_contexts);
4135 if (av1_alloc_above_context_buffers(cm, cm->tile_rows))
4136 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
4137 "Failed to allocate context buffers");
4138 }
4139
Yaowu Xuc27fc142016-08-22 16:08:15 -07004140 // Reset the frame pointers to the current frame size.
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004141 if (aom_realloc_frame_buffer(
Jack Haughtonddb80602018-11-21 16:41:49 +00004142 &cm->cur_frame->buf, cm->width, cm->height, seq_params->subsampling_x,
4143 seq_params->subsampling_y, seq_params->use_highbitdepth,
Satish Kumar Suman29909962019-01-09 10:31:21 +05304144 cpi->oxcf.border_in_pixels, cm->byte_alignment, NULL, NULL, NULL))
Yaowu Xuf883b422016-08-30 14:01:10 -07004145 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004146 "Failed to allocate frame buffer");
4147
Rupert Swarbrickf88bc042017-10-18 10:45:51 +01004148 const int frame_width = cm->superres_upscaled_width;
4149 const int frame_height = cm->superres_upscaled_height;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004150 set_restoration_unit_size(frame_width, frame_height,
4151 seq_params->subsampling_x,
4152 seq_params->subsampling_y, cm->rst_info);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004153 for (int i = 0; i < num_planes; ++i)
Rupert Swarbrick1a96c3f2017-10-24 11:55:00 +01004154 cm->rst_info[i].frame_restoration_type = RESTORE_NONE;
Rupert Swarbrickf88bc042017-10-18 10:45:51 +01004155
4156 av1_alloc_restoration_buffers(cm);
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004157 alloc_util_frame_buffers(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004158 init_motion_estimation(cpi);
4159
4160 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00004161 RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
David Turnere7ebf902018-12-04 14:04:55 +00004162 if (buf != NULL) {
David Turnera21966b2018-12-05 14:48:49 +00004163 struct scale_factors *sf = get_ref_scale_factors(cm, ref_frame);
4164 av1_setup_scale_factors_for_frame(sf, buf->buf.y_crop_width,
David Turner1bcefb32018-11-19 17:54:00 +00004165 buf->buf.y_crop_height, cm->width,
Debargha Mukherjeee242a812018-03-07 21:43:09 -08004166 cm->height);
David Turnera21966b2018-12-05 14:48:49 +00004167 if (av1_is_scaled(sf)) aom_extend_frame_borders(&buf->buf, num_planes);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004168 }
4169 }
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004170
Hui Su5ebd8702018-01-08 18:09:20 -08004171 av1_setup_scale_factors_for_frame(&cm->sf_identity, cm->width, cm->height,
Debargha Mukherjeee242a812018-03-07 21:43:09 -08004172 cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004173
4174 set_ref_ptrs(cm, xd, LAST_FRAME, LAST_FRAME);
4175}
4176
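// Both the resize and superres scales are expressed as a denominator against
// SCALE_NUMERATOR (assumed here to be 8, as used elsewhere in the codebase):
// a denominator of 8 means no scaling and 16 means half resolution. As a
// rough worked example, a 1920-wide source with denominator 12 is coded at
// 1920 * 8 / 12 = 1280 samples. The RESIZE_RANDOM case below draws the
// denominator uniformly from [8, 16] via lcg_rand16(&seed) % 9 + 8.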
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004177static uint8_t calculate_next_resize_scale(const AV1_COMP *cpi) {
            4178  // Seed for the pseudo-random resize denominator (RESIZE_RANDOM mode).
4179 static unsigned int seed = 56789;
4180 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Urvang Joshide71d142017-10-05 12:12:15 -07004181 if (oxcf->pass == 1) return SCALE_NUMERATOR;
4182 uint8_t new_denom = SCALE_NUMERATOR;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004183
Debargha Mukherjee2b7c2b32018-04-10 07:35:28 -07004184 if (cpi->common.seq_params.reduced_still_picture_hdr) return SCALE_NUMERATOR;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004185 switch (oxcf->resize_mode) {
Urvang Joshide71d142017-10-05 12:12:15 -07004186 case RESIZE_NONE: new_denom = SCALE_NUMERATOR; break;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004187 case RESIZE_FIXED:
David Turnerd2a592e2018-11-16 14:59:31 +00004188 if (cpi->common.current_frame.frame_type == KEY_FRAME)
Urvang Joshide71d142017-10-05 12:12:15 -07004189 new_denom = oxcf->resize_kf_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004190 else
Urvang Joshide71d142017-10-05 12:12:15 -07004191 new_denom = oxcf->resize_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004192 break;
Urvang Joshide71d142017-10-05 12:12:15 -07004193 case RESIZE_RANDOM: new_denom = lcg_rand16(&seed) % 9 + 8; break;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004194 default: assert(0);
4195 }
Urvang Joshide71d142017-10-05 12:12:15 -07004196 return new_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004197}
4198
Debargha Mukherjeee3cd5a52018-11-29 11:05:22 -08004199#define ENERGY_BY_Q2_THRESH 0.01
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004200#define ENERGY_BY_AC_THRESH 0.2
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004201
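// Derives a superres denominator from the horizontal frequency energy of the
// source. Roughly: a frequency band is treated as significant if its energy
// exceeds min(threshq * q^2, threshp * energy[1]); k is scanned from 16 down
// to 9 and stops at the first significant band, and the denominator returned
// is 3 * SCALE_NUMERATOR - k. Assuming SCALE_NUMERATOR is 8, k == 16 yields a
// denominator of 8 (no superres), while falling all the way through leaves
// k == 8 and yields 16 (2x horizontal downscaling), so frames with little
// high-frequency content get coded at a lower horizontal resolution.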
4202static uint8_t get_superres_denom_from_qindex_energy(int qindex, double *energy,
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004203 double threshq,
4204 double threshp) {
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004205 const double q = av1_convert_qindex_to_q(qindex, AOM_BITS_8);
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004206 const double tq = threshq * q * q;
4207 const double tp = threshp * energy[1];
4208 const double thresh = AOMMIN(tq, tp);
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004209 int k;
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004210 for (k = 16; k > 8; --k) {
4211 if (energy[k - 1] > thresh) break;
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004212 }
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004213 return 3 * SCALE_NUMERATOR - k;
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004214}
4215
4216static uint8_t get_superres_denom_for_qindex(const AV1_COMP *cpi, int qindex) {
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004217 double energy[16];
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004218 analyze_hor_freq(cpi, energy);
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004219 /*
4220 printf("\nenergy = [");
4221 for (int k = 1; k < 16; ++k) printf("%f, ", energy[k]);
4222 printf("]\n");
4223 */
4224 return get_superres_denom_from_qindex_energy(
4225 qindex, energy, ENERGY_BY_Q2_THRESH, ENERGY_BY_AC_THRESH);
Debargha Mukherjeef48b0d22018-11-20 12:23:43 -08004226}
4227
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004228static uint8_t calculate_next_superres_scale(AV1_COMP *cpi) {
            4229  // Seed for the pseudo-random superres denominator (SUPERRES_RANDOM mode).
4230 static unsigned int seed = 34567;
4231 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Urvang Joshide71d142017-10-05 12:12:15 -07004232 if (oxcf->pass == 1) return SCALE_NUMERATOR;
4233 uint8_t new_denom = SCALE_NUMERATOR;
Urvang Joshi2c92b072018-03-19 17:23:31 -07004234
4235 // Make sure that superres mode of the frame is consistent with the
4236 // sequence-level flag.
4237 assert(IMPLIES(oxcf->superres_mode != SUPERRES_NONE,
4238 cpi->common.seq_params.enable_superres));
4239 assert(IMPLIES(!cpi->common.seq_params.enable_superres,
4240 oxcf->superres_mode == SUPERRES_NONE));
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004241
4242 switch (oxcf->superres_mode) {
Urvang Joshide71d142017-10-05 12:12:15 -07004243 case SUPERRES_NONE: new_denom = SCALE_NUMERATOR; break;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004244 case SUPERRES_FIXED:
David Turnerd2a592e2018-11-16 14:59:31 +00004245 if (cpi->common.current_frame.frame_type == KEY_FRAME)
Urvang Joshide71d142017-10-05 12:12:15 -07004246 new_denom = oxcf->superres_kf_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004247 else
Urvang Joshide71d142017-10-05 12:12:15 -07004248 new_denom = oxcf->superres_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004249 break;
Urvang Joshide71d142017-10-05 12:12:15 -07004250 case SUPERRES_RANDOM: new_denom = lcg_rand16(&seed) % 9 + 8; break;
Urvang Joshif1fa6862018-01-08 16:39:33 -08004251 case SUPERRES_QTHRESH: {
Debargha Mukherjeedf713102018-10-02 12:33:32 -07004252 // Do not use superres when screen content tools are used.
4253 if (cpi->common.allow_screen_content_tools) break;
Debargha Mukherjee2b2c5fd2018-11-14 13:21:24 -08004254 if (oxcf->rc_mode == AOM_VBR || oxcf->rc_mode == AOM_CQ)
4255 av1_set_target_rate(cpi, cpi->oxcf.width, cpi->oxcf.height);
Urvang Joshi2c92b072018-03-19 17:23:31 -07004256 int bottom_index, top_index;
4257 const int q = av1_rc_pick_q_and_bounds(
4258 cpi, cpi->oxcf.width, cpi->oxcf.height, &bottom_index, &top_index);
Debargha Mukherjeef48b0d22018-11-20 12:23:43 -08004259
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004260 const int qthresh = (frame_is_intra_only(&cpi->common))
4261 ? oxcf->superres_kf_qthresh
4262 : oxcf->superres_qthresh;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004263 if (q < qthresh) {
Urvang Joshide71d142017-10-05 12:12:15 -07004264 new_denom = SCALE_NUMERATOR;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004265 } else {
Debargha Mukherjeee3cd5a52018-11-29 11:05:22 -08004266 new_denom = get_superres_denom_for_qindex(cpi, q);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004267 }
4268 break;
Urvang Joshif1fa6862018-01-08 16:39:33 -08004269 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004270 default: assert(0);
4271 }
Urvang Joshide71d142017-10-05 12:12:15 -07004272 return new_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004273}
4274
Urvang Joshide71d142017-10-05 12:12:15 -07004275static int dimension_is_ok(int orig_dim, int resized_dim, int denom) {
4276 return (resized_dim * SCALE_NUMERATOR >= orig_dim * denom / 2);
4277}
4278
4279static int dimensions_are_ok(int owidth, int oheight, size_params_type *rsz) {
Urvang Joshi94ad3702017-12-06 11:38:08 -08004280 // Only need to check the width, as scaling is horizontal only.
4281 (void)oheight;
4282 return dimension_is_ok(owidth, rsz->resize_width, rsz->superres_denom);
Urvang Joshide71d142017-10-05 12:12:15 -07004283}
4284
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004285static int validate_size_scales(RESIZE_MODE resize_mode,
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004286 SUPERRES_MODE superres_mode, int owidth,
4287 int oheight, size_params_type *rsz) {
Urvang Joshide71d142017-10-05 12:12:15 -07004288 if (dimensions_are_ok(owidth, oheight, rsz)) { // Nothing to do.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004289 return 1;
Urvang Joshide71d142017-10-05 12:12:15 -07004290 }
4291
Urvang Joshi69fde2e2017-10-09 15:34:18 -07004292 // Calculate current resize scale.
Urvang Joshide71d142017-10-05 12:12:15 -07004293 int resize_denom =
4294 AOMMAX(DIVIDE_AND_ROUND(owidth * SCALE_NUMERATOR, rsz->resize_width),
4295 DIVIDE_AND_ROUND(oheight * SCALE_NUMERATOR, rsz->resize_height));
4296
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004297 if (resize_mode != RESIZE_RANDOM && superres_mode == SUPERRES_RANDOM) {
Urvang Joshide71d142017-10-05 12:12:15 -07004298 // Alter superres scale as needed to enforce conformity.
4299 rsz->superres_denom =
4300 (2 * SCALE_NUMERATOR * SCALE_NUMERATOR) / resize_denom;
4301 if (!dimensions_are_ok(owidth, oheight, rsz)) {
4302 if (rsz->superres_denom > SCALE_NUMERATOR) --rsz->superres_denom;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004303 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004304 } else if (resize_mode == RESIZE_RANDOM && superres_mode != SUPERRES_RANDOM) {
Urvang Joshide71d142017-10-05 12:12:15 -07004305 // Alter resize scale as needed to enforce conformity.
4306 resize_denom =
4307 (2 * SCALE_NUMERATOR * SCALE_NUMERATOR) / rsz->superres_denom;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004308 rsz->resize_width = owidth;
4309 rsz->resize_height = oheight;
4310 av1_calculate_scaled_size(&rsz->resize_width, &rsz->resize_height,
Urvang Joshide71d142017-10-05 12:12:15 -07004311 resize_denom);
4312 if (!dimensions_are_ok(owidth, oheight, rsz)) {
4313 if (resize_denom > SCALE_NUMERATOR) {
4314 --resize_denom;
4315 rsz->resize_width = owidth;
4316 rsz->resize_height = oheight;
4317 av1_calculate_scaled_size(&rsz->resize_width, &rsz->resize_height,
4318 resize_denom);
4319 }
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004320 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004321 } else if (resize_mode == RESIZE_RANDOM && superres_mode == SUPERRES_RANDOM) {
Urvang Joshide71d142017-10-05 12:12:15 -07004322 // Alter both resize and superres scales as needed to enforce conformity.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004323 do {
Urvang Joshide71d142017-10-05 12:12:15 -07004324 if (resize_denom > rsz->superres_denom)
4325 --resize_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004326 else
Urvang Joshide71d142017-10-05 12:12:15 -07004327 --rsz->superres_denom;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004328 rsz->resize_width = owidth;
4329 rsz->resize_height = oheight;
4330 av1_calculate_scaled_size(&rsz->resize_width, &rsz->resize_height,
Urvang Joshide71d142017-10-05 12:12:15 -07004331 resize_denom);
4332 } while (!dimensions_are_ok(owidth, oheight, rsz) &&
4333 (resize_denom > SCALE_NUMERATOR ||
4334 rsz->superres_denom > SCALE_NUMERATOR));
Urvang Joshif1fa6862018-01-08 16:39:33 -08004335 } else { // We are allowed to alter neither resize scale nor superres
4336 // scale.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004337 return 0;
4338 }
Urvang Joshide71d142017-10-05 12:12:15 -07004339 return dimensions_are_ok(owidth, oheight, rsz);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004340}
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004341
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004342// Calculates resize and superres params for next frame
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004343size_params_type av1_calculate_next_size_params(AV1_COMP *cpi) {
4344 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Debargha Mukherjee3a4959f2018-02-26 15:34:03 -08004345 size_params_type rsz = { oxcf->width, oxcf->height, SCALE_NUMERATOR };
Urvang Joshide71d142017-10-05 12:12:15 -07004346 int resize_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004347 if (oxcf->pass == 1) return rsz;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004348 if (cpi->resize_pending_width && cpi->resize_pending_height) {
4349 rsz.resize_width = cpi->resize_pending_width;
4350 rsz.resize_height = cpi->resize_pending_height;
4351 cpi->resize_pending_width = cpi->resize_pending_height = 0;
4352 } else {
Urvang Joshide71d142017-10-05 12:12:15 -07004353 resize_denom = calculate_next_resize_scale(cpi);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004354 rsz.resize_width = cpi->oxcf.width;
4355 rsz.resize_height = cpi->oxcf.height;
4356 av1_calculate_scaled_size(&rsz.resize_width, &rsz.resize_height,
Urvang Joshide71d142017-10-05 12:12:15 -07004357 resize_denom);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004358 }
Urvang Joshide71d142017-10-05 12:12:15 -07004359 rsz.superres_denom = calculate_next_superres_scale(cpi);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004360 if (!validate_size_scales(oxcf->resize_mode, oxcf->superres_mode, oxcf->width,
4361 oxcf->height, &rsz))
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004362 assert(0 && "Invalid scale parameters");
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004363 return rsz;
4364}
4365
Urvang Joshi22b150b2019-01-10 14:32:32 -08004366static void setup_frame_size_from_params(AV1_COMP *cpi,
4367 const size_params_type *rsz) {
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004368 int encode_width = rsz->resize_width;
4369 int encode_height = rsz->resize_height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004370
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004371 AV1_COMMON *cm = &cpi->common;
4372 cm->superres_upscaled_width = encode_width;
4373 cm->superres_upscaled_height = encode_height;
Urvang Joshide71d142017-10-05 12:12:15 -07004374 cm->superres_scale_denominator = rsz->superres_denom;
Urvang Joshi69fde2e2017-10-09 15:34:18 -07004375 av1_calculate_scaled_superres_size(&encode_width, &encode_height,
4376 rsz->superres_denom);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004377 set_frame_size(cpi, encode_width, encode_height);
4378}
4379
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004380static void setup_frame_size(AV1_COMP *cpi) {
Urvang Joshi22b150b2019-01-10 14:32:32 -08004381 // Reset superres params from previous frame.
4382 cpi->common.superres_scale_denominator = SCALE_NUMERATOR;
4383 const size_params_type rsz = av1_calculate_next_size_params(cpi);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004384 setup_frame_size_from_params(cpi, &rsz);
4385}
4386
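// After the frame has been coded at the reduced superres width, the
// reconstruction is upscaled back to the full (superres_upscaled) size, and
// the source is rescaled to match when regular resizing is also active, so
// that the later filter searches and metrics compare like-sized buffers.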
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004387static void superres_post_encode(AV1_COMP *cpi) {
4388 AV1_COMMON *cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004389 const int num_planes = av1_num_planes(cm);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004390
Cheng Chen09c83a52018-06-05 12:27:36 -07004391 if (!av1_superres_scaled(cm)) return;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004392
Urvang Joshid6b5d512018-03-20 13:34:38 -07004393 assert(cpi->oxcf.enable_superres);
4394 assert(!is_lossless_requested(&cpi->oxcf));
Urvang Joshic8b52d52018-03-23 13:16:51 -07004395 assert(!cm->all_lossless);
Urvang Joshid6b5d512018-03-20 13:34:38 -07004396
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004397 av1_superres_upscale(cm, NULL);
4398
4399 // If regular resizing is occurring the source will need to be downscaled to
4400 // match the upscaled superres resolution. Otherwise the original source is
4401 // used.
Cheng Chen09c83a52018-06-05 12:27:36 -07004402 if (!av1_resize_scaled(cm)) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004403 cpi->source = cpi->unscaled_source;
4404 if (cpi->last_source != NULL) cpi->last_source = cpi->unscaled_last_source;
4405 } else {
Fergus Simpsonabd43432017-06-12 15:54:43 -07004406 assert(cpi->unscaled_source->y_crop_width != cm->superres_upscaled_width);
4407 assert(cpi->unscaled_source->y_crop_height != cm->superres_upscaled_height);
Urvang Joshif1fa6862018-01-08 16:39:33 -08004408 // Do downscale. cm->(width|height) has been updated by
4409 // av1_superres_upscale
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004410 if (aom_realloc_frame_buffer(
4411 &cpi->scaled_source, cm->superres_upscaled_width,
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004412 cm->superres_upscaled_height, cm->seq_params.subsampling_x,
4413 cm->seq_params.subsampling_y, cm->seq_params.use_highbitdepth,
4414 AOM_BORDER_IN_PIXELS, cm->byte_alignment, NULL, NULL, NULL))
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004415 aom_internal_error(
4416 &cm->error, AOM_CODEC_MEM_ERROR,
4417 "Failed to reallocate scaled source buffer for superres");
4418 assert(cpi->scaled_source.y_crop_width == cm->superres_upscaled_width);
4419 assert(cpi->scaled_source.y_crop_height == cm->superres_upscaled_height);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004420 av1_resize_and_extend_frame(cpi->unscaled_source, &cpi->scaled_source,
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004421 (int)cm->seq_params.bit_depth, num_planes);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004422 cpi->source = &cpi->scaled_source;
4423 }
4424}
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004425
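// Applies the in-loop filtering stages to the reconstruction in order:
// deblocking, CDEF, superres upscaling, then loop restoration. Each stage is
// skipped (and its syntax neutralized) when lossless coding or large-scale
// tile mode rules it out, as computed in the no_* flags below.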
4426static void loopfilter_frame(AV1_COMP *cpi, AV1_COMMON *cm) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004427 const int num_planes = av1_num_planes(cm);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004428 MACROBLOCKD *xd = &cpi->td.mb.e_mbd;
Urvang Joshi14072aa2018-03-21 17:43:36 -07004429
Urvang Joshic8b52d52018-03-23 13:16:51 -07004430 assert(IMPLIES(is_lossless_requested(&cpi->oxcf),
4431 cm->coded_lossless && cm->all_lossless));
4432
4433 const int no_loopfilter = cm->coded_lossless || cm->large_scale_tile;
4434 const int no_cdef =
Debargha Mukherjee98a311c2018-03-25 16:33:11 -07004435 !cm->seq_params.enable_cdef || cm->coded_lossless || cm->large_scale_tile;
4436 const int no_restoration = !cm->seq_params.enable_restoration ||
4437 cm->all_lossless || cm->large_scale_tile;
Urvang Joshi14072aa2018-03-21 17:43:36 -07004438
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004439 struct loopfilter *lf = &cm->lf;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004440
4441 if (no_loopfilter) {
Cheng Chen179479f2017-08-04 10:56:39 -07004442 lf->filter_level[0] = 0;
4443 lf->filter_level[1] = 0;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004444 } else {
4445 struct aom_usec_timer timer;
4446
4447 aom_clear_system_state();
4448
4449 aom_usec_timer_start(&timer);
4450
4451 av1_pick_filter_level(cpi->source, cpi, cpi->sf.lpf_pick);
4452
4453 aom_usec_timer_mark(&timer);
4454 cpi->time_pick_lpf += aom_usec_timer_elapsed(&timer);
4455 }
4456
Debargha Mukherjee2382b142018-02-26 14:31:32 -08004457 if (lf->filter_level[0] || lf->filter_level[1]) {
Deepa K G964e72e2018-05-16 16:56:01 +05304458 if (cpi->num_workers > 1)
David Turnerc29e1a92018-12-06 14:10:14 +00004459 av1_loop_filter_frame_mt(&cm->cur_frame->buf, cm, xd, 0, num_planes, 0,
Cheng Chene3600cd2018-09-21 18:45:42 -07004460#if LOOP_FILTER_BITMASK
4461 0,
4462#endif
Deepa K G964e72e2018-05-16 16:56:01 +05304463 cpi->workers, cpi->num_workers,
4464 &cpi->lf_row_sync);
4465 else
David Turnerc29e1a92018-12-06 14:10:14 +00004466 av1_loop_filter_frame(&cm->cur_frame->buf, cm, xd,
Cheng Chen84b09932018-08-12 17:35:13 -07004467#if LOOP_FILTER_BITMASK
4468 0,
Cheng Chen8ab1f442018-04-27 18:01:52 -07004469#endif
Cheng Chen84b09932018-08-12 17:35:13 -07004470 0, num_planes, 0);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004471 }
Debargha Mukherjeee168a782017-08-31 12:30:10 -07004472
Yaowu Xu35ee2342017-11-08 11:50:46 -08004473 if (!no_restoration)
David Turnerc29e1a92018-12-06 14:10:14 +00004474 av1_loop_restoration_save_boundary_lines(&cm->cur_frame->buf, cm, 0);
Ola Hugosson1e7f2d02017-09-22 21:36:26 +02004475
Yaowu Xu35ee2342017-11-08 11:50:46 -08004476 if (no_cdef) {
David Turnerebf96f42018-11-14 16:57:57 +00004477 cm->cdef_info.cdef_bits = 0;
4478 cm->cdef_info.cdef_strengths[0] = 0;
4479 cm->cdef_info.nb_cdef_strengths = 1;
4480 cm->cdef_info.cdef_uv_strengths[0] = 0;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004481 } else {
Steinar Midtskogen59782122017-07-20 08:49:43 +02004482 // Find CDEF parameters
David Turnerc29e1a92018-12-06 14:10:14 +00004483 av1_cdef_search(&cm->cur_frame->buf, cpi->source, cm, xd,
Debargha Mukherjeed7338aa2017-11-04 07:34:50 -07004484 cpi->sf.fast_cdef_search);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004485
4486 // Apply the filter
David Turnerc29e1a92018-12-06 14:10:14 +00004487 av1_cdef_frame(&cm->cur_frame->buf, cm, xd);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004488 }
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004489
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004490 superres_post_encode(cpi);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004491
Yaowu Xu35ee2342017-11-08 11:50:46 -08004492 if (no_restoration) {
4493 cm->rst_info[0].frame_restoration_type = RESTORE_NONE;
4494 cm->rst_info[1].frame_restoration_type = RESTORE_NONE;
4495 cm->rst_info[2].frame_restoration_type = RESTORE_NONE;
4496 } else {
David Turnerc29e1a92018-12-06 14:10:14 +00004497 av1_loop_restoration_save_boundary_lines(&cm->cur_frame->buf, cm, 1);
Yaowu Xu35ee2342017-11-08 11:50:46 -08004498 av1_pick_filter_restoration(cpi->source, cpi);
4499 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
4500 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
4501 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05304502 if (cpi->num_workers > 1)
David Turnerc29e1a92018-12-06 14:10:14 +00004503 av1_loop_restoration_filter_frame_mt(&cm->cur_frame->buf, cm, 0,
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05304504 cpi->workers, cpi->num_workers,
4505 &cpi->lr_row_sync, &cpi->lr_ctxt);
4506 else
David Turnerc29e1a92018-12-06 14:10:14 +00004507 av1_loop_restoration_filter_frame(&cm->cur_frame->buf, cm, 0,
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05304508 &cpi->lr_ctxt);
Yaowu Xu35ee2342017-11-08 11:50:46 -08004509 }
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004510 }
Fergus Simpsonbc189932017-05-16 17:02:39 -07004511}
4512
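// Builds the refresh_frame_flags bitmask signalled in the frame header: bit i
// set means reference slot i of ref_frame_map will be overwritten by the
// current frame. Shown key frames and S-frames refresh every slot (0xFF).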
David Turner996b2c12018-12-07 15:52:30 +00004513static int get_refresh_frame_flags(const AV1_COMP *const cpi) {
4514 const AV1_COMMON *const cm = &cpi->common;
4515
4516 // Switch frames and shown key-frames overwrite all reference slots
4517 if ((cm->current_frame.frame_type == KEY_FRAME && cm->show_frame) ||
4518 frame_is_sframe(cm))
4519 return 0xFF;
4520
4521 int refresh_mask = 0;
4522
4523 // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
4524 // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
4525 // the 3 LAST reference frames will be updated accordingly, i.e.:
4526 // (1) The original virtual index for LAST3_FRAME will become the new virtual
4527 // index for LAST_FRAME; and
4528 // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
4529 // shifted and become the new virtual indexes for LAST2_FRAME and
4530 // LAST3_FRAME.
4531 refresh_mask |=
4532 (cpi->refresh_last_frame << get_ref_frame_map_idx(cm, LAST3_FRAME));
4533
4534#if USE_SYMM_MULTI_LAYER
4535 const int bwd_ref_frame =
4536 (cpi->new_bwdref_update_rule == 1) ? EXTREF_FRAME : BWDREF_FRAME;
4537#else
4538 const int bwd_ref_frame = BWDREF_FRAME;
4539#endif
4540 refresh_mask |=
4541 (cpi->refresh_bwd_ref_frame << get_ref_frame_map_idx(cm, bwd_ref_frame));
4542
4543 refresh_mask |=
4544 (cpi->refresh_alt2_ref_frame << get_ref_frame_map_idx(cm, ALTREF2_FRAME));
4545
4546 if (av1_preserve_existing_gf(cpi)) {
4547 // We have decided to preserve the previously existing golden frame as our
4548 // new ARF frame. However, in the short term we leave it in the GF slot and,
4549 // if we're updating the GF with the current decoded frame, we save it
4550 // instead to the ARF slot.
            4551      // Later, in the function update_reference_frames() (in this file) we
4552 // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
4553 // there so that it can be done outside of the recode loop.
4554 // Note: This is highly specific to the use of ARF as a forward reference,
4555 // and this needs to be generalized as other uses are implemented
4556 // (like RTC/temporal scalability).
4557
4558 if (!cpi->preserve_arf_as_gld) {
4559 refresh_mask |= (cpi->refresh_golden_frame
4560 << get_ref_frame_map_idx(cm, ALTREF_FRAME));
4561 }
4562 } else {
4563 refresh_mask |=
4564 (cpi->refresh_golden_frame << get_ref_frame_map_idx(cm, GOLDEN_FRAME));
4565 refresh_mask |=
4566 (cpi->refresh_alt_ref_frame << get_ref_frame_map_idx(cm, ALTREF_FRAME));
4567 }
4568 return refresh_mask;
4569}
4570
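// If the frame-level filter is SWITCHABLE but the accumulated counts show
// that only one interpolation filter was ever picked, signalling that filter
// at the frame level saves the per-block filter syntax. Note that the loop
// below only makes the substitution for EIGHTTAP_REGULAR.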
David Turnerf2b334c2018-12-13 13:00:55 +00004571static void fix_interp_filter(InterpFilter *const interp_filter,
4572 const FRAME_COUNTS *const counts) {
4573 if (*interp_filter == SWITCHABLE) {
4574 // Check to see if only one of the filters is actually used
4575 int count[SWITCHABLE_FILTERS] = { 0 };
4576 int num_filters_used = 0;
4577 for (int i = 0; i < SWITCHABLE_FILTERS; ++i) {
4578 for (int j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
4579 count[i] += counts->switchable_interp[j][i];
4580 num_filters_used += (count[i] > 0);
4581 }
4582 if (num_filters_used == 1) {
4583 // Only one filter is used. So set the filter at frame level
4584 for (int i = 0; i < SWITCHABLE_FILTERS; ++i) {
4585 if (count[i]) {
4586 if (i == EIGHTTAP_REGULAR) *interp_filter = i;
4587 break;
4588 }
4589 }
4590 }
4591 }
4592}
4593
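// Final fixups applied after the main encode pass but before the bitstream is
// packed: computes refresh_frame_flags, records which slot backs each frame
// context type, validates the buffer used by show_existing_frame, copies the
// film grain parameters into the frame's RefCntBuffer, seeds every tile's
// entropy context from the frame context, and collapses SWITCHABLE to a
// single filter when possible.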
David Turner996b2c12018-12-07 15:52:30 +00004594static void finalize_encoded_frame(AV1_COMP *const cpi) {
4595 AV1_COMMON *const cm = &cpi->common;
David Turner99e990e2018-12-10 12:54:26 +00004596 CurrentFrame *const current_frame = &cm->current_frame;
David Turner996b2c12018-12-07 15:52:30 +00004597
4598 // This bitfield indicates which reference frame slots will be overwritten by
4599 // the current frame
David Turner99e990e2018-12-10 12:54:26 +00004600 current_frame->refresh_frame_flags = get_refresh_frame_flags(cpi);
4601
4602 if (!encode_show_existing_frame(cm)) {
4603 // Refresh fb_of_context_type[]: see encoder.h for explanation
4604 if (current_frame->frame_type == KEY_FRAME) {
4605 // All ref frames are refreshed, pick one that will live long enough
4606 cpi->fb_of_context_type[REGULAR_FRAME] = 0;
4607 } else {
4608 // If more than one frame is refreshed, it doesn't matter which one we
4609 // pick so pick the first. LST sometimes doesn't refresh any: this is ok
4610 const int current_frame_ref_type = get_current_frame_ref_type(cpi);
4611 for (int i = 0; i < REF_FRAMES; i++) {
4612 if (current_frame->refresh_frame_flags & (1 << i)) {
4613 cpi->fb_of_context_type[current_frame_ref_type] = i;
4614 break;
4615 }
4616 }
4617 }
4618 }
4619
4620 if (!cm->seq_params.reduced_still_picture_hdr &&
4621 encode_show_existing_frame(cm)) {
4622 RefCntBuffer *const frame_to_show =
4623 cm->ref_frame_map[cpi->existing_fb_idx_to_show];
4624
Wan-Teh Chang88cd1662019-01-14 12:38:41 -08004625 if (frame_to_show == NULL) {
David Turner99e990e2018-12-10 12:54:26 +00004626 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
4627 "Buffer does not contain a reconstructed frame");
4628 }
Wan-Teh Chang88cd1662019-01-14 12:38:41 -08004629 assert(frame_to_show->ref_count > 0);
David Turner99e990e2018-12-10 12:54:26 +00004630 assign_frame_buffer_p(&cm->cur_frame, frame_to_show);
4631 if (cm->reset_decoder_state && frame_to_show->frame_type != KEY_FRAME) {
4632 aom_internal_error(
4633 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
4634 "show_existing_frame to reset state on KEY_FRAME only");
4635 }
4636 }
David Turner08f909c2018-12-18 13:29:14 +00004637
4638 if (!encode_show_existing_frame(cm) &&
4639 cm->seq_params.film_grain_params_present &&
4640 (cm->show_frame || cm->showable_frame)) {
4641 // Copy the current frame's film grain params to the its corresponding
4642 // RefCntBuffer slot.
4643 cm->cur_frame->film_grain_params = cm->film_grain_params;
4644
4645 // We must update the parameters if this is not an INTER_FRAME
4646 if (current_frame->frame_type != INTER_FRAME)
4647 cm->cur_frame->film_grain_params.update_parameters = 1;
4648
4649 // Iterate the random seed for the next frame.
4650 cm->film_grain_params.random_seed += 3381;
4651 if (cm->film_grain_params.random_seed == 0)
4652 cm->film_grain_params.random_seed = 7391;
4653 }
David Turnerf2b334c2018-12-13 13:00:55 +00004654
4655 // Initialise all tiles' contexts from the global frame context
4656 for (int tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
4657 for (int tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
4658 const int tile_idx = tile_row * cm->tile_cols + tile_col;
4659 cpi->tile_data[tile_idx].tctx = *cm->fc;
4660 }
4661 }
4662
4663 fix_interp_filter(&cm->interp_filter, cpi->td.counts);
David Turner996b2c12018-12-07 15:52:30 +00004664}
4665
David Turner2f3b5df2019-01-02 14:30:50 +00004666// Called after encode_with_recode_loop() has just encoded a frame and packed
4667// its bitstream. This function works out whether we under- or over-shot
4668// our bitrate target and adjusts q as appropriate. Also decides whether
4669// or not we should do another recode loop, indicated by *loop
4670static void recode_loop_update_q(AV1_COMP *const cpi, int *const loop,
4671 int *const q, int *const q_low,
4672 int *const q_high, const int top_index,
4673 const int bottom_index,
4674 int *const undershoot_seen,
4675 int *const overshoot_seen,
4676 const int loop_at_this_size) {
4677 AV1_COMMON *const cm = &cpi->common;
4678 RATE_CONTROL *const rc = &cpi->rc;
4679
4680 int frame_over_shoot_limit = 0, frame_under_shoot_limit = 0;
4681 av1_rc_compute_frame_size_bounds(cpi, rc->this_frame_target,
4682 &frame_under_shoot_limit,
4683 &frame_over_shoot_limit);
4684 if (frame_over_shoot_limit == 0) frame_over_shoot_limit = 1;
4685
4686 if ((cm->current_frame.frame_type == KEY_FRAME) &&
4687 rc->this_key_frame_forced &&
4688 (rc->projected_frame_size < rc->max_frame_bandwidth)) {
4689 int last_q = *q;
4690 int64_t kf_err;
4691
4692 int64_t high_err_target = cpi->ambient_err;
4693 int64_t low_err_target = cpi->ambient_err >> 1;
4694
4695 if (cm->seq_params.use_highbitdepth) {
4696 kf_err = aom_highbd_get_y_sse(cpi->source, &cm->cur_frame->buf);
4697 } else {
4698 kf_err = aom_get_y_sse(cpi->source, &cm->cur_frame->buf);
4699 }
4700 // Prevent possible divide by zero error below for perfect KF
4701 kf_err += !kf_err;
4702
4703 // The key frame is not good enough or we can afford
4704 // to make it better without undue risk of popping.
4705 if ((kf_err > high_err_target &&
4706 rc->projected_frame_size <= frame_over_shoot_limit) ||
4707 (kf_err > low_err_target &&
4708 rc->projected_frame_size <= frame_under_shoot_limit)) {
4709 // Lower q_high
4710 *q_high = *q > *q_low ? *q - 1 : *q_low;
4711
4712 // Adjust Q
4713 *q = (int)((*q * high_err_target) / kf_err);
4714 *q = AOMMIN(*q, (*q_high + *q_low) >> 1);
4715 } else if (kf_err < low_err_target &&
4716 rc->projected_frame_size >= frame_under_shoot_limit) {
4717 // The key frame is much better than the previous frame
4718 // Raise q_low
4719 *q_low = *q < *q_high ? *q + 1 : *q_high;
4720
4721 // Adjust Q
4722 *q = (int)((*q * low_err_target) / kf_err);
4723 *q = AOMMIN(*q, (*q_high + *q_low + 1) >> 1);
4724 }
4725
4726 // Clamp Q to upper and lower limits:
4727 *q = clamp(*q, *q_low, *q_high);
4728
4729 *loop = *q != last_q;
4730 } else if (recode_loop_test(cpi, frame_over_shoot_limit,
4731 frame_under_shoot_limit, *q,
4732 AOMMAX(*q_high, top_index), bottom_index)) {
4733 // Is the projected frame size out of range and are we allowed
4734 // to attempt to recode.
4735 int last_q = *q;
4736 int retries = 0;
4737
4738 // Frame size out of permitted range:
4739 // Update correction factor & compute new Q to try...
4740 // Frame is too large
4741 if (rc->projected_frame_size > rc->this_frame_target) {
4742 // Special case if the projected size is > the max allowed.
4743 if (rc->projected_frame_size >= rc->max_frame_bandwidth)
4744 *q_high = rc->worst_quality;
4745
4746 // Raise Qlow as to at least the current value
4747 *q_low = *q < *q_high ? *q + 1 : *q_high;
4748
4749 if (*undershoot_seen || loop_at_this_size > 1) {
            4750        // Update the rate correction factor.
4751 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4752
4753 *q = (*q_high + *q_low + 1) / 2;
4754 } else {
            4755        // Update the rate correction factor.
4756 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4757
4758 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4759 AOMMAX(*q_high, top_index), cm->width,
4760 cm->height);
4761
4762 while (*q < *q_low && retries < 10) {
4763 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4764 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4765 AOMMAX(*q_high, top_index), cm->width,
4766 cm->height);
4767 retries++;
4768 }
4769 }
4770
4771 *overshoot_seen = 1;
4772 } else {
4773 // Frame is too small
4774 *q_high = *q > *q_low ? *q - 1 : *q_low;
4775
4776 if (*overshoot_seen || loop_at_this_size > 1) {
4777 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4778 *q = (*q_high + *q_low) / 2;
4779 } else {
4780 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4781 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4782 top_index, cm->width, cm->height);
4783 // Special case reset for qlow for constrained quality.
4784 // This should only trigger where there is very substantial
4785 // undershoot on a frame and the auto cq level is above
            4786          // the user-passed-in value.
4787 if (cpi->oxcf.rc_mode == AOM_CQ && *q < *q_low) {
4788 *q_low = *q;
4789 }
4790
4791 while (*q > *q_high && retries < 10) {
4792 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4793 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4794 top_index, cm->width, cm->height);
4795 retries++;
4796 }
4797 }
4798
4799 *undershoot_seen = 1;
4800 }
4801
4802 // Clamp Q to upper and lower limits:
4803 *q = clamp(*q, *q_low, *q_high);
4804
4805 *loop = (*q != last_q);
4806 } else {
4807 *loop = 0;
4808 }
4809}
4810
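// Core recode loop: pick a q within [bottom_index, top_index], encode the
// frame, do a dummy bitstream pack to measure the projected size, then let
// recode_loop_update_q() tighten the [q_low, q_high] bracket and choose a new
// q, repeating until the size fits the rate-control bounds or recoding is
// disallowed by the speed features.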
Tom Finegane4099e32018-01-23 12:01:51 -08004811static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004812 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004813 RATE_CONTROL *const rc = &cpi->rc;
David Turner2f3b5df2019-01-02 14:30:50 +00004814 const int allow_recode = cpi->sf.recode_loop != DISALLOW_RECODE;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004815
4816 set_size_independent_vars(cpi);
4817
Yaowu Xu9b0f7032017-07-31 11:01:19 -07004818 cpi->source->buf_8bit_valid = 0;
Yaowu Xu9b0f7032017-07-31 11:01:19 -07004819
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004820 setup_frame_size(cpi);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004821
David Turner2f3b5df2019-01-02 14:30:50 +00004822 int top_index = 0, bottom_index = 0;
4823 int q = 0, q_low = 0, q_high = 0;
4824 set_size_dependent_vars(cpi, &q, &bottom_index, &top_index);
4825 q_low = bottom_index;
4826 q_high = top_index;
4827
4828 // Loop variables
4829 int loop_count = 0;
4830 int loop_at_this_size = 0;
4831 int loop = 0;
4832 int overshoot_seen = 0;
4833 int undershoot_seen = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004834 do {
Yaowu Xuf883b422016-08-30 14:01:10 -07004835 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07004836
Urvang Joshif1fa6862018-01-08 16:39:33 -08004837 // if frame was scaled calculate global_motion_search again if already
4838 // done
David Turner2f3b5df2019-01-02 14:30:50 +00004839 if (loop_count > 0 && cpi->source && cpi->global_motion_search_done) {
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004840 if (cpi->source->y_crop_width != cm->width ||
David Turner2f3b5df2019-01-02 14:30:50 +00004841 cpi->source->y_crop_height != cm->height) {
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004842 cpi->global_motion_search_done = 0;
David Turner2f3b5df2019-01-02 14:30:50 +00004843 }
4844 }
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004845 cpi->source =
4846 av1_scale_if_required(cm, cpi->unscaled_source, &cpi->scaled_source);
David Turner2f3b5df2019-01-02 14:30:50 +00004847 if (cpi->unscaled_last_source != NULL) {
Debargha Mukherjee17e7b082017-08-13 09:33:03 -07004848 cpi->last_source = av1_scale_if_required(cm, cpi->unscaled_last_source,
4849 &cpi->scaled_last_source);
David Turner2f3b5df2019-01-02 14:30:50 +00004850 }
Debargha Mukherjee17e7b082017-08-13 09:33:03 -07004851
David Turner2f3b5df2019-01-02 14:30:50 +00004852 if (!frame_is_intra_only(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004853 if (loop_count > 0) {
4854 release_scaled_references(cpi);
4855 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004856 scale_references(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004857 }
Yaowu Xuf883b422016-08-30 14:01:10 -07004858 av1_set_quantizer(cm, q);
Debargha Mukherjeef48b0d22018-11-20 12:23:43 -08004859 // printf("Frame %d/%d: q = %d, frame_type = %d superres_denom = %d\n",
4860 // cm->current_frame.frame_number, cm->show_frame, q,
4861 // cm->current_frame.frame_type, cm->superres_scale_denominator);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004862
David Turner2f3b5df2019-01-02 14:30:50 +00004863 if (loop_count == 0) {
4864 setup_frame(cpi);
4865 } else if (get_primary_ref_frame_buf(cm) == NULL) {
4866 // Base q-index may have changed, so we need to assign proper default coef
4867 // probs before every iteration.
Yaowu Xuf883b422016-08-30 14:01:10 -07004868 av1_default_coef_probs(cm);
Hui Su3694c832017-11-10 14:15:58 -08004869 av1_setup_frame_contexts(cm);
David Barkerfc91b392018-03-09 15:32:03 +00004870 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004871
Yaowu Xuc27fc142016-08-22 16:08:15 -07004872 if (cpi->oxcf.aq_mode == VARIANCE_AQ) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004873 av1_vaq_frame_setup(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004874 } else if (cpi->oxcf.aq_mode == COMPLEXITY_AQ) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004875 av1_setup_in_frame_q_adj(cpi);
David Turner2f3b5df2019-01-02 14:30:50 +00004876 } else if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && !allow_recode) {
4877 suppress_active_map(cpi);
4878 av1_cyclic_refresh_setup(cpi);
4879 apply_active_map(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004880 }
David Turner2f3b5df2019-01-02 14:30:50 +00004881
Rostislav Pehlivanov3a964622018-03-14 18:00:32 +00004882 if (cm->seg.enabled) {
David Barkercab37552018-03-21 11:56:24 +00004883 if (!cm->seg.update_data && cm->prev_frame) {
Rostislav Pehlivanov3a964622018-03-14 18:00:32 +00004884 segfeatures_copy(&cm->seg, &cm->prev_frame->seg);
David Barker11c93562018-06-05 12:00:07 +01004885 } else {
4886 calculate_segdata(&cm->seg);
Yue Chend90d3432018-03-16 11:28:42 -07004887 }
David Barkercab37552018-03-21 11:56:24 +00004888 } else {
4889 memset(&cm->seg, 0, sizeof(cm->seg));
Rostislav Pehlivanov3a964622018-03-14 18:00:32 +00004890 }
David Barkercab37552018-03-21 11:56:24 +00004891 segfeatures_copy(&cm->cur_frame->seg, &cm->seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004892
David Turner2f3b5df2019-01-02 14:30:50 +00004893 if (allow_recode) save_coding_context(cpi);
4894
Yaowu Xuc27fc142016-08-22 16:08:15 -07004895 // transform / motion compensation build reconstruction frame
Yaowu Xuf883b422016-08-30 14:01:10 -07004896 av1_encode_frame(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004897
David Turner2f3b5df2019-01-02 14:30:50 +00004898 // Update some stats from cyclic refresh, and check if we should not update
4899 // golden reference, for 1 pass CBR.
4900 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ &&
4901 cm->current_frame.frame_type != KEY_FRAME &&
4902 (cpi->oxcf.pass == 0 && cpi->oxcf.rc_mode == AOM_CBR)) {
4903 av1_cyclic_refresh_check_golden_update(cpi);
4904 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004905
Yaowu Xuf883b422016-08-30 14:01:10 -07004906 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07004907
4908 // Dummy pack of the bitstream using up to date stats to get an
4909 // accurate estimate of output frame size to determine if we need
4910 // to recode.
4911 if (cpi->sf.recode_loop >= ALLOW_RECODE_KFARFGF) {
Jingning Han8f661602017-08-19 08:16:50 -07004912 restore_coding_context(cpi);
Tom Finegane4099e32018-01-23 12:01:51 -08004913
David Turner996b2c12018-12-07 15:52:30 +00004914 finalize_encoded_frame(cpi);
David Turner35cba132018-12-10 15:48:15 +00004915 int largest_tile_id = 0; // Output from bitstream: unused here
4916 if (av1_pack_bitstream(cpi, dest, size, &largest_tile_id) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08004917 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004918
4919 rc->projected_frame_size = (int)(*size) << 3;
4920 restore_coding_context(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004921 }
4922
David Turner2f3b5df2019-01-02 14:30:50 +00004923 if (allow_recode && cpi->oxcf.rc_mode != AOM_Q) {
4924 // Update q and decide whether to do a recode loop
4925 recode_loop_update_q(cpi, &loop, &q, &q_low, &q_high, top_index,
4926 bottom_index, &undershoot_seen, &overshoot_seen,
4927 loop_at_this_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004928 }
4929
4930 // Special case for overlay frame.
4931 if (rc->is_src_frame_alt_ref &&
4932 rc->projected_frame_size < rc->max_frame_bandwidth)
4933 loop = 0;
4934
David Turner2f3b5df2019-01-02 14:30:50 +00004935 if (allow_recode && !cpi->sf.gm_disable_recode &&
4936 recode_loop_test_global_motion(cpi)) {
4937 loop = 1;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08004938 }
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08004939
Yaowu Xuc27fc142016-08-22 16:08:15 -07004940 if (loop) {
4941 ++loop_count;
4942 ++loop_at_this_size;
4943
4944#if CONFIG_INTERNAL_STATS
4945 ++cpi->tot_recode_hits;
4946#endif
4947 }
4948 } while (loop);
Tom Finegane4099e32018-01-23 12:01:51 -08004949
4950 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004951}
4952
Yaowu Xuc27fc142016-08-22 16:08:15 -07004953#define DUMP_RECON_FRAMES 0
4954
4955#if DUMP_RECON_FRAMES == 1
4956// NOTE(zoeliu): For debug - Output the filtered reconstructed video.
Yaowu Xuf883b422016-08-30 14:01:10 -07004957static void dump_filtered_recon_frames(AV1_COMP *cpi) {
4958 AV1_COMMON *const cm = &cpi->common;
David Turnerd2a592e2018-11-16 14:59:31 +00004959 const CurrentFrame *const current_frame = &cm->current_frame;
David Turnerc29e1a92018-12-06 14:10:14 +00004960 const YV12_BUFFER_CONFIG *recon_buf = &cm->cur_frame->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004961
Zoe Liub4f31032017-11-03 23:48:35 -07004962 if (recon_buf == NULL) {
David Turnerd2a592e2018-11-16 14:59:31 +00004963 printf("Frame %d is not ready.\n", current_frame->frame_number);
Zoe Liub4f31032017-11-03 23:48:35 -07004964 return;
4965 }
4966
Zoe Liu27deb382018-03-27 15:13:56 -07004967 static const int flag_list[REF_FRAMES] = { 0,
4968 AOM_LAST_FLAG,
4969 AOM_LAST2_FLAG,
4970 AOM_LAST3_FLAG,
4971 AOM_GOLD_FLAG,
4972 AOM_BWD_FLAG,
4973 AOM_ALT2_FLAG,
4974 AOM_ALT_FLAG };
Zoe Liub4f31032017-11-03 23:48:35 -07004975 printf(
4976 "\n***Frame=%d (frame_offset=%d, show_frame=%d, "
4977 "show_existing_frame=%d) "
4978 "[LAST LAST2 LAST3 GOLDEN BWD ALT2 ALT]=[",
David Turnerd2a592e2018-11-16 14:59:31 +00004979 current_frame->frame_number, current_frame->order_hint, cm->show_frame,
Zoe Liub4f31032017-11-03 23:48:35 -07004980 cm->show_existing_frame);
4981 for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00004982 const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
4983 const int ref_offset = buf != NULL ? (int)buf->order_hint : -1;
David Turner1bcefb32018-11-19 17:54:00 +00004984 printf(" %d(%c-%d-%4.2f)", ref_offset,
4985 (cpi->ref_frame_flags & flag_list[ref_frame]) ? 'Y' : 'N',
David Turnera21966b2018-12-05 14:48:49 +00004986 buf ? (int)buf->frame_rf_level : -1,
4987 buf ? rate_factor_deltas[buf->frame_rf_level] : -1);
Zoe Liub4f31032017-11-03 23:48:35 -07004988 }
4989 printf(" ]\n");
Zoe Liub4f31032017-11-03 23:48:35 -07004990
4991 if (!cm->show_frame) {
4992 printf("Frame %d is a no show frame, so no image dump.\n",
David Turnerd2a592e2018-11-16 14:59:31 +00004993 current_frame->frame_number);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004994 return;
4995 }
4996
Zoe Liub4f31032017-11-03 23:48:35 -07004997 int h;
4998 char file_name[256] = "/tmp/enc_filtered_recon.yuv";
4999 FILE *f_recon = NULL;
5000
David Turnerd2a592e2018-11-16 14:59:31 +00005001 if (current_frame->frame_number == 0) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005002 if ((f_recon = fopen(file_name, "wb")) == NULL) {
5003 printf("Unable to open file %s to write.\n", file_name);
5004 return;
5005 }
5006 } else {
5007 if ((f_recon = fopen(file_name, "ab")) == NULL) {
5008 printf("Unable to open file %s to append.\n", file_name);
5009 return;
5010 }
5011 }
5012 printf(
Zoe Liuf40a9572017-10-13 12:37:19 -07005013 "\nFrame=%5d, encode_update_type[%5d]=%1d, frame_offset=%d, "
5014 "show_frame=%d, show_existing_frame=%d, source_alt_ref_active=%d, "
5015 "refresh_alt_ref_frame=%d, rf_level=%d, "
5016 "y_stride=%4d, uv_stride=%4d, cm->width=%4d, cm->height=%4d\n\n",
David Turnerd2a592e2018-11-16 14:59:31 +00005017 current_frame->frame_number, cpi->twopass.gf_group.index,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005018 cpi->twopass.gf_group.update_type[cpi->twopass.gf_group.index],
David Turnerd2a592e2018-11-16 14:59:31 +00005019 current_frame->order_hint, cm->show_frame, cm->show_existing_frame,
Zoe Liuf40a9572017-10-13 12:37:19 -07005020 cpi->rc.source_alt_ref_active, cpi->refresh_alt_ref_frame,
5021 cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index],
5022 recon_buf->y_stride, recon_buf->uv_stride, cm->width, cm->height);
Zoe Liue9b15e22017-07-19 15:53:01 -07005023#if 0
5024 int ref_frame;
5025 printf("get_ref_frame_map_idx: [");
5026 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame)
David Turnera21966b2018-12-05 14:48:49 +00005027 printf(" %d", get_ref_frame_map_idx(cm, ref_frame));
Zoe Liue9b15e22017-07-19 15:53:01 -07005028 printf(" ]\n");
Zoe Liue9b15e22017-07-19 15:53:01 -07005029#endif // 0
Yaowu Xuc27fc142016-08-22 16:08:15 -07005030
5031 // --- Y ---
5032 for (h = 0; h < cm->height; ++h) {
5033 fwrite(&recon_buf->y_buffer[h * recon_buf->y_stride], 1, cm->width,
5034 f_recon);
5035 }
5036 // --- U ---
5037 for (h = 0; h < (cm->height >> 1); ++h) {
5038 fwrite(&recon_buf->u_buffer[h * recon_buf->uv_stride], 1, (cm->width >> 1),
5039 f_recon);
5040 }
5041 // --- V ---
5042 for (h = 0; h < (cm->height >> 1); ++h) {
5043 fwrite(&recon_buf->v_buffer[h * recon_buf->uv_stride], 1, (cm->width >> 1),
5044 f_recon);
5045 }
5046
5047 fclose(f_recon);
5048}
5049#endif // DUMP_RECON_FRAMES
5050
Wei-Ting Linfb7dc062018-06-28 18:26:13 -07005051static INLINE int is_frame_droppable(AV1_COMP *cpi) {
5052 return !(cpi->refresh_alt_ref_frame || cpi->refresh_alt2_ref_frame ||
5053 cpi->refresh_bwd_ref_frame || cpi->refresh_golden_frame ||
5054 cpi->refresh_last_frame);
5055}
5056
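// Heuristic pruning of the interpolation filter search, based on how often
// each filter was chosen per reference frame previously: a filter is added to
// the mask when it accounted for at most 1/30 of LAST_FRAME's selections and
// its weighted use across the other references is also low, the idea being
// that such filters can be skipped in later searches.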
Sachin Kumar Gargfd39b232019-01-03 17:41:09 +05305057static int setup_interp_filter_search_mask(AV1_COMP *cpi) {
5058 InterpFilters ifilter;
5059 int ref_total[REF_FRAMES] = { 0 };
5060 MV_REFERENCE_FRAME ref;
5061 int mask = 0;
5062 int arf_idx = ALTREF_FRAME;
5063 if (cpi->common.last_frame_type == KEY_FRAME || cpi->refresh_alt_ref_frame)
5064 return mask;
5065 for (ref = LAST_FRAME; ref <= ALTREF_FRAME; ++ref)
5066 for (ifilter = EIGHTTAP_REGULAR; ifilter <= MULTITAP_SHARP; ++ifilter)
5067 ref_total[ref] += cpi->interp_filter_selected[ref][ifilter];
5068
5069 for (ifilter = EIGHTTAP_REGULAR; ifilter <= MULTITAP_SHARP; ++ifilter) {
5070 if ((ref_total[LAST_FRAME] &&
5071 cpi->interp_filter_selected[LAST_FRAME][ifilter] * 30 <=
5072 ref_total[LAST_FRAME]) &&
5073 (((cpi->interp_filter_selected[LAST2_FRAME][ifilter] * 20) +
5074 (cpi->interp_filter_selected[LAST3_FRAME][ifilter] * 20) +
5075 (cpi->interp_filter_selected[GOLDEN_FRAME][ifilter] * 20) +
5076 (cpi->interp_filter_selected[BWDREF_FRAME][ifilter] * 10) +
5077 (cpi->interp_filter_selected[ALTREF2_FRAME][ifilter] * 10) +
5078 (cpi->interp_filter_selected[arf_idx][ifilter] * 10)) <
5079 (ref_total[LAST2_FRAME] + ref_total[LAST3_FRAME] +
5080 ref_total[GOLDEN_FRAME] + ref_total[BWDREF_FRAME] +
5081 ref_total[ALTREF2_FRAME] + ref_total[ALTREF_FRAME])))
5082 mask |= 1 << ifilter;
5083 }
5084 return mask;
5085}
5086
Tom Finegane4099e32018-01-23 12:01:51 -08005087static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size, uint8_t *dest,
Tom Finegane4099e32018-01-23 12:01:51 -08005088 unsigned int *frame_flags) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005089 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005090 SequenceHeader *const seq_params = &cm->seq_params;
David Turnerd2a592e2018-11-16 14:59:31 +00005091 CurrentFrame *const current_frame = &cm->current_frame;
Yaowu Xuf883b422016-08-30 14:01:10 -07005092 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005093 struct segmentation *const seg = &cm->seg;
Thomas Davies4822e142017-10-10 11:30:36 +01005094
Yaowu Xuf883b422016-08-30 14:01:10 -07005095 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07005096
Fangwen Fu8d164de2016-12-14 13:40:54 -08005097 // frame type has been decided outside of this function call
David Turnerd2a592e2018-11-16 14:59:31 +00005098 cm->cur_frame->frame_type = current_frame->frame_type;
Debargha Mukherjee07a7c1f2018-03-21 17:39:13 -07005099
Yunqing Wang9612d552018-05-15 14:58:30 -07005100 cm->large_scale_tile = cpi->oxcf.large_scale_tile;
5101 cm->single_tile_decoding = cpi->oxcf.single_tile_decoding;
Yunqing Wang9612d552018-05-15 14:58:30 -07005102
sarahparker21dbca42018-03-30 17:43:44 -07005103 cm->allow_ref_frame_mvs &= frame_might_allow_ref_frame_mvs(cm);
Yunqing Wangd48fb162018-06-15 10:55:28 -07005104 // cm->allow_ref_frame_mvs needs to be written into the frame header while
5105 // cm->large_scale_tile is 1, therefore, "cm->large_scale_tile=1" case is
5106 // separated from frame_might_allow_ref_frame_mvs().
5107 cm->allow_ref_frame_mvs &= !cm->large_scale_tile;
5108
Debargha Mukherjee1d7217e2018-03-26 13:32:13 -07005109 cm->allow_warped_motion =
Debargha Mukherjeea5b810a2018-03-26 19:19:55 -07005110 cpi->oxcf.allow_warped_motion && frame_might_allow_warped_motion(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005111
Jingning Hand8a15a62017-10-30 10:53:42 -07005112 // Reset the frame packet stamp index.
David Turnerd2a592e2018-11-16 14:59:31 +00005113 if (current_frame->frame_type == KEY_FRAME && cm->show_frame)
5114 current_frame->frame_number = 0;
Jingning Hand8a15a62017-10-30 10:53:42 -07005115
Sachin Kumar Gargfd39b232019-01-03 17:41:09 +05305116 cm->last_frame_type = current_frame->frame_type;
5117 if (cpi->oxcf.pass == 2 && cpi->sf.adaptive_interp_filter_search)
5118 cpi->sf.interp_filter_search_mask = setup_interp_filter_search_mask(cpi);
5119
Sarah Parker33005522018-07-27 14:46:25 -07005120 if (encode_show_existing_frame(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005121 // NOTE(zoeliu): In BIDIR_PRED, the existing frame to show is the current
5122 // BWDREF_FRAME in the reference frame buffer.
David Turnerd2a592e2018-11-16 14:59:31 +00005123 if (current_frame->frame_type == KEY_FRAME) {
Sarah Parkerb9041612018-05-22 19:06:47 -07005124 cm->reset_decoder_state = 1;
5125 } else {
David Turnerd2a592e2018-11-16 14:59:31 +00005126 current_frame->frame_type = INTER_FRAME;
Sarah Parkerb9041612018-05-22 19:06:47 -07005127 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005128 cm->show_frame = 1;
5129 cpi->frame_flags = *frame_flags;
5130
Jingning Han8f661602017-08-19 08:16:50 -07005131 restore_coding_context(cpi);
Zoe Liub4f31032017-11-03 23:48:35 -07005132
David Turner996b2c12018-12-07 15:52:30 +00005133 finalize_encoded_frame(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005134 // Build the bitstream
David Turner35cba132018-12-10 15:48:15 +00005135 int largest_tile_id = 0; // Output from bitstream: unused here
5136 if (av1_pack_bitstream(cpi, dest, size, &largest_tile_id) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08005137 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005138
David Turner90311862018-11-29 13:34:36 +00005139 if (seq_params->frame_id_numbers_present_flag &&
5140 current_frame->frame_type == KEY_FRAME) {
5141 // Displaying a forward key-frame, so reset the ref buffer IDs
5142 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
5143 for (int i = 0; i < REF_FRAMES; i++)
5144 cm->ref_frame_id[i] = display_frame_id;
5145 }
5146
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07005147 cpi->seq_params_locked = 1;
5148
Zoe Liub4f31032017-11-03 23:48:35 -07005149 // Update current frame offset.
Jack Haughtonddb80602018-11-21 16:41:49 +00005150 current_frame->order_hint = cm->cur_frame->order_hint;
Zoe Liub4f31032017-11-03 23:48:35 -07005151
Yaowu Xuc27fc142016-08-22 16:08:15 -07005152#if DUMP_RECON_FRAMES == 1
5153 // NOTE(zoeliu): For debug - Output the filtered reconstructed video.
5154 dump_filtered_recon_frames(cpi);
5155#endif // DUMP_RECON_FRAMES
5156
5157 // Update the LAST_FRAME in the reference frame buffer.
Zoe Liue9b15e22017-07-19 15:53:01 -07005158 // NOTE:
5159 // (1) For BWDREF_FRAME as the show_existing_frame, the reference frame
5160 // update has been done previously when handling the LAST_BIPRED_FRAME
5161 // right before BWDREF_FRAME (in the display order);
5162 // (2) For INTNL_OVERLAY as the show_existing_frame, the reference frame
Urvang Joshif1fa6862018-01-08 16:39:33 -08005163    //     update will be done when the following is called, which will
5164    //     exchange the virtual indexes between LAST_FRAME and
5165    //     ALTREF2_FRAME, so that LAST3 will get retired, LAST2 becomes
5166    //     LAST3, LAST becomes LAST2, and ALTREF2_FRAME will serve as the
5167    //     new LAST_FRAME.
Cheng Chen46f30c72017-09-07 11:13:33 -07005169 update_reference_frames(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005170
5171 // Update frame flags
5172 cpi->frame_flags &= ~FRAMEFLAGS_GOLDEN;
5173 cpi->frame_flags &= ~FRAMEFLAGS_BWDREF;
5174 cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
5175
5176 *frame_flags = cpi->frame_flags & ~FRAMEFLAGS_KEY;
5177
Yaowu Xuc27fc142016-08-22 16:08:15 -07005178 // Since we allocate a spot for the OVERLAY frame in the gf group, we need
5179 // to do post-encoding update accordingly.
5180 if (cpi->rc.is_src_frame_alt_ref) {
Debargha Mukherjee7166f222017-09-05 21:32:42 -07005181 av1_set_target_rate(cpi, cm->width, cm->height);
Yaowu Xuf883b422016-08-30 14:01:10 -07005182 av1_rc_postencode_update(cpi, *size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005183 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005184
David Turnerd2a592e2018-11-16 14:59:31 +00005185 ++current_frame->frame_number;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005186
Tom Finegane4099e32018-01-23 12:01:51 -08005187 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005188 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005189
5190 // Set default state for segment based loop filter update flags.
5191 cm->lf.mode_ref_delta_update = 0;
5192
Yaowu Xuc27fc142016-08-22 16:08:15 -07005193 // Set various flags etc to special state if it is a key frame.
Tarek AMARAc9813852018-03-05 18:40:18 -05005194 if (frame_is_intra_only(cm) || frame_is_sframe(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005195 // Reset the loop filter deltas and segmentation map.
Yaowu Xuf883b422016-08-30 14:01:10 -07005196 av1_reset_segment_features(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005197
5198 // If segmentation is enabled force a map update for key frames.
5199 if (seg->enabled) {
5200 seg->update_map = 1;
5201 seg->update_data = 1;
5202 }
5203
5204 // The alternate reference frame cannot be active for a key frame.
5205 cpi->rc.source_alt_ref_active = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005206 }
Thomas Daviesaf6df172016-11-09 14:04:18 +00005207 if (cpi->oxcf.mtu == 0) {
5208 cm->num_tg = cpi->oxcf.num_tile_groups;
5209 } else {
Yaowu Xu859a5272016-11-10 15:32:21 -08005210 // Use a default value for the purposes of weighting costs in probability
5211 // updates
Thomas Daviesaf6df172016-11-09 14:04:18 +00005212 cm->num_tg = DEFAULT_MAX_NUM_TG;
5213 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005214
5215 // For 1 pass CBR, check if we are dropping this frame.
5216 // Never drop on key frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07005217 if (oxcf->pass == 0 && oxcf->rc_mode == AOM_CBR &&
David Turnerd2a592e2018-11-16 14:59:31 +00005218 current_frame->frame_type != KEY_FRAME) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005219 if (av1_rc_drop_frame(cpi)) {
5220 av1_rc_postencode_update_drop_frame(cpi);
Tom Finegane4099e32018-01-23 12:01:51 -08005221 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005222 }
5223 }
5224
Yaowu Xuf883b422016-08-30 14:01:10 -07005225 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07005226
5227#if CONFIG_INTERNAL_STATS
5228 memset(cpi->mode_chosen_counts, 0,
5229 MAX_MODES * sizeof(*cpi->mode_chosen_counts));
5230#endif
5231
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005232 if (seq_params->frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005233 /* Non-normative definition of current_frame_id ("frame counter" with
Johann123e8a62017-12-28 14:40:49 -08005234 * wraparound) */
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005235 if (cm->current_frame_id == -1) {
David Barker49a76562016-12-07 14:50:21 +00005236 int lsb, msb;
Yaowu Xud3e7c682017-12-21 14:08:25 -08005237 /* quasi-random initialization of current_frame_id for a key frame */
Alex Conversef77fd0b2017-04-20 11:00:24 -07005238 if (cpi->source->flags & YV12_FLAG_HIGHBITDEPTH) {
5239 lsb = CONVERT_TO_SHORTPTR(cpi->source->y_buffer)[0] & 0xff;
5240 msb = CONVERT_TO_SHORTPTR(cpi->source->y_buffer)[1] & 0xff;
David Barker49a76562016-12-07 14:50:21 +00005241 } else {
Alex Conversef77fd0b2017-04-20 11:00:24 -07005242 lsb = cpi->source->y_buffer[0] & 0xff;
5243 msb = cpi->source->y_buffer[1] & 0xff;
David Barker49a76562016-12-07 14:50:21 +00005244 }
David Turner760a2f42018-12-07 15:25:36 +00005245 cm->current_frame_id =
5246 ((msb << 8) + lsb) % (1 << seq_params->frame_id_length);
Tarek AMARAc9813852018-03-05 18:40:18 -05005247
5248      // S_frame is meant for stitching together different streams of
5249      // different resolutions, so current_frame_id must be consistent
5250      // across streams of the same content rather than random.
5251      // 0x37 is an arbitrarily chosen starting value.
5253 if (cpi->oxcf.sframe_enabled) cm->current_frame_id = 0x37;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005254 } else {
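      // Advance the frame id with wraparound, i.e.
      // (current_frame_id + 1) mod 2^frame_id_length; the added
      // (1 << frame_id_length) term just keeps the left operand of '%'
      // non-negative.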
5255 cm->current_frame_id =
David Turner760a2f42018-12-07 15:25:36 +00005256 (cm->current_frame_id + 1 + (1 << seq_params->frame_id_length)) %
5257 (1 << seq_params->frame_id_length);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005258 }
5259 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005260
Hui Su483a8452018-02-26 12:28:48 -08005261 switch (cpi->oxcf.cdf_update_mode) {
5262    case 0:  // No CDF update for any frames (4~6% compression loss).
5263 cm->disable_cdf_update = 1;
5264 break;
5265 case 1: // Enable CDF update for all frames.
5266 cm->disable_cdf_update = 0;
5267 break;
5268 case 2:
5269 // Strategically determine at which frames to do CDF update.
5270      // Currently only enable CDF update for all-intra and no-show frames
5271      // (1.5% compression loss).
5272 // TODO(huisu@google.com): design schemes for various trade-offs between
5273 // compression quality and decoding speed.
Hui Sub1b76b32018-02-27 15:24:48 -08005274 cm->disable_cdf_update =
5275 (frame_is_intra_only(cm) || !cm->show_frame) ? 0 : 1;
Hui Su483a8452018-02-26 12:28:48 -08005276 break;
5277 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005278 cm->timing_info_present &= !seq_params->reduced_still_picture_hdr;
Hui Su483a8452018-02-26 12:28:48 -08005279
David Turner2f3b5df2019-01-02 14:30:50 +00005280 if (encode_with_recode_loop(cpi, size, dest) != AOM_CODEC_OK)
5281 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005282
5283#ifdef OUTPUT_YUV_SKINMAP
David Turnerd2a592e2018-11-16 14:59:31 +00005284 if (cpi->common.current_frame.frame_number > 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005285 av1_compute_skin_map(cpi, yuv_skinmap_file);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005286 }
5287#endif // OUTPUT_YUV_SKINMAP
5288
5289  // Special case code to reduce pulsing when key frames are forced at a
5290  // fixed interval. Record the reconstruction error if this is the frame
5291  // before the forced key frame.
5292 if (cpi->rc.next_key_frame_forced && cpi->rc.frames_to_key == 1) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005293 if (seq_params->use_highbitdepth) {
Jack Haughtonddb80602018-11-21 16:41:49 +00005294 cpi->ambient_err = aom_highbd_get_y_sse(cpi->source, &cm->cur_frame->buf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005295 } else {
Jack Haughtonddb80602018-11-21 16:41:49 +00005296 cpi->ambient_err = aom_get_y_sse(cpi->source, &cm->cur_frame->buf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005297 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005298 }
5299
Tarek AMARAc9813852018-03-05 18:40:18 -05005300  // If the encoder forced a KEY_FRAME decision or if the frame is an S_FRAME
David Turnerd2a592e2018-11-16 14:59:31 +00005301 if ((current_frame->frame_type == KEY_FRAME && cm->show_frame) ||
5302 frame_is_sframe(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005303 cpi->refresh_last_frame = 1;
5304 }
5305
David Turnerc29e1a92018-12-06 14:10:14 +00005306 cm->cur_frame->buf.color_primaries = seq_params->color_primaries;
5307 cm->cur_frame->buf.transfer_characteristics =
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005308 seq_params->transfer_characteristics;
David Turnerc29e1a92018-12-06 14:10:14 +00005309 cm->cur_frame->buf.matrix_coefficients = seq_params->matrix_coefficients;
5310 cm->cur_frame->buf.monochrome = seq_params->monochrome;
5311 cm->cur_frame->buf.chroma_sample_position =
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005312 seq_params->chroma_sample_position;
David Turnerc29e1a92018-12-06 14:10:14 +00005313 cm->cur_frame->buf.color_range = seq_params->color_range;
5314 cm->cur_frame->buf.render_width = cm->render_width;
5315 cm->cur_frame->buf.render_height = cm->render_height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005316
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02005317 // TODO(zoeliu): For non-ref frames, loop filtering may need to be turned
5318 // off.
Yaowu Xuc27fc142016-08-22 16:08:15 -07005319
5320 // Pick the loop filter level for the frame.
Cheng Chen68dc9142018-05-02 17:46:28 -07005321 if (!cm->allow_intrabc) {
David Barker218556e2018-02-14 14:23:12 +00005322 loopfilter_frame(cpi, cm);
Hui Su06463e42018-02-23 22:17:36 -08005323 } else {
Hui Su06463e42018-02-23 22:17:36 -08005324 cm->lf.filter_level[0] = 0;
5325 cm->lf.filter_level[1] = 0;
David Turnerebf96f42018-11-14 16:57:57 +00005326 cm->cdef_info.cdef_bits = 0;
5327 cm->cdef_info.cdef_strengths[0] = 0;
5328 cm->cdef_info.nb_cdef_strengths = 1;
5329 cm->cdef_info.cdef_uv_strengths[0] = 0;
Hui Su06463e42018-02-23 22:17:36 -08005330 cm->rst_info[0].frame_restoration_type = RESTORE_NONE;
5331 cm->rst_info[1].frame_restoration_type = RESTORE_NONE;
5332 cm->rst_info[2].frame_restoration_type = RESTORE_NONE;
Hui Su06463e42018-02-23 22:17:36 -08005333 }
David Barker218556e2018-02-14 14:23:12 +00005334
5335 // TODO(debargha): Fix mv search range on encoder side
David Turnerc29e1a92018-12-06 14:10:14 +00005336 // aom_extend_frame_inner_borders(&cm->cur_frame->buf, av1_num_planes(cm));
5337 aom_extend_frame_borders(&cm->cur_frame->buf, av1_num_planes(cm));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005338
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07005339#ifdef OUTPUT_YUV_REC
David Turnerc29e1a92018-12-06 14:10:14 +00005340 aom_write_one_yuv_frame(cm, &cm->cur_frame->buf);
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07005341#endif
5342
David Turner996b2c12018-12-07 15:52:30 +00005343 finalize_encoded_frame(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005344 // Build the bitstream
David Turner35cba132018-12-10 15:48:15 +00005345 int largest_tile_id = 0; // Output from pack_bitstream
5346 if (av1_pack_bitstream(cpi, dest, size, &largest_tile_id) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08005347 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005348
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07005349 cpi->seq_params_locked = 1;
5350
David Turner996b2c12018-12-07 15:52:30 +00005351 // Update reference frame ids for reference frames this frame will overwrite
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005352 if (seq_params->frame_id_numbers_present_flag) {
David Turner996b2c12018-12-07 15:52:30 +00005353 for (int i = 0; i < REF_FRAMES; i++) {
5354 if ((current_frame->refresh_frame_flags >> i) & 1) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005355 cm->ref_frame_id[i] = cm->current_frame_id;
5356 }
5357 }
5358 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005359
Yaowu Xuc27fc142016-08-22 16:08:15 -07005360#if DUMP_RECON_FRAMES == 1
5361 // NOTE(zoeliu): For debug - Output the filtered reconstructed video.
Zoe Liub4f31032017-11-03 23:48:35 -07005362 dump_filtered_recon_frames(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005363#endif // DUMP_RECON_FRAMES
5364
Soo-Chul Han934af352017-10-15 15:21:51 -04005365 if (cm->seg.enabled) {
5366 if (cm->seg.update_map) {
5367 update_reference_segmentation_map(cpi);
Yue Chend90d3432018-03-16 11:28:42 -07005368 } else if (cm->last_frame_seg_map) {
David Turnerb757ce02018-11-12 15:01:28 +00005369 memcpy(cm->cur_frame->seg_map, cm->last_frame_seg_map,
Soo-Chul Han934af352017-10-15 15:21:51 -04005370 cm->mi_cols * cm->mi_rows * sizeof(uint8_t));
5371 }
5372 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005373
5374 if (frame_is_intra_only(cm) == 0) {
5375 release_scaled_references(cpi);
5376 }
5377
Cheng Chen46f30c72017-09-07 11:13:33 -07005378 update_reference_frames(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005379
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08005380#if CONFIG_ENTROPY_STATS
Yue Chencc6a6ef2018-05-21 16:21:05 -07005381 av1_accumulate_frame_counts(&aggregate_fc, &cpi->counts);
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08005382#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005383
Hui Sudc54be62018-03-14 19:14:28 -07005384 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
David Turner35cba132018-12-10 15:48:15 +00005385 *cm->fc = cpi->tile_data[largest_tile_id].tctx;
Hui Sudc54be62018-03-14 19:14:28 -07005386 av1_reset_cdf_symbol_counters(cm->fc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005387 }
5388
5389 if (cpi->refresh_golden_frame == 1)
5390 cpi->frame_flags |= FRAMEFLAGS_GOLDEN;
5391 else
5392 cpi->frame_flags &= ~FRAMEFLAGS_GOLDEN;
5393
5394 if (cpi->refresh_alt_ref_frame == 1)
5395 cpi->frame_flags |= FRAMEFLAGS_ALTREF;
5396 else
5397 cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
5398
Yaowu Xuc27fc142016-08-22 16:08:15 -07005399 if (cpi->refresh_bwd_ref_frame == 1)
5400 cpi->frame_flags |= FRAMEFLAGS_BWDREF;
5401 else
5402 cpi->frame_flags &= ~FRAMEFLAGS_BWDREF;
Sachin Kumar Gargfd39b232019-01-03 17:41:09 +05305403 cm->last_frame_type = current_frame->frame_type;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005404
Yaowu Xuf883b422016-08-30 14:01:10 -07005405 av1_rc_postencode_update(cpi, *size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005406
David Turnerd2a592e2018-11-16 14:59:31 +00005407 if (current_frame->frame_type == KEY_FRAME) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005408 // Tell the caller that the frame was coded as a key frame
5409 *frame_flags = cpi->frame_flags | FRAMEFLAGS_KEY;
5410 } else {
5411 *frame_flags = cpi->frame_flags & ~FRAMEFLAGS_KEY;
5412 }
5413
5414 // Clear the one shot update flags for segmentation map and mode/ref loop
5415 // filter deltas.
5416 cm->seg.update_map = 0;
5417 cm->seg.update_data = 0;
5418 cm->lf.mode_ref_delta_update = 0;
5419
Wei-Ting Linfb7dc062018-06-28 18:26:13 -07005420  // A droppable frame might not be shown but it always
5421  // takes a space in the gf group. Therefore, even when
5422  // it is not shown, we still need to update the countdown.
5423
Yaowu Xuc27fc142016-08-22 16:08:15 -07005424 if (cm->show_frame) {
Urvang Joshif1fa6862018-01-08 16:39:33 -08005425    // TODO(zoeliu): We may only swap mi and prev_mi for those frames that
5426    // are being used as reference.
Cheng Chen46f30c72017-09-07 11:13:33 -07005428 swap_mi_and_prev_mi(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005429    // Don't increment frame counters if this was an altref buffer
5430    // update, not a real frame.
Wei-Ting Lin96ee0eb2018-06-22 15:27:22 -07005431
David Turnerd2a592e2018-11-16 14:59:31 +00005432 ++current_frame->frame_number;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005433 }
5434
Tom Finegane4099e32018-01-23 12:01:51 -08005435 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005436}
5437
David Turner056f7cd2019-01-07 17:48:13 +00005438int av1_encode(AV1_COMP *const cpi, uint8_t *const dest,
5439 const EncodeFrameParams *const frame_params,
5440 EncodeFrameResults *const frame_results) {
David Turner07dbd8e2019-01-08 17:16:25 +00005441 AV1_COMMON *const cm = &cpi->common;
5442
David Turner056f7cd2019-01-07 17:48:13 +00005443 // TODO(david.turner@argondesign.com): Copy data from frame_params to cpi and
5444 // cm as appropriate
5445
David Turner07dbd8e2019-01-08 17:16:25 +00005446 cm->error_resilient_mode = frame_params->error_resilient_mode;
5447 cpi->ref_frame_flags = frame_params->ref_frame_flags;
5448
David Turner056f7cd2019-01-07 17:48:13 +00005449 if (encode_frame_to_data_rate(cpi, &frame_results->size, dest,
5450 frame_params->frame_flags) != AOM_CODEC_OK) {
5451 return AOM_CODEC_ERROR;
5452 }
5453
5454 return AOM_CODEC_OK;
5455}
5456
Sarah Parker3491dd22018-08-08 18:38:31 -07005457static INLINE void update_keyframe_counters(AV1_COMP *cpi) {
5458 // TODO(zoeliu): To investigate whether we should treat BWDREF_FRAME
5459 // differently here for rc->avg_frame_bandwidth.
5460 if (cpi->common.show_frame || cpi->rc.is_bwd_ref_frame) {
5461 if (!cpi->common.show_existing_frame || cpi->rc.is_src_frame_alt_ref ||
David Turnerd2a592e2018-11-16 14:59:31 +00005462 cpi->common.current_frame.frame_type == KEY_FRAME) {
Sarah Parker3491dd22018-08-08 18:38:31 -07005463 // If this is a show_existing_frame with a source other than altref,
5464 // or if it is not a displayed forward keyframe, the keyframe update
5465 // counters were incremented when it was originally encoded.
5466 cpi->rc.frames_since_key++;
5467 cpi->rc.frames_to_key--;
5468 }
5469 }
5470}
5471
5472static INLINE void update_frames_till_gf_update(AV1_COMP *cpi) {
5473  // TODO(weitinglin): Updating this counter for is_frame_droppable
5474  // is a work-around to handle the condition when a frame is dropped.
5475  // We should fix the cpi->common.show_frame flag
5476  // instead of checking the other condition to update the counter properly.
5477 if (cpi->common.show_frame || is_frame_droppable(cpi)) {
5478 // Decrement count down till next gf
5479 if (cpi->rc.frames_till_gf_update_due > 0)
5480 cpi->rc.frames_till_gf_update_due--;
5481 }
5482}
5483
5484static INLINE void update_twopass_gf_group_index(AV1_COMP *cpi) {
5485 // Increment the gf group index ready for the next frame. If this is
5486 // a show_existing_frame with a source other than altref, or if it is not
5487 // a displayed forward keyframe, the index was incremented when it was
5488 // originally encoded.
5489 if (!cpi->common.show_existing_frame || cpi->rc.is_src_frame_alt_ref ||
David Turnerd2a592e2018-11-16 14:59:31 +00005490 cpi->common.current_frame.frame_type == KEY_FRAME) {
Sarah Parker3491dd22018-08-08 18:38:31 -07005491 ++cpi->twopass.gf_group.index;
5492 }
5493}
5494
5495static void update_rc_counts(AV1_COMP *cpi) {
5496 update_keyframe_counters(cpi);
5497 update_frames_till_gf_update(cpi);
5498 if (cpi->oxcf.pass == 2) update_twopass_gf_group_index(cpi);
5499}
5500
Debargha Mukherjee57378252018-09-21 18:29:37 -07005501static void set_additional_frame_flags(AV1_COMMON *const cm,
5502 unsigned int *frame_flags) {
5503 if (frame_is_intra_only(cm)) *frame_flags |= FRAMEFLAGS_INTRAONLY;
5504 if (frame_is_sframe(cm)) *frame_flags |= FRAMEFLAGS_SWITCH;
5505 if (cm->error_resilient_mode) *frame_flags |= FRAMEFLAGS_ERROR_RESILIENT;
5506}
5507
Tom Finegane4099e32018-01-23 12:01:51 -08005508static int Pass0Encode(AV1_COMP *cpi, size_t *size, uint8_t *dest,
David Turner056f7cd2019-01-07 17:48:13 +00005509 unsigned int *frame_flags) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005510 if (cpi->oxcf.rc_mode == AOM_CBR) {
5511 av1_rc_get_one_pass_cbr_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005512 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07005513 av1_rc_get_one_pass_vbr_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005514 }
David Turner056f7cd2019-01-07 17:48:13 +00005515 if (av1_encode_strategy(cpi, size, dest, frame_flags) != AOM_CODEC_OK) {
Debargha Mukherjeeff48c092018-04-04 23:53:40 -07005516 return AOM_CODEC_ERROR;
5517 }
Debargha Mukherjee57378252018-09-21 18:29:37 -07005518 set_additional_frame_flags(&cpi->common, frame_flags);
5519
Sarah Parker3491dd22018-08-08 18:38:31 -07005520 update_rc_counts(cpi);
Debargha Mukherjeeff48c092018-04-04 23:53:40 -07005521 check_show_existing_frame(cpi);
5522 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005523}
5524
Tom Finegane4099e32018-01-23 12:01:51 -08005525static int Pass2Encode(AV1_COMP *cpi, size_t *size, uint8_t *dest,
5526 unsigned int *frame_flags) {
Angie Chiang5b5f4df2017-12-06 10:41:12 -08005527#if CONFIG_MISMATCH_DEBUG
5528 mismatch_move_frame_idx_w();
5529#endif
Angie Chiang4d55d762017-12-13 16:18:37 -08005530#if TXCOEFF_COST_TIMER
5531 AV1_COMMON *cm = &cpi->common;
5532 cm->txcoeff_cost_timer = 0;
5533 cm->txcoeff_cost_count = 0;
5534#endif
Tom Finegane4099e32018-01-23 12:01:51 -08005535
David Turner056f7cd2019-01-07 17:48:13 +00005536 if (av1_encode_strategy(cpi, size, dest, frame_flags) != AOM_CODEC_OK) {
Tom Finegane4099e32018-01-23 12:01:51 -08005537 return AOM_CODEC_ERROR;
5538 }
Debargha Mukherjee57378252018-09-21 18:29:37 -07005539 set_additional_frame_flags(&cpi->common, frame_flags);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005540
Angie Chiang4d55d762017-12-13 16:18:37 -08005541#if TXCOEFF_COST_TIMER
5542 cm->cum_txcoeff_cost_timer += cm->txcoeff_cost_timer;
5543 fprintf(stderr,
5544 "\ntxb coeff cost block number: %ld, frame time: %ld, cum time %ld "
5545 "in us\n",
5546 cm->txcoeff_cost_count, cm->txcoeff_cost_timer,
5547 cm->cum_txcoeff_cost_timer);
5548#endif
5549
Sarah Parker3491dd22018-08-08 18:38:31 -07005550 av1_twopass_postencode_update(cpi);
5551 update_rc_counts(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005552 check_show_existing_frame(cpi);
Tom Finegane4099e32018-01-23 12:01:51 -08005553 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005554}
5555
Neil Birkbecka2893ab2018-06-08 14:45:13 -07005556#if CONFIG_DENOISE
5557static int apply_denoise_2d(AV1_COMP *cpi, YV12_BUFFER_CONFIG *sd,
5558 int block_size, float noise_level,
5559 int64_t time_stamp, int64_t end_time) {
5560 AV1_COMMON *const cm = &cpi->common;
5561 if (!cpi->denoise_and_model) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005562 cpi->denoise_and_model = aom_denoise_and_model_alloc(
5563 cm->seq_params.bit_depth, block_size, noise_level);
Neil Birkbecka2893ab2018-06-08 14:45:13 -07005564 if (!cpi->denoise_and_model) {
5565 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
5566 "Error allocating denoise and model");
5567 return -1;
5568 }
5569 }
5570 if (!cpi->film_grain_table) {
5571 cpi->film_grain_table = aom_malloc(sizeof(*cpi->film_grain_table));
5572 if (!cpi->film_grain_table) {
5573 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
5574 "Error allocating grain table");
5575 return -1;
5576 }
5577 memset(cpi->film_grain_table, 0, sizeof(*cpi->film_grain_table));
5578 }
5579 if (aom_denoise_and_model_run(cpi->denoise_and_model, sd,
5580 &cm->film_grain_params)) {
5581 if (cm->film_grain_params.apply_grain) {
5582 aom_film_grain_table_append(cpi->film_grain_table, time_stamp, end_time,
5583 &cm->film_grain_params);
5584 }
5585 }
5586 return 0;
5587}
5588#endif
5589
James Zern3e2613b2017-03-30 23:14:40 -07005590int av1_receive_raw_frame(AV1_COMP *cpi, aom_enc_frame_flags_t frame_flags,
Yaowu Xuf883b422016-08-30 14:01:10 -07005591 YV12_BUFFER_CONFIG *sd, int64_t time_stamp,
5592 int64_t end_time) {
5593 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005594 const SequenceHeader *const seq_params = &cm->seq_params;
Yaowu Xuf883b422016-08-30 14:01:10 -07005595 struct aom_usec_timer timer;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005596 int res = 0;
5597 const int subsampling_x = sd->subsampling_x;
5598 const int subsampling_y = sd->subsampling_y;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005599 const int use_highbitdepth = (sd->flags & YV12_FLAG_HIGHBITDEPTH) != 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005600
Yaowu Xuc27fc142016-08-22 16:08:15 -07005601 check_initial_width(cpi, use_highbitdepth, subsampling_x, subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005602
Yaowu Xuf883b422016-08-30 14:01:10 -07005603 aom_usec_timer_start(&timer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005604
Neil Birkbecka2893ab2018-06-08 14:45:13 -07005605#if CONFIG_DENOISE
5606 if (cpi->oxcf.noise_level > 0)
5607 if (apply_denoise_2d(cpi, sd, cpi->oxcf.noise_block_size,
5608 cpi->oxcf.noise_level, time_stamp, end_time) < 0)
5609 res = -1;
5610#endif // CONFIG_DENOISE
5611
Yaowu Xuf883b422016-08-30 14:01:10 -07005612 if (av1_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
Yaowu Xud3e7c682017-12-21 14:08:25 -08005613 use_highbitdepth, frame_flags))
Yaowu Xuc27fc142016-08-22 16:08:15 -07005614 res = -1;
Yaowu Xuf883b422016-08-30 14:01:10 -07005615 aom_usec_timer_mark(&timer);
5616 cpi->time_receive_data += aom_usec_timer_elapsed(&timer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005617
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005618 if ((seq_params->profile == PROFILE_0) && !seq_params->monochrome &&
Yaowu Xuc27fc142016-08-22 16:08:15 -07005619 (subsampling_x != 1 || subsampling_y != 1)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005620 aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005621 "Non-4:2:0 color format requires profile 1 or 2");
Yaowu Xuc27fc142016-08-22 16:08:15 -07005622 res = -1;
5623 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005624 if ((seq_params->profile == PROFILE_1) &&
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005625 !(subsampling_x == 0 && subsampling_y == 0)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005626 aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005627 "Profile 1 requires 4:4:4 color format");
5628 res = -1;
5629 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005630 if ((seq_params->profile == PROFILE_2) &&
5631 (seq_params->bit_depth <= AOM_BITS_10) &&
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005632 !(subsampling_x == 1 && subsampling_y == 0)) {
5633 aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
5634 "Profile 2 bit-depth < 10 requires 4:2:2 color format");
Yaowu Xuc27fc142016-08-22 16:08:15 -07005635 res = -1;
5636 }
5637
5638 return res;
5639}
5640
Yaowu Xuf883b422016-08-30 14:01:10 -07005641static void adjust_frame_rate(AV1_COMP *cpi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005642 const struct lookahead_entry *source) {
5643 int64_t this_duration;
5644 int step = 0;
5645
5646 if (source->ts_start == cpi->first_time_stamp_ever) {
5647 this_duration = source->ts_end - source->ts_start;
5648 step = 1;
5649 } else {
5650 int64_t last_duration =
5651 cpi->last_end_time_stamp_seen - cpi->last_time_stamp_seen;
5652
5653 this_duration = source->ts_end - cpi->last_end_time_stamp_seen;
5654
5655 // do a step update if the duration changes by 10%
5656 if (last_duration)
5657 step = (int)((this_duration - last_duration) * 10 / last_duration);
5658 }
5659
5660 if (this_duration) {
5661 if (step) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005662 av1_new_framerate(cpi, 10000000.0 / this_duration);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005663 } else {
5664 // Average this frame's rate into the last second's average
5665 // frame rate. If we haven't seen 1 second yet, then average
5666 // over the whole interval seen.
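      // The update below amounts to avg_duration +=
      //   (this_duration - avg_duration) * (avg_duration / interval),
      // a running average whose adaptation weight shrinks as the observed
      // interval approaches one second (10000000 us).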
Yaowu Xuf883b422016-08-30 14:01:10 -07005667 const double interval = AOMMIN(
Yaowu Xuc27fc142016-08-22 16:08:15 -07005668 (double)(source->ts_end - cpi->first_time_stamp_ever), 10000000.0);
5669 double avg_duration = 10000000.0 / cpi->framerate;
5670 avg_duration *= (interval - avg_duration + this_duration);
5671 avg_duration /= interval;
5672
Yaowu Xuf883b422016-08-30 14:01:10 -07005673 av1_new_framerate(cpi, 10000000.0 / avg_duration);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005674 }
5675 }
5676 cpi->last_time_stamp_seen = source->ts_start;
5677 cpi->last_end_time_stamp_seen = source->ts_end;
5678}
5679
5680// Returns 0 if this is not an alt ref else the offset of the source frame
5681// used as the arf midpoint.
Yaowu Xuf883b422016-08-30 14:01:10 -07005682static int get_arf_src_index(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005683 RATE_CONTROL *const rc = &cpi->rc;
5684 int arf_src_index = 0;
5685 if (is_altref_enabled(cpi)) {
5686 if (cpi->oxcf.pass == 2) {
5687 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5688 if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
5689 arf_src_index = gf_group->arf_src_offset[gf_group->index];
5690 }
5691 } else if (rc->source_alt_ref_pending) {
5692 arf_src_index = rc->frames_till_gf_update_due;
5693 }
5694 }
5695 return arf_src_index;
5696}
5697
Yaowu Xuf883b422016-08-30 14:01:10 -07005698static int get_brf_src_index(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005699 int brf_src_index = 0;
5700 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5701
5702 // TODO(zoeliu): We need to add the check on the -bwd_ref command line setup
5703 // flag.
5704 if (gf_group->bidir_pred_enabled[gf_group->index]) {
5705 if (cpi->oxcf.pass == 2) {
5706 if (gf_group->update_type[gf_group->index] == BRF_UPDATE)
5707 brf_src_index = gf_group->brf_src_offset[gf_group->index];
5708 } else {
5709 // TODO(zoeliu): To re-visit the setup for this scenario
5710 brf_src_index = cpi->rc.bipred_group_interval - 1;
5711 }
5712 }
5713
5714 return brf_src_index;
5715}
Zoe Liue9b15e22017-07-19 15:53:01 -07005716
Zoe Liue9b15e22017-07-19 15:53:01 -07005717// Returns 0 if this is not an alt ref else the offset of the source frame
5718// used as the arf midpoint.
5719static int get_arf2_src_index(AV1_COMP *cpi) {
5720 int arf2_src_index = 0;
5721 if (is_altref_enabled(cpi) && cpi->num_extra_arfs) {
5722 if (cpi->oxcf.pass == 2) {
5723 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5724 if (gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE) {
5725 arf2_src_index = gf_group->arf_src_offset[gf_group->index];
5726 }
5727 }
5728 }
5729 return arf2_src_index;
5730}
Yaowu Xuc27fc142016-08-22 16:08:15 -07005731
Yaowu Xuf883b422016-08-30 14:01:10 -07005732static void check_src_altref(AV1_COMP *cpi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005733 const struct lookahead_entry *source) {
5734 RATE_CONTROL *const rc = &cpi->rc;
5735
5736 // If pass == 2, the parameters set here will be reset in
Yaowu Xuf883b422016-08-30 14:01:10 -07005737 // av1_rc_get_second_pass_params()
Yaowu Xuc27fc142016-08-22 16:08:15 -07005738
5739 if (cpi->oxcf.pass == 2) {
5740 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5741 rc->is_src_frame_alt_ref =
Yaowu Xuc27fc142016-08-22 16:08:15 -07005742 (gf_group->update_type[gf_group->index] == INTNL_OVERLAY_UPDATE) ||
Yaowu Xuc27fc142016-08-22 16:08:15 -07005743 (gf_group->update_type[gf_group->index] == OVERLAY_UPDATE);
Zoe Liue9b15e22017-07-19 15:53:01 -07005744 rc->is_src_frame_ext_arf =
5745 gf_group->update_type[gf_group->index] == INTNL_OVERLAY_UPDATE;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005746 } else {
5747 rc->is_src_frame_alt_ref =
5748 cpi->alt_ref_source && (source == cpi->alt_ref_source);
5749 }
5750
5751 if (rc->is_src_frame_alt_ref) {
5752 // Current frame is an ARF overlay frame.
5753 cpi->alt_ref_source = NULL;
5754
Zoe Liue9b15e22017-07-19 15:53:01 -07005755 if (rc->is_src_frame_ext_arf && !cpi->common.show_existing_frame) {
5756      // For INTNL_OVERLAY, when show_existing_frame == 0, we do need to
5757 // refresh the LAST_FRAME, i.e. LAST3 gets retired, LAST2 becomes LAST3,
5758 // LAST becomes LAST2, and INTNL_OVERLAY becomes LAST.
5759 cpi->refresh_last_frame = 1;
5760 } else {
Zoe Liue9b15e22017-07-19 15:53:01 -07005761 // Don't refresh the last buffer for an ARF overlay frame. It will
5762 // become the GF so preserve last as an alternative prediction option.
5763 cpi->refresh_last_frame = 0;
Zoe Liue9b15e22017-07-19 15:53:01 -07005764 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005765 }
5766}
5767
5768#if CONFIG_INTERNAL_STATS
Yaowu Xuf883b422016-08-30 14:01:10 -07005769extern double av1_get_blockiness(const unsigned char *img1, int img1_pitch,
5770 const unsigned char *img2, int img2_pitch,
5771 int width, int height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005772
5773static void adjust_image_stat(double y, double u, double v, double all,
5774 ImageStat *s) {
Wan-Teh Changc25c92a2018-04-23 15:04:14 -07005775 s->stat[STAT_Y] += y;
5776 s->stat[STAT_U] += u;
5777 s->stat[STAT_V] += v;
5778 s->stat[STAT_ALL] += all;
Yaowu Xuf883b422016-08-30 14:01:10 -07005779 s->worst = AOMMIN(s->worst, all);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005780}
5781
Angie Chiang08a22a62017-07-17 17:29:17 -07005782static void compute_internal_stats(AV1_COMP *cpi, int frame_bytes) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005783 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005784 double samples = 0.0;
5785 uint32_t in_bit_depth = 8;
5786 uint32_t bit_depth = 8;
5787
Angie Chiang08a22a62017-07-17 17:29:17 -07005788#if CONFIG_INTER_STATS_ONLY
David Turnerd2a592e2018-11-16 14:59:31 +00005789 if (cm->current_frame.frame_type == KEY_FRAME) return; // skip key frame
Angie Chiang08a22a62017-07-17 17:29:17 -07005790#endif
5791 cpi->bytes += frame_bytes;
5792
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005793 if (cm->seq_params.use_highbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005794 in_bit_depth = cpi->oxcf.input_bit_depth;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005795 bit_depth = cm->seq_params.bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005796 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005797 if (cm->show_frame) {
Alex Conversef77fd0b2017-04-20 11:00:24 -07005798 const YV12_BUFFER_CONFIG *orig = cpi->source;
David Turnerc29e1a92018-12-06 14:10:14 +00005799 const YV12_BUFFER_CONFIG *recon = &cpi->common.cur_frame->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005800 double y, u, v, frame_all;
5801
5802 cpi->count++;
5803 if (cpi->b_calculate_psnr) {
5804 PSNR_STATS psnr;
5805 double frame_ssim2 = 0.0, weight = 0.0;
Yaowu Xuf883b422016-08-30 14:01:10 -07005806 aom_clear_system_state();
Yaowu Xud3e7c682017-12-21 14:08:25 -08005807 // TODO(yaowu): unify these two versions into one.
Yaowu Xuf883b422016-08-30 14:01:10 -07005808 aom_calc_highbd_psnr(orig, recon, &psnr, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005809
5810 adjust_image_stat(psnr.psnr[1], psnr.psnr[2], psnr.psnr[3], psnr.psnr[0],
5811 &cpi->psnr);
5812 cpi->total_sq_error += psnr.sse[0];
5813 cpi->total_samples += psnr.samples[0];
5814 samples = psnr.samples[0];
Yaowu Xud3e7c682017-12-21 14:08:25 -08005815 // TODO(yaowu): unify these two versions into one.
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005816 if (cm->seq_params.use_highbitdepth)
Yaowu Xuc27fc142016-08-22 16:08:15 -07005817 frame_ssim2 =
Yaowu Xuf883b422016-08-30 14:01:10 -07005818 aom_highbd_calc_ssim(orig, recon, &weight, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005819 else
Yaowu Xuf883b422016-08-30 14:01:10 -07005820 frame_ssim2 = aom_calc_ssim(orig, recon, &weight);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005821
Yaowu Xuf883b422016-08-30 14:01:10 -07005822 cpi->worst_ssim = AOMMIN(cpi->worst_ssim, frame_ssim2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005823 cpi->summed_quality += frame_ssim2 * weight;
5824 cpi->summed_weights += weight;
5825
5826#if 0
5827 {
5828 FILE *f = fopen("q_used.stt", "a");
Zoe Liuee202be2017-11-17 12:14:33 -08005829 double y2 = psnr.psnr[1];
5830 double u2 = psnr.psnr[2];
5831 double v2 = psnr.psnr[3];
5832 double frame_psnr2 = psnr.psnr[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07005833 fprintf(f, "%5d : Y%f7.3:U%f7.3:V%f7.3:F%f7.3:S%7.3f\n",
David Turnerd2a592e2018-11-16 14:59:31 +00005834 cm->current_frame.frame_number, y2, u2, v2,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005835 frame_psnr2, frame_ssim2);
5836 fclose(f);
5837 }
5838#endif
5839 }
5840 if (cpi->b_calculate_blockiness) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005841 if (!cm->seq_params.use_highbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005842 const double frame_blockiness =
Yaowu Xuf883b422016-08-30 14:01:10 -07005843 av1_get_blockiness(orig->y_buffer, orig->y_stride, recon->y_buffer,
5844 recon->y_stride, orig->y_width, orig->y_height);
5845 cpi->worst_blockiness = AOMMAX(cpi->worst_blockiness, frame_blockiness);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005846 cpi->total_blockiness += frame_blockiness;
5847 }
5848
5849 if (cpi->b_calculate_consistency) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005850 if (!cm->seq_params.use_highbitdepth) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005851 const double this_inconsistency = aom_get_ssim_metrics(
Yaowu Xuc27fc142016-08-22 16:08:15 -07005852 orig->y_buffer, orig->y_stride, recon->y_buffer, recon->y_stride,
5853 orig->y_width, orig->y_height, cpi->ssim_vars, &cpi->metrics, 1);
5854
5855 const double peak = (double)((1 << in_bit_depth) - 1);
5856 const double consistency =
Yaowu Xuf883b422016-08-30 14:01:10 -07005857 aom_sse_to_psnr(samples, peak, cpi->total_inconsistency);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005858 if (consistency > 0.0)
5859 cpi->worst_consistency =
Yaowu Xuf883b422016-08-30 14:01:10 -07005860 AOMMIN(cpi->worst_consistency, consistency);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005861 cpi->total_inconsistency += this_inconsistency;
5862 }
5863 }
5864 }
5865
5866 frame_all =
Yaowu Xuf883b422016-08-30 14:01:10 -07005867 aom_calc_fastssim(orig, recon, &y, &u, &v, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005868 adjust_image_stat(y, u, v, frame_all, &cpi->fastssim);
Yaowu Xuf883b422016-08-30 14:01:10 -07005869 frame_all = aom_psnrhvs(orig, recon, &y, &u, &v, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005870 adjust_image_stat(y, u, v, frame_all, &cpi->psnrhvs);
5871 }
5872}
5873#endif // CONFIG_INTERNAL_STATS
5874
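// Heuristic check on whether the frame should use integer-precision MVs
// (typical for screen content): classify each 8x8 block of the current
// picture as a collocated match (C), a smooth block (S), or a hash match
// against the previous picture (M), out of T total blocks, then decide from
// the current and history-averaged match rates.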
RogerZhou3b635242017-09-19 10:06:46 -07005875static int is_integer_mv(AV1_COMP *cpi, const YV12_BUFFER_CONFIG *cur_picture,
5876 const YV12_BUFFER_CONFIG *last_picture,
5877 hash_table *last_hash_table) {
5878 aom_clear_system_state();
5879  // Use hash-based ME statistics to check whether to force integer MVs.
5880 int k;
5881 uint32_t hash_value_1;
5882 uint32_t hash_value_2;
5883
5884 const int block_size = 8;
5885 const double threshold_current = 0.8;
5886 const double threshold_average = 0.95;
5887 const int max_history_size = 32;
5888  int T = 0;  // total number of blocks
5889  int C = 0;  // blocks that match the collocated block
5890  int S = 0;  // smooth blocks that do not match the collocated block
5891  int M = 0;  // blocks that match some other block (via hash)
5892
5893 const int pic_width = cur_picture->y_width;
5894 const int pic_height = cur_picture->y_height;
5895 for (int i = 0; i + block_size <= pic_height; i += block_size) {
5896 for (int j = 0; j + block_size <= pic_width; j += block_size) {
5897 const int x_pos = j;
5898 const int y_pos = i;
5899 int match = 1;
5900 T++;
5901
5902      // Check whether the collocated block matches the current block.
5903 uint8_t *p_cur = cur_picture->y_buffer;
5904 uint8_t *p_ref = last_picture->y_buffer;
5905 int stride_cur = cur_picture->y_stride;
5906 int stride_ref = last_picture->y_stride;
5907 p_cur += (y_pos * stride_cur + x_pos);
5908 p_ref += (y_pos * stride_ref + x_pos);
5909
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005910 if (cur_picture->flags & YV12_FLAG_HIGHBITDEPTH) {
5911 uint16_t *p16_cur = CONVERT_TO_SHORTPTR(p_cur);
5912 uint16_t *p16_ref = CONVERT_TO_SHORTPTR(p_ref);
5913 for (int tmpY = 0; tmpY < block_size && match; tmpY++) {
5914 for (int tmpX = 0; tmpX < block_size && match; tmpX++) {
5915 if (p16_cur[tmpX] != p16_ref[tmpX]) {
5916 match = 0;
5917 }
RogerZhou3b635242017-09-19 10:06:46 -07005918 }
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005919 p16_cur += stride_cur;
5920 p16_ref += stride_ref;
RogerZhou3b635242017-09-19 10:06:46 -07005921 }
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005922 } else {
5923 for (int tmpY = 0; tmpY < block_size && match; tmpY++) {
5924 for (int tmpX = 0; tmpX < block_size && match; tmpX++) {
5925 if (p_cur[tmpX] != p_ref[tmpX]) {
5926 match = 0;
5927 }
5928 }
5929 p_cur += stride_cur;
5930 p_ref += stride_ref;
5931 }
RogerZhou3b635242017-09-19 10:06:46 -07005932 }
5933
5934 if (match) {
5935 C++;
5936 continue;
5937 }
5938
5939 if (av1_hash_is_horizontal_perfect(cur_picture, block_size, x_pos,
5940 y_pos) ||
5941 av1_hash_is_vertical_perfect(cur_picture, block_size, x_pos, y_pos)) {
5942 S++;
5943 continue;
5944 }
5945
5946 av1_get_block_hash_value(
5947 cur_picture->y_buffer + y_pos * stride_cur + x_pos, stride_cur,
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005948 block_size, &hash_value_1, &hash_value_2,
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05305949 (cur_picture->flags & YV12_FLAG_HIGHBITDEPTH), &cpi->td.mb);
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005950 // Hashing does not work for highbitdepth currently.
5951 // TODO(Roger): Make it work for highbitdepth.
5952 if (av1_use_hash_me(&cpi->common)) {
5953 if (av1_has_exact_match(last_hash_table, hash_value_1, hash_value_2)) {
5954 M++;
5955 }
RogerZhou3b635242017-09-19 10:06:46 -07005956 }
5957 }
5958 }
5959
5960 assert(T > 0);
5961 double csm_rate = ((double)(C + S + M)) / ((double)(T));
5962 double m_rate = ((double)(M)) / ((double)(T));
5963
5964 cpi->csm_rate_array[cpi->rate_index] = csm_rate;
5965 cpi->m_rate_array[cpi->rate_index] = m_rate;
5966
5967 cpi->rate_index = (cpi->rate_index + 1) % max_history_size;
5968 cpi->rate_size++;
5969 cpi->rate_size = AOMMIN(cpi->rate_size, max_history_size);
5970
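  // Decision rules: bail out unless the current match rate is high; accept
  // immediately if every block matches its collocated block; otherwise
  // require the history-averaged match rate to stay high and a meaningful
  // share of hash matches before choosing integer MVs.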
5971 if (csm_rate < threshold_current) {
5972 return 0;
5973 }
5974
5975 if (C == T) {
5976 return 1;
5977 }
5978
5979 double csm_average = 0.0;
5980 double m_average = 0.0;
5981
5982 for (k = 0; k < cpi->rate_size; k++) {
5983 csm_average += cpi->csm_rate_array[k];
5984 m_average += cpi->m_rate_array[k];
5985 }
5986 csm_average /= cpi->rate_size;
5987 m_average /= cpi->rate_size;
5988
5989 if (csm_average < threshold_average) {
5990 return 0;
5991 }
5992
5993 if (M > (T - C - S) / 3) {
5994 return 1;
5995 }
5996
5997 if (csm_rate > 0.99 && m_rate > 0.01) {
5998 return 1;
5999 }
6000
6001 if (csm_average + m_average > 1.01) {
6002 return 1;
6003 }
6004
6005 return 0;
6006}
RogerZhou3b635242017-09-19 10:06:46 -07006007
Yue Chen7cae98f2018-08-24 10:43:16 -07006008// Code for temporal dependency model
6009typedef struct GF_PICTURE {
6010 YV12_BUFFER_CONFIG *frame;
6011 int ref_frame[7];
6012} GF_PICTURE;
6013
Sarah Parkercf644442018-10-11 15:23:44 -07006014static void init_gop_frames(AV1_COMP *cpi, GF_PICTURE *gf_picture,
6015 const GF_GROUP *gf_group, int *tpl_group_frames) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006016 AV1_COMMON *cm = &cpi->common;
6017 const SequenceHeader *const seq_params = &cm->seq_params;
6018 int frame_idx = 0;
6019 int i;
6020 int gld_index = -1;
6021 int alt_index = -1;
6022 int lst_index = -1;
6023 int extend_frame_count = 0;
6024 int pframe_qindex = cpi->tpl_stats[2].base_qindex;
6025
6026 RefCntBuffer *frame_bufs = cm->buffer_pool->frame_bufs;
6027 int recon_frame_index[INTER_REFS_PER_FRAME + 1] = { -1, -1, -1, -1,
6028 -1, -1, -1, -1 };
6029
6030 // TODO(jingning): To be used later for gf frame type parsing.
6031 (void)gf_group;
6032
6033 for (i = 0; i < FRAME_BUFFERS && frame_idx < INTER_REFS_PER_FRAME + 1; ++i) {
6034 if (frame_bufs[i].ref_count == 0) {
David Turnere7ebf902018-12-04 14:04:55 +00006035 alloc_frame_mvs(cm, &frame_bufs[i]);
Yue Chen7cae98f2018-08-24 10:43:16 -07006036 if (aom_realloc_frame_buffer(
6037 &frame_bufs[i].buf, cm->width, cm->height,
6038 seq_params->subsampling_x, seq_params->subsampling_y,
Satish Kumar Suman29909962019-01-09 10:31:21 +05306039 seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
Yue Chen7cae98f2018-08-24 10:43:16 -07006040 cm->byte_alignment, NULL, NULL, NULL))
6041 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
6042 "Failed to allocate frame buffer");
6043
6044 recon_frame_index[frame_idx] = i;
6045 ++frame_idx;
6046 }
6047 }
6048
6049 for (i = 0; i < INTER_REFS_PER_FRAME + 1; ++i) {
6050 assert(recon_frame_index[i] >= 0);
6051 cpi->tpl_recon_frames[i] = &frame_bufs[recon_frame_index[i]].buf;
6052 }
6053
6054 *tpl_group_frames = 0;
6055
6056 // Initialize Golden reference frame.
David Turnera21966b2018-12-05 14:48:49 +00006057 gf_picture[0].frame = NULL;
6058 RefCntBuffer *ref_buf = get_ref_frame_buf(cm, GOLDEN_FRAME);
6059 if (ref_buf) gf_picture[0].frame = &ref_buf->buf;
Yue Chen7cae98f2018-08-24 10:43:16 -07006060 for (i = 0; i < 7; ++i) gf_picture[0].ref_frame[i] = -1;
6061 gld_index = 0;
6062 ++*tpl_group_frames;
6063
6064 // Initialize ARF frame
6065 gf_picture[1].frame = cpi->source;
6066 gf_picture[1].ref_frame[0] = gld_index;
6067 gf_picture[1].ref_frame[1] = lst_index;
6068 gf_picture[1].ref_frame[2] = alt_index;
6069 // TODO(yuec) Need o figure out full AV1 reference model
6070 for (i = 3; i < 7; ++i) gf_picture[1].ref_frame[i] = -1;
6071 alt_index = 1;
6072 ++*tpl_group_frames;
6073
6074 // Initialize P frames
6075 for (frame_idx = 2; frame_idx < MAX_LAG_BUFFERS; ++frame_idx) {
6076 struct lookahead_entry *buf =
6077 av1_lookahead_peek(cpi->lookahead, frame_idx - 2);
6078
6079 if (buf == NULL) break;
6080
6081 gf_picture[frame_idx].frame = &buf->img;
6082 gf_picture[frame_idx].ref_frame[0] = gld_index;
6083 gf_picture[frame_idx].ref_frame[1] = lst_index;
6084 gf_picture[frame_idx].ref_frame[2] = alt_index;
6085 for (i = 3; i < 7; ++i) gf_picture[frame_idx].ref_frame[i] = -1;
6086
6087 ++*tpl_group_frames;
6088 lst_index = frame_idx;
6089
6090 if (frame_idx == cpi->rc.baseline_gf_interval + 1) break;
6091 }
6092
6093 gld_index = frame_idx;
6094 lst_index = AOMMAX(0, frame_idx - 1);
6095 alt_index = -1;
6096 ++frame_idx;
6097
6098 // Extend two frames outside the current gf group.
6099 for (; frame_idx < MAX_LAG_BUFFERS && extend_frame_count < 2; ++frame_idx) {
6100 struct lookahead_entry *buf =
6101 av1_lookahead_peek(cpi->lookahead, frame_idx - 2);
6102
6103 if (buf == NULL) break;
6104
6105 cpi->tpl_stats[frame_idx].base_qindex = pframe_qindex;
6106
6107 gf_picture[frame_idx].frame = &buf->img;
6108 gf_picture[frame_idx].ref_frame[0] = gld_index;
6109 gf_picture[frame_idx].ref_frame[1] = lst_index;
6110 gf_picture[frame_idx].ref_frame[2] = alt_index;
6111 for (i = 3; i < 7; ++i) gf_picture[frame_idx].ref_frame[i] = -1;
6112 lst_index = frame_idx;
6113 ++*tpl_group_frames;
6114 ++extend_frame_count;
6115 }
6116}
6117
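// Resets the per-frame TPL statistics buffers and marks them invalid.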
Sarah Parkercf644442018-10-11 15:23:44 -07006118static void init_tpl_stats(AV1_COMP *cpi) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006119 int frame_idx;
6120 for (frame_idx = 0; frame_idx < MAX_LAG_BUFFERS; ++frame_idx) {
6121 TplDepFrame *tpl_frame = &cpi->tpl_stats[frame_idx];
6122 memset(tpl_frame->tpl_stats_ptr, 0,
6123 tpl_frame->height * tpl_frame->width *
6124 sizeof(*tpl_frame->tpl_stats_ptr));
6125 tpl_frame->is_valid = 0;
6126 }
6127}
6128
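// Runs a full-pixel search (NSTEP) followed by sub-pixel refinement for the
// block at (mi_row, mi_col) against a single reference buffer, starting from
// a zero reference MV; returns the best sub-pixel error and leaves the best
// MV in x->best_mv.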
Sarah Parkercf644442018-10-11 15:23:44 -07006129static uint32_t motion_compensated_prediction(AV1_COMP *cpi, ThreadData *td,
6130 uint8_t *cur_frame_buf,
6131 uint8_t *ref_frame_buf,
6132 int stride, BLOCK_SIZE bsize,
6133 int mi_row, int mi_col) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006134 AV1_COMMON *cm = &cpi->common;
6135 MACROBLOCK *const x = &td->mb;
6136 MACROBLOCKD *const xd = &x->e_mbd;
6137 MV_SPEED_FEATURES *const mv_sf = &cpi->sf.mv;
6138 const SEARCH_METHODS search_method = NSTEP;
6139 int step_param;
6140 int sadpb = x->sadperbit16;
6141 uint32_t bestsme = UINT_MAX;
6142 int distortion;
6143 uint32_t sse;
6144 int cost_list[5];
6145 const MvLimits tmp_mv_limits = x->mv_limits;
6146
6147 MV best_ref_mv1 = { 0, 0 };
6148 MV best_ref_mv1_full; /* full-pixel value of best_ref_mv1 */
6149
6150 best_ref_mv1_full.col = best_ref_mv1.col >> 3;
6151 best_ref_mv1_full.row = best_ref_mv1.row >> 3;
6152
6153 // Setup frame pointers
6154 x->plane[0].src.buf = cur_frame_buf;
6155 x->plane[0].src.stride = stride;
6156 xd->plane[0].pre[0].buf = ref_frame_buf;
6157 xd->plane[0].pre[0].stride = stride;
6158
6159 step_param = mv_sf->reduce_first_step_size;
6160 step_param = AOMMIN(step_param, MAX_MVSEARCH_STEPS - 2);
6161
6162 av1_set_mv_search_range(&x->mv_limits, &best_ref_mv1);
6163
6164 av1_full_pixel_search(cpi, x, bsize, &best_ref_mv1_full, step_param,
6165 search_method, 0, sadpb, cond_cost_list(cpi, cost_list),
6166 &best_ref_mv1, INT_MAX, 0, (MI_SIZE * mi_col),
6167 (MI_SIZE * mi_row), 0);
6168
6169 /* restore UMV window */
6170 x->mv_limits = tmp_mv_limits;
6171
6172 const int pw = block_size_wide[bsize];
6173 const int ph = block_size_high[bsize];
6174 bestsme = cpi->find_fractional_mv_step(
6175 x, cm, mi_row, mi_col, &best_ref_mv1, cpi->common.allow_high_precision_mv,
6176 x->errorperbit, &cpi->fn_ptr[bsize], 0, mv_sf->subpel_iters_per_step,
6177 cond_cost_list(cpi, cost_list), NULL, NULL, &distortion, &sse, NULL, NULL,
6178 0, 0, pw, ph, 1, 1);
6179
6180 return bestsme;
6181}
6182
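// Returns the overlap area, in pixels, between the grid-aligned block at
// (grid_pos_row, grid_pos_col) and the motion-displaced block at
// (ref_pos_row, ref_pos_col); 'block' identifies which of the four covered
// grid blocks is considered (0: top-left, 1: top-right, 2: bottom-left,
// 3: bottom-right).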
Sarah Parkercf644442018-10-11 15:23:44 -07006183static int get_overlap_area(int grid_pos_row, int grid_pos_col, int ref_pos_row,
6184 int ref_pos_col, int block, BLOCK_SIZE bsize) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006185 int width = 0, height = 0;
6186 int bw = 4 << mi_size_wide_log2[bsize];
6187 int bh = 4 << mi_size_high_log2[bsize];
6188
6189 switch (block) {
6190 case 0:
6191 width = grid_pos_col + bw - ref_pos_col;
6192 height = grid_pos_row + bh - ref_pos_row;
6193 break;
6194 case 1:
6195 width = ref_pos_col + bw - grid_pos_col;
6196 height = grid_pos_row + bh - ref_pos_row;
6197 break;
6198 case 2:
6199 width = grid_pos_col + bw - ref_pos_col;
6200 height = ref_pos_row + bh - grid_pos_row;
6201 break;
6202 case 3:
6203 width = ref_pos_col + bw - grid_pos_col;
6204 height = ref_pos_row + bh - grid_pos_row;
6205 break;
6206 default: assert(0);
6207 }
6208
6209 return width * height;
6210}
6211
Sarah Parkercf644442018-10-11 15:23:44 -07006212static int round_floor(int ref_pos, int bsize_pix) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006213 int round;
6214 if (ref_pos < 0)
6215 round = -(1 + (-ref_pos - 1) / bsize_pix);
6216 else
6217 round = ref_pos / bsize_pix;
6218
6219 return round;
6220}
6221
Sarah Parkercf644442018-10-11 15:23:44 -07006222static void tpl_model_store(TplDepStats *tpl_stats, int mi_row, int mi_col,
6223 BLOCK_SIZE bsize, int stride,
6224 const TplDepStats *src_stats) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006225 const int mi_height = mi_size_high[bsize];
6226 const int mi_width = mi_size_wide[bsize];
6227 int idx, idy;
6228
6229 int64_t intra_cost = src_stats->intra_cost / (mi_height * mi_width);
6230 int64_t inter_cost = src_stats->inter_cost / (mi_height * mi_width);
6231
6232 TplDepStats *tpl_ptr;
6233
6234 intra_cost = AOMMAX(1, intra_cost);
6235 inter_cost = AOMMAX(1, inter_cost);
6236
6237 for (idy = 0; idy < mi_height; ++idy) {
6238 tpl_ptr = &tpl_stats[(mi_row + idy) * stride + mi_col];
6239 for (idx = 0; idx < mi_width; ++idx) {
6240 tpl_ptr->intra_cost = intra_cost;
6241 tpl_ptr->inter_cost = inter_cost;
6242 tpl_ptr->mc_dep_cost = tpl_ptr->intra_cost + tpl_ptr->mc_flow;
6243 tpl_ptr->ref_frame_index = src_stats->ref_frame_index;
6244 tpl_ptr->mv.as_int = src_stats->mv.as_int;
6245 ++tpl_ptr;
6246 }
6247 }
6248}
6249
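// Propagates this block's motion-compensated dependency cost back to the
// blocks in its reference frame, splitting the contribution among up to four
// overlapped grid blocks in proportion to the overlap area.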
Sarah Parkercf644442018-10-11 15:23:44 -07006250static void tpl_model_update_b(TplDepFrame *tpl_frame, TplDepStats *tpl_stats,
6251 int mi_row, int mi_col, const BLOCK_SIZE bsize) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006252 TplDepFrame *ref_tpl_frame = &tpl_frame[tpl_stats->ref_frame_index];
6253 TplDepStats *ref_stats = ref_tpl_frame->tpl_stats_ptr;
6254 MV mv = tpl_stats->mv.as_mv;
6255 int mv_row = mv.row >> 3;
6256 int mv_col = mv.col >> 3;
6257
6258 int ref_pos_row = mi_row * MI_SIZE + mv_row;
6259 int ref_pos_col = mi_col * MI_SIZE + mv_col;
6260
6261 const int bw = 4 << mi_size_wide_log2[bsize];
6262 const int bh = 4 << mi_size_high_log2[bsize];
6263 const int mi_height = mi_size_high[bsize];
6264 const int mi_width = mi_size_wide[bsize];
6265 const int pix_num = bw * bh;
6266
6267 // top-left on grid block location in pixel
6268 int grid_pos_row_base = round_floor(ref_pos_row, bh) * bh;
6269 int grid_pos_col_base = round_floor(ref_pos_col, bw) * bw;
6270 int block;
6271
6272 for (block = 0; block < 4; ++block) {
6273 int grid_pos_row = grid_pos_row_base + bh * (block >> 1);
6274 int grid_pos_col = grid_pos_col_base + bw * (block & 0x01);
6275
6276 if (grid_pos_row >= 0 && grid_pos_row < ref_tpl_frame->mi_rows * MI_SIZE &&
6277 grid_pos_col >= 0 && grid_pos_col < ref_tpl_frame->mi_cols * MI_SIZE) {
6278 int overlap_area = get_overlap_area(
6279 grid_pos_row, grid_pos_col, ref_pos_row, ref_pos_col, block, bsize);
6280 int ref_mi_row = round_floor(grid_pos_row, bh) * mi_height;
6281 int ref_mi_col = round_floor(grid_pos_col, bw) * mi_width;
6282
6283 int64_t mc_flow = tpl_stats->mc_dep_cost -
6284 (tpl_stats->mc_dep_cost * tpl_stats->inter_cost) /
6285 tpl_stats->intra_cost;
6286
6287 int idx, idy;
6288
6289 for (idy = 0; idy < mi_height; ++idy) {
6290 for (idx = 0; idx < mi_width; ++idx) {
6291 TplDepStats *des_stats =
6292 &ref_stats[(ref_mi_row + idy) * ref_tpl_frame->stride +
6293 (ref_mi_col + idx)];
6294
6295 des_stats->mc_flow += (mc_flow * overlap_area) / pix_num;
6296 des_stats->mc_ref_cost +=
6297 ((tpl_stats->intra_cost - tpl_stats->inter_cost) * overlap_area) /
6298 pix_num;
6299 assert(overlap_area >= 0);
6300 }
6301 }
6302 }
6303 }
6304}
6305
Sarah Parkercf644442018-10-11 15:23:44 -07006306static void tpl_model_update(TplDepFrame *tpl_frame, TplDepStats *tpl_stats,
6307 int mi_row, int mi_col, const BLOCK_SIZE bsize) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006308 int idx, idy;
6309 const int mi_height = mi_size_high[bsize];
6310 const int mi_width = mi_size_wide[bsize];
6311
6312 for (idy = 0; idy < mi_height; ++idy) {
6313 for (idx = 0; idx < mi_width; ++idx) {
6314 TplDepStats *tpl_ptr =
6315 &tpl_stats[(mi_row + idy) * tpl_frame->stride + (mi_col + idx)];
6316 tpl_model_update_b(tpl_frame, tpl_ptr, mi_row + idy, mi_col + idx,
6317 BLOCK_4X4);
6318 }
6319 }
6320}
6321
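// Quantizes and dequantizes the transform coefficients with the fast-path
// quantizer and reports the reconstruction error and SSE, scaled down for
// transform sizes smaller than 32x32.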
Sarah Parkercf644442018-10-11 15:23:44 -07006322static void get_quantize_error(MACROBLOCK *x, int plane, tran_low_t *coeff,
6323 tran_low_t *qcoeff, tran_low_t *dqcoeff,
6324 TX_SIZE tx_size, int64_t *recon_error,
6325 int64_t *sse) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006326 const struct macroblock_plane *const p = &x->plane[plane];
6327 const SCAN_ORDER *const scan_order = &av1_default_scan_orders[tx_size];
6328 uint16_t eob;
6329 int pix_num = 1 << num_pels_log2_lookup[txsize_to_bsize[tx_size]];
6330 const int shift = tx_size == TX_32X32 ? 0 : 2;
6331
6332 av1_quantize_fp_32x32(coeff, pix_num, p->zbin_QTX, p->round_fp_QTX,
6333 p->quant_fp_QTX, p->quant_shift_QTX, qcoeff, dqcoeff,
6334 p->dequant_QTX, &eob, scan_order->scan,
6335 scan_order->iscan);
6336
6337 *recon_error = av1_block_error(coeff, dqcoeff, pix_num, sse) >> shift;
6338 *recon_error = AOMMAX(*recon_error, 1);
6339
6340 *sse = (*sse) >> shift;
6341 *sse = AOMMAX(*sse, 1);
6342}
6343
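// Forward Hadamard transform of the residual block; only the square sizes
// 8x8, 16x16 and 32x32 are handled here.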
Sarah Parkercf644442018-10-11 15:23:44 -07006344static void wht_fwd_txfm(int16_t *src_diff, int bw, tran_low_t *coeff,
6345 TX_SIZE tx_size) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006346 switch (tx_size) {
6347 case TX_8X8: aom_hadamard_8x8(src_diff, bw, coeff); break;
6348 case TX_16X16: aom_hadamard_16x16(src_diff, bw, coeff); break;
6349 case TX_32X32: aom_hadamard_32x32(src_diff, bw, coeff); break;
6350 default: assert(0);
6351 }
6352}
6353
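// Estimates the best intra cost (DC through Paeth modes) and the best inter
// cost (motion search against each available reference) for one block, in
// SATD of the Hadamard-transformed residual, and records the winners in
// tpl_stats for the temporal dependency model.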
Sarah Parkercf644442018-10-11 15:23:44 -07006354static void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, MACROBLOCKD *xd,
6355 struct scale_factors *sf, GF_PICTURE *gf_picture,
6356 int frame_idx, int16_t *src_diff, tran_low_t *coeff,
6357 tran_low_t *qcoeff, tran_low_t *dqcoeff, int mi_row,
6358 int mi_col, BLOCK_SIZE bsize, TX_SIZE tx_size,
6359 YV12_BUFFER_CONFIG *ref_frame[], uint8_t *predictor,
6360 int64_t *recon_error, int64_t *sse,
6361 TplDepStats *tpl_stats) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006362 AV1_COMMON *cm = &cpi->common;
6363 ThreadData *td = &cpi->td;
6364
6365 const int bw = 4 << mi_size_wide_log2[bsize];
6366 const int bh = 4 << mi_size_high_log2[bsize];
6367 const int pix_num = bw * bh;
6368 int best_rf_idx = -1;
6369 int_mv best_mv;
6370 int64_t best_inter_cost = INT64_MAX;
6371 int64_t inter_cost;
6372 int rf_idx;
6373 const InterpFilters kernel =
6374 av1_make_interp_filters(EIGHTTAP_REGULAR, EIGHTTAP_REGULAR);
6375
6376 int64_t best_intra_cost = INT64_MAX;
6377 int64_t intra_cost;
6378 PREDICTION_MODE mode;
6379 int mb_y_offset = mi_row * MI_SIZE * xd->cur_buf->y_stride + mi_col * MI_SIZE;
6380 MB_MODE_INFO mi_above, mi_left;
6381
6382 memset(tpl_stats, 0, sizeof(*tpl_stats));
6383
6384 xd->mb_to_top_edge = -((mi_row * MI_SIZE) * 8);
6385 xd->mb_to_bottom_edge = ((cm->mi_rows - 1 - mi_row) * MI_SIZE) * 8;
6386 xd->mb_to_left_edge = -((mi_col * MI_SIZE) * 8);
6387 xd->mb_to_right_edge = ((cm->mi_cols - 1 - mi_col) * MI_SIZE) * 8;
6388 xd->above_mbmi = (mi_row > 0) ? &mi_above : NULL;
6389 xd->left_mbmi = (mi_col > 0) ? &mi_left : NULL;
6390
6391 // Intra prediction search
6392 for (mode = DC_PRED; mode <= PAETH_PRED; ++mode) {
6393 uint8_t *src, *dst;
6394 int src_stride, dst_stride;
6395
6396 src = xd->cur_buf->y_buffer + mb_y_offset;
6397 src_stride = xd->cur_buf->y_stride;
6398
6399 dst = &predictor[0];
6400 dst_stride = bw;
6401
6402 xd->mi[0]->sb_type = bsize;
6403 xd->mi[0]->ref_frame[0] = INTRA_FRAME;
6404
6405 av1_predict_intra_block(
6406 cm, xd, block_size_wide[bsize], block_size_high[bsize], tx_size, mode,
6407 0, 0, FILTER_INTRA_MODES, src, src_stride, dst, dst_stride, 0, 0, 0);
6408
6409 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
6410 aom_highbd_subtract_block(bh, bw, src_diff, bw, src, src_stride, dst,
6411 dst_stride, xd->bd);
6412 } else {
6413 aom_subtract_block(bh, bw, src_diff, bw, src, src_stride, dst,
6414 dst_stride);
6415 }
6416
6417 wht_fwd_txfm(src_diff, bw, coeff, tx_size);
6418
6419 intra_cost = aom_satd(coeff, pix_num);
6420
6421 if (intra_cost < best_intra_cost) best_intra_cost = intra_cost;
6422 }
6423
6424 // Motion compensated prediction
6425 best_mv.as_int = 0;
6426
6427 (void)mb_y_offset;
6428 // Motion estimation column boundary
6429 x->mv_limits.col_min = -((mi_col * MI_SIZE) + (17 - 2 * AOM_INTERP_EXTEND));
6430 x->mv_limits.col_max =
6431 ((cm->mi_cols - 1 - mi_col) * MI_SIZE) + (17 - 2 * AOM_INTERP_EXTEND);
6432
6433 for (rf_idx = 0; rf_idx < 7; ++rf_idx) {
6434 if (ref_frame[rf_idx] == NULL) continue;
6435
6436 motion_compensated_prediction(cpi, td, xd->cur_buf->y_buffer + mb_y_offset,
6437 ref_frame[rf_idx]->y_buffer + mb_y_offset,
6438 xd->cur_buf->y_stride, bsize, mi_row, mi_col);
6439
6440    // TODO(jingning): High bit-depth is not yet supported in the next three
6441    // steps.
6442 ConvolveParams conv_params = get_conv_params(0, 0, xd->bd);
6443 WarpTypesAllowed warp_types;
6444 memset(&warp_types, 0, sizeof(WarpTypesAllowed));
6445
6446 av1_build_inter_predictor(
6447 ref_frame[rf_idx]->y_buffer + mb_y_offset, ref_frame[rf_idx]->y_stride,
6448 &predictor[0], bw, &x->best_mv.as_mv, sf, bw, bh, &conv_params, kernel,
6449 &warp_types, mi_col * MI_SIZE, mi_row * MI_SIZE, 0, 0, MV_PRECISION_Q3,
6450 mi_col * MI_SIZE, mi_row * MI_SIZE, xd, 0);
6451 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
6452 aom_highbd_subtract_block(
6453 bh, bw, src_diff, bw, xd->cur_buf->y_buffer + mb_y_offset,
6454 xd->cur_buf->y_stride, &predictor[0], bw, xd->bd);
6455 } else {
6456 aom_subtract_block(bh, bw, src_diff, bw,
6457 xd->cur_buf->y_buffer + mb_y_offset,
6458 xd->cur_buf->y_stride, &predictor[0], bw);
6459 }
6460 wht_fwd_txfm(src_diff, bw, coeff, tx_size);
6461
6462 inter_cost = aom_satd(coeff, pix_num);
6463 if (inter_cost < best_inter_cost) {
6464 best_rf_idx = rf_idx;
6465 best_inter_cost = inter_cost;
6466 best_mv.as_int = x->best_mv.as_int;
6467 get_quantize_error(x, 0, coeff, qcoeff, dqcoeff, tx_size, recon_error,
6468 sse);
6469 }
6470 }
6471 best_intra_cost = AOMMAX(best_intra_cost, 1);
6472 best_inter_cost = AOMMIN(best_intra_cost, best_inter_cost);
6473 tpl_stats->inter_cost = best_inter_cost << TPL_DEP_COST_SCALE_LOG2;
6474 tpl_stats->intra_cost = best_intra_cost << TPL_DEP_COST_SCALE_LOG2;
6475 tpl_stats->mc_dep_cost = tpl_stats->intra_cost + tpl_stats->mc_flow;
6476
6477 tpl_stats->ref_frame_index = gf_picture[frame_idx].ref_frame[best_rf_idx];
6478 tpl_stats->mv.as_int = best_mv.as_int;
6479}
6480
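// Walks the given frame in 32x32 blocks: for each block it runs
// mode_estimation(), stores the resulting TPL statistics via
// tpl_model_store() and propagates them via tpl_model_update().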
Sarah Parkercf644442018-10-11 15:23:44 -07006481static void mc_flow_dispenser(AV1_COMP *cpi, GF_PICTURE *gf_picture,
6482 int frame_idx) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006483 TplDepFrame *tpl_frame = &cpi->tpl_stats[frame_idx];
6484 YV12_BUFFER_CONFIG *this_frame = gf_picture[frame_idx].frame;
6485 YV12_BUFFER_CONFIG *ref_frame[7] = {
6486 NULL, NULL, NULL, NULL, NULL, NULL, NULL
6487 };
6488
6489 AV1_COMMON *cm = &cpi->common;
6490 struct scale_factors sf;
6491 int rdmult, idx;
6492 ThreadData *td = &cpi->td;
6493 MACROBLOCK *x = &td->mb;
6494 MACROBLOCKD *xd = &x->e_mbd;
6495 int mi_row, mi_col;
6496
6497 DECLARE_ALIGNED(16, uint16_t, predictor16[32 * 32 * 3]);
6498 DECLARE_ALIGNED(16, uint8_t, predictor8[32 * 32 * 3]);
6499 uint8_t *predictor;
6500 DECLARE_ALIGNED(16, int16_t, src_diff[32 * 32]);
6501 DECLARE_ALIGNED(16, tran_low_t, coeff[32 * 32]);
6502 DECLARE_ALIGNED(16, tran_low_t, qcoeff[32 * 32]);
6503 DECLARE_ALIGNED(16, tran_low_t, dqcoeff[32 * 32]);
6504
6505 const BLOCK_SIZE bsize = BLOCK_32X32;
6506 const TX_SIZE tx_size = max_txsize_lookup[bsize];
6507 const int mi_height = mi_size_high[bsize];
6508 const int mi_width = mi_size_wide[bsize];
6509 int64_t recon_error, sse;
6510
6511  // Set up the scaling factor.
6512 av1_setup_scale_factors_for_frame(
6513 &sf, this_frame->y_crop_width, this_frame->y_crop_height,
6514 this_frame->y_crop_width, this_frame->y_crop_height);
6515
6516 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH)
6517 predictor = CONVERT_TO_BYTEPTR(predictor16);
6518 else
6519 predictor = predictor8;
6520
6521 // Prepare reference frame pointers. If any reference frame slot is
6522  // unavailable, the pointer will be set to NULL.
6523 for (idx = 0; idx < 7; ++idx) {
6524 int rf_idx = gf_picture[frame_idx].ref_frame[idx];
6525 if (rf_idx != -1) ref_frame[idx] = gf_picture[rf_idx].frame;
6526 }
6527
6528 xd->mi = cm->mi_grid_visible;
6529 xd->mi[0] = cm->mi;
6530 xd->cur_buf = this_frame;
6531
6532 // Get rd multiplier set up.
6533 rdmult = (int)av1_compute_rd_mult(cpi, tpl_frame->base_qindex);
6534 if (rdmult < 1) rdmult = 1;
6535 set_error_per_bit(&cpi->td.mb, rdmult);
6536 av1_initialize_me_consts(cpi, &cpi->td.mb, tpl_frame->base_qindex);
6537
6538 tpl_frame->is_valid = 1;
6539
6540 cm->base_qindex = tpl_frame->base_qindex;
6541 av1_frame_init_quantizer(cpi);
6542
6543 for (mi_row = 0; mi_row < cm->mi_rows; mi_row += mi_height) {
6544 // Motion estimation row boundary
6545 x->mv_limits.row_min = -((mi_row * MI_SIZE) + (17 - 2 * AOM_INTERP_EXTEND));
6546 x->mv_limits.row_max =
6547 (cm->mi_rows - 1 - mi_row) * MI_SIZE + (17 - 2 * AOM_INTERP_EXTEND);
6548 for (mi_col = 0; mi_col < cm->mi_cols; mi_col += mi_width) {
6549 TplDepStats tpl_stats;
6550 mode_estimation(cpi, x, xd, &sf, gf_picture, frame_idx, src_diff, coeff,
6551 qcoeff, dqcoeff, mi_row, mi_col, bsize, tx_size,
6552 ref_frame, predictor, &recon_error, &sse, &tpl_stats);
6553
6554 // Motion flow dependency dispenser.
6555 tpl_model_store(tpl_frame->tpl_stats_ptr, mi_row, mi_col, bsize,
6556 tpl_frame->stride, &tpl_stats);
6557
6558 tpl_model_update(cpi->tpl_stats, tpl_frame->tpl_stats_ptr, mi_row, mi_col,
6559 bsize);
6560 }
6561 }
6562}
6563
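// Builds the list of frames in the current GF group, resets the TPL
// statistics, and then runs the motion-flow dispenser backwards over the
// group, from the last frame down to frame 1.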
6564static void setup_tpl_stats(AV1_COMP *cpi) {
6565 GF_PICTURE gf_picture[MAX_LAG_BUFFERS];
6566 const GF_GROUP *gf_group = &cpi->twopass.gf_group;
6567 int tpl_group_frames = 0;
6568 int frame_idx;
6569
6570 init_gop_frames(cpi, gf_picture, gf_group, &tpl_group_frames);
6571
6572 init_tpl_stats(cpi);
6573
6574 // Backward propagation from tpl_group_frames to 1.
6575 for (frame_idx = tpl_group_frames - 1; frame_idx > 0; --frame_idx)
6576 mc_flow_dispenser(cpi, gf_picture, frame_idx);
6577}
6578
David Turner0308a5a2019-01-07 10:36:16 +00006579// Determine whether there is a forced keyframe pending in the lookahead buffer
6580static int is_forced_keyframe_pending(struct lookahead_ctx *lookahead,
6581 const int up_to_index) {
6582 for (int i = 0; i <= up_to_index; i++) {
6583 const struct lookahead_entry *e = av1_lookahead_peek(lookahead, i);
6584 if (e == NULL) {
6585      // We have reached the end of the lookahead buffer without returning
6586      // early, so there is no forced keyframe pending.
6587 return 0;
6588 } else if (e->flags == AOM_EFLAG_FORCE_KF) {
6589 return 1;
6590 } else {
6591 continue;
6592 }
6593 }
6594  return 0;  // No forced keyframe found up to up_to_index.
6595}
6596
6597// Don't allow a show_existing_frame to coincide with an error resilient
6598// frame or an S-Frame. An exception can be made for a keyframe, since it does
6599// not depend on any previous frames.
6600static int allow_show_existing(const AV1_COMP *const cpi) {
6601 if (cpi->common.current_frame.frame_number == 0) return 0;
6602
6603 const struct lookahead_entry *lookahead_src =
6604 av1_lookahead_peek(cpi->lookahead, 0);
6605 if (lookahead_src == NULL) return 1;
6606
6607 const int is_error_resilient =
6608 cpi->oxcf.error_resilient_mode ||
6609 (lookahead_src->flags & AOM_EFLAG_ERROR_RESILIENT);
6610 const int is_s_frame =
6611 cpi->oxcf.s_frame_mode || (lookahead_src->flags & AOM_EFLAG_SET_S_FRAME);
6612 const int is_key_frame =
6613 (cpi->rc.frames_to_key == 0) || (cpi->frame_flags & FRAMEFLAGS_KEY);
6614 return !(is_error_resilient || is_s_frame) || is_key_frame;
6615}
6616
David Turner85287b42019-01-10 16:11:59 +00006617// Called if this frame is an ARF or ARF2. Also handles forward keyframes.
6618// For an ARF set arf2=0; for an ARF2 set arf2=1.
6619// temporal_filtered is set to 1 if we temporally filter the ARF frame, so that
6620// the correct post-filter buffer can be used.
6621static struct lookahead_entry *setup_arf_or_arf2(AV1_COMP *const cpi,
6622 const int arf_src_index,
6623 const int arf2,
6624 int *temporal_filtered) {
6625 AV1_COMMON *const cm = &cpi->common;
6626 RATE_CONTROL *const rc = &cpi->rc;
6627 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
6628
6629 assert(arf_src_index <= rc->frames_to_key);
6630 *temporal_filtered = 0;
6631
6632 struct lookahead_entry *source =
6633 av1_lookahead_peek(cpi->lookahead, arf_src_index);
6634
6635 if (source != NULL) {
6636 cm->showable_frame = 1;
6637 cpi->alt_ref_source = source;
6638
6639 // When arf_src_index == rc->frames_to_key, it indicates a fwd_kf
6640 if (!arf2 && arf_src_index == rc->frames_to_key) {
6641 // Skip temporal filtering and mark as intra_only if we have a fwd_kf
6642 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
6643 int which_arf = gf_group->arf_update_idx[gf_group->index];
6644 cpi->is_arf_filter_off[which_arf] = 1;
6645 cpi->no_show_kf = 1;
6646 } else {
6647 if (oxcf->arnr_max_frames > 0) {
6648 // Produce the filtered ARF frame.
6649 av1_temporal_filter(cpi, arf_src_index);
6650 aom_extend_frame_borders(&cpi->alt_ref_buffer, av1_num_planes(cm));
6651 *temporal_filtered = 1;
6652 }
6653 }
6654 cm->show_frame = 0;
6655
6656 if (oxcf->pass < 2) {
6657      // In the second pass, the buffer update configuration is set
6658      // in av1_rc_get_second_pass_params().
6659 if (!arf2) {
6660 av1_configure_buffer_updates_firstpass(cpi, ARF_UPDATE);
6661 } else {
6662 av1_configure_buffer_updates_firstpass(cpi, INTNL_ARF_UPDATE);
6663 }
6664 }
6665 }
6666 rc->source_alt_ref_pending = 0;
6667 return source;
6668}
6669
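// Top-level per-frame entry point for the encoder. Pulls the next source
// frame (or an ARF, ARF2, BWDREF or show_existing candidate) from the
// lookahead buffer, configures the frame parameters, and then runs the
// first-pass, second-pass or one-pass encode to produce the compressed data
// in *dest.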
Andrey Norkin795ba872018-03-06 13:24:14 -08006670int av1_get_compressed_data(AV1_COMP *cpi, unsigned int *frame_flags,
6671 size_t *size, uint8_t *dest, int64_t *time_stamp,
6672 int64_t *time_end, int flush,
6673 const aom_rational_t *timebase) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006674 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
6675 AV1_COMMON *const cm = &cpi->common;
David Turnerd2a592e2018-11-16 14:59:31 +00006676 CurrentFrame *const current_frame = &cm->current_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006677 RATE_CONTROL *const rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -07006678 struct aom_usec_timer cmptimer;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006679 struct lookahead_entry *last_source = NULL;
6680 struct lookahead_entry *source = NULL;
6681 int arf_src_index;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006682 int brf_src_index;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006683 int i;
6684
6685#if CONFIG_BITSTREAM_DEBUG
6686 assert(cpi->oxcf.max_threads == 0 &&
6687 "bitstream debug tool does not support multithreading");
6688 bitstream_queue_record_write();
David Turnerd2a592e2018-11-16 14:59:31 +00006689 bitstream_queue_set_frame_write(current_frame->frame_number * 2 +
6690 cm->show_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006691#endif
6692
Dominic Symesd4929012018-01-31 17:32:01 +01006693 cm->showable_frame = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07006694 aom_usec_timer_start(&cmptimer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006695
RogerZhou3b635242017-09-19 10:06:46 -07006696 set_high_precision_mv(cpi, ALTREF_HIGH_PRECISION_MV, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006697
Debargha Mukherjeeba7b8fe2018-03-15 23:10:07 -07006698 // Normal defaults
sarahparker27d686a2018-03-30 17:43:44 -07006699 cm->refresh_frame_context = oxcf->frame_parallel_decoding_mode
6700 ? REFRESH_FRAME_CONTEXT_DISABLED
6701 : REFRESH_FRAME_CONTEXT_BACKWARD;
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01006702 if (oxcf->large_scale_tile)
James Zernf34dfc82018-02-23 16:53:33 -08006703 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_DISABLED;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006704
Wei-Ting Lin2e8d0452018-06-27 09:32:39 -07006705  // Default reference buffer update configuration.
6706 av1_configure_buffer_updates_firstpass(cpi, LF_UPDATE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006707
Sarah Parkerb9041612018-05-22 19:06:47 -07006708 // Initialize fields related to forward keyframes
Sarah Parkeraf32a7b2018-06-29 14:59:05 -07006709 cpi->no_show_kf = 0;
Zoe Liub4991202017-12-21 15:31:06 -08006710 cm->reset_decoder_state = 0;
Zoe Liub4991202017-12-21 15:31:06 -08006711
David Turner0308a5a2019-01-07 10:36:16 +00006712 if (oxcf->pass == 2 && cm->show_existing_frame && allow_show_existing(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07006713 // Manage the source buffer and flush out the source frame that has been
6714    // coded already; also prepare for PSNR calculation if needed.
Yaowu Xuf883b422016-08-30 14:01:10 -07006715 if ((source = av1_lookahead_pop(cpi->lookahead, flush)) == NULL) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07006716 *size = 0;
6717 return -1;
6718 }
sarahparker21dbca42018-03-30 17:43:44 -07006719 av1_apply_encoding_flags(cpi, source->flags);
Alex Conversef77fd0b2017-04-20 11:00:24 -07006720 cpi->source = &source->img;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006721    // TODO(zoeliu): Track down whether the frame rate needs to be adjusted
6722    // here.
6723 *time_stamp = source->ts_start;
6724 *time_end = source->ts_end;
6725
6726 // We need to adjust frame rate for an overlay frame
Zoe Liue04abf72017-04-19 15:37:11 -07006727 if (cpi->rc.is_src_frame_alt_ref) adjust_frame_rate(cpi, source);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006728
David Turner0308a5a2019-01-07 10:36:16 +00006729 if (assign_cur_frame_new_fb(cm) == NULL) return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006730
6731 // Clear down mmx registers
Yaowu Xuf883b422016-08-30 14:01:10 -07006732 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006733
6734 // Start with a 0 size frame.
6735 *size = 0;
6736
6737    // We need to update the gf_group for a show_existing overlay frame.
Zoe Liue04abf72017-04-19 15:37:11 -07006738 if (cpi->rc.is_src_frame_alt_ref) av1_rc_get_second_pass_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006739
Tom Finegane4099e32018-01-23 12:01:51 -08006740 if (Pass2Encode(cpi, size, dest, frame_flags) != AOM_CODEC_OK)
6741 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006742
6743 if (cpi->b_calculate_psnr) generate_psnr_packet(cpi);
6744
6745#if CONFIG_INTERNAL_STATS
Angie Chiang08a22a62017-07-17 17:29:17 -07006746 compute_internal_stats(cpi, (int)(*size));
Yaowu Xuc27fc142016-08-22 16:08:15 -07006747#endif // CONFIG_INTERNAL_STATS
6748
6749 // Clear down mmx registers
Yaowu Xuf883b422016-08-30 14:01:10 -07006750 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006751
6752 cm->show_existing_frame = 0;
6753 return 0;
6754 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006755
David Turner85287b42019-01-10 16:11:59 +00006756 int temporal_filtered = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006757  // Should we encode an arf frame?
6758 arf_src_index = get_arf_src_index(cpi);
David Turner0308a5a2019-01-07 10:36:16 +00006759 if (arf_src_index &&
6760 is_forced_keyframe_pending(cpi->lookahead, arf_src_index)) {
6761 arf_src_index = 0;
6762 flush = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006763 }
6764
6765 if (arf_src_index) {
David Turner85287b42019-01-10 16:11:59 +00006766 source = setup_arf_or_arf2(cpi, arf_src_index, 0, &temporal_filtered);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006767 }
6768
David Turner85287b42019-01-10 16:11:59 +00006769  // Should we encode an arf2 frame (mutually exclusive with an ARF)?
Zoe Liue9b15e22017-07-19 15:53:01 -07006770 arf_src_index = get_arf2_src_index(cpi);
David Turner0308a5a2019-01-07 10:36:16 +00006771 if (arf_src_index &&
6772 is_forced_keyframe_pending(cpi->lookahead, arf_src_index)) {
6773 arf_src_index = 0;
6774 flush = 1;
Zoe Liue9b15e22017-07-19 15:53:01 -07006775 }
6776
6777 if (arf_src_index) {
David Turner85287b42019-01-10 16:11:59 +00006778 source = setup_arf_or_arf2(cpi, arf_src_index, 1, &temporal_filtered);
Zoe Liue9b15e22017-07-19 15:53:01 -07006779 }
Zoe Liue9b15e22017-07-19 15:53:01 -07006780
Yaowu Xuc27fc142016-08-22 16:08:15 -07006781 rc->is_bwd_ref_frame = 0;
6782 brf_src_index = get_brf_src_index(cpi);
6783 if (brf_src_index) {
6784 assert(brf_src_index <= rc->frames_to_key);
Yaowu Xuf883b422016-08-30 14:01:10 -07006785 if ((source = av1_lookahead_peek(cpi->lookahead, brf_src_index)) != NULL) {
Dominic Symesd4929012018-01-31 17:32:01 +01006786 cm->showable_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006787 cm->show_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006788
Wei-Ting Lin2e8d0452018-06-27 09:32:39 -07006789 if (oxcf->pass < 2) {
6790        // In the second pass, the buffer update configuration is set
6791        // in av1_rc_get_second_pass_params().
6792 av1_configure_buffer_updates_firstpass(cpi, BIPRED_UPDATE);
6793 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006794 }
6795 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006796
6797 if (!source) {
6798 // Get last frame source.
David Turnerd2a592e2018-11-16 14:59:31 +00006799 if (current_frame->frame_number > 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006800 if ((last_source = av1_lookahead_peek(cpi->lookahead, -1)) == NULL)
Yaowu Xuc27fc142016-08-22 16:08:15 -07006801 return -1;
6802 }
David Turnerd2a592e2018-11-16 14:59:31 +00006803 if (current_frame->frame_number > 0) assert(last_source != NULL);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006804 // Read in the source frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07006805 source = av1_lookahead_pop(cpi->lookahead, flush);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006806
6807 if (source != NULL) {
6808 cm->show_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006809
6810 // Check to see if the frame should be encoded as an arf overlay.
6811 check_src_altref(cpi, source);
6812 }
6813 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006814 if (source) {
David Turner85287b42019-01-10 16:11:59 +00006815 if (temporal_filtered) {
6816 cpi->unscaled_source = &cpi->alt_ref_buffer;
6817 cpi->source = &cpi->alt_ref_buffer;
6818 } else {
6819 cpi->unscaled_source = &source->img;
6820 cpi->source = &source->img;
6821 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006822 cpi->unscaled_last_source = last_source != NULL ? &last_source->img : NULL;
6823
6824 *time_stamp = source->ts_start;
6825 *time_end = source->ts_end;
Sarah Parker73556772018-03-28 18:28:05 -07006826 av1_apply_encoding_flags(cpi, source->flags);
Yaowu Xuf883b422016-08-30 14:01:10 -07006827 *frame_flags = (source->flags & AOM_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006828
6829 } else {
6830 *size = 0;
6831 if (flush && oxcf->pass == 1 && !cpi->twopass.first_pass_done) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006832 av1_end_first_pass(cpi); /* get last stats packet */
Yaowu Xuc27fc142016-08-22 16:08:15 -07006833 cpi->twopass.first_pass_done = 1;
6834 }
6835 return -1;
6836 }
6837
6838 if (source->ts_start < cpi->first_time_stamp_ever) {
6839 cpi->first_time_stamp_ever = source->ts_start;
6840 cpi->last_end_time_stamp_seen = source->ts_start;
6841 }
6842
6843 // Clear down mmx registers
Yaowu Xuf883b422016-08-30 14:01:10 -07006844 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006845
6846 // adjust frame rates based on timestamps given
6847 if (cm->show_frame) adjust_frame_rate(cpi, source);
6848
David Turner0308a5a2019-01-07 10:36:16 +00006849 if (assign_cur_frame_new_fb(cm) == NULL) return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006850
Zoe Liuf452fdf2017-11-02 23:08:12 -07006851 // Retain the RF_LEVEL for the current newly coded frame.
David Turner1bcefb32018-11-19 17:54:00 +00006852 cm->cur_frame->frame_rf_level =
Zoe Liuf452fdf2017-11-02 23:08:12 -07006853 cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
Zoe Liuf452fdf2017-11-02 23:08:12 -07006854
Yaowu Xu9b0f7032017-07-31 11:01:19 -07006855 cm->cur_frame->buf.buf_8bit_valid = 0;
Neil Birkbeckeb895ef2018-03-14 17:51:03 -07006856
Neil Birkbecka2893ab2018-06-08 14:45:13 -07006857 if (cpi->film_grain_table) {
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07006858 cm->seq_params.film_grain_params_present = aom_film_grain_table_lookup(
Neil Birkbecka2893ab2018-06-08 14:45:13 -07006859 cpi->film_grain_table, *time_stamp, *time_end, 0 /* =erase */,
Neil Birkbeckeb895ef2018-03-14 17:51:03 -07006860 &cm->film_grain_params);
6861 }
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07006862 cm->cur_frame->film_grain_params_present =
6863 cm->seq_params.film_grain_params_present;
Zoe Liu6cfaff92016-10-18 17:12:11 -07006864
Andrey Norkin795ba872018-03-06 13:24:14 -08006865 // only one operating point supported now
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07006866 const int64_t pts64 = ticks_to_timebase_units(timebase, *time_stamp);
6867 if (pts64 < 0 || pts64 > UINT32_MAX) return AOM_CODEC_ERROR;
6868 cpi->common.frame_presentation_time = (uint32_t)pts64;
Andrey Norkin795ba872018-03-06 13:24:14 -08006869
Yaowu Xuc27fc142016-08-22 16:08:15 -07006870 // Start with a 0 size frame.
6871 *size = 0;
6872
6873 cpi->frame_flags = *frame_flags;
6874
6875 if (oxcf->pass == 2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006876 av1_rc_get_second_pass_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006877 } else if (oxcf->pass == 1) {
Fergus Simpsonbc189932017-05-16 17:02:39 -07006878 setup_frame_size(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006879 }
6880
6881 if (cpi->oxcf.pass != 0 || frame_is_intra_only(cm) == 1) {
David Turnere7ebf902018-12-04 14:04:55 +00006882 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) cpi->scaled_ref_buf[i] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006883 }
6884
Yaowu Xuc27fc142016-08-22 16:08:15 -07006885 cm->using_qmatrix = cpi->oxcf.using_qm;
6886 cm->min_qmlevel = cpi->oxcf.qm_minlevel;
6887 cm->max_qmlevel = cpi->oxcf.qm_maxlevel;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006888
David Turner936235c2018-11-28 13:42:01 +00006889 if (cm->seq_params.frame_id_numbers_present_flag && *time_stamp == 0) {
6890 cpi->common.current_frame_id = -1;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01006891 }
Zoe Liuca0cd3f2018-02-26 15:07:50 -08006892
Debargha Mukherjeee41a6672018-02-27 11:56:31 -08006893 if (oxcf->pass != 1 && cpi->common.allow_screen_content_tools &&
6894 !frame_is_intra_only(cm)) {
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00006895 if (cpi->common.seq_params.force_integer_mv == 2) {
RogerZhou3b635242017-09-19 10:06:46 -07006896 struct lookahead_entry *previous_entry =
Debargha Mukherjeea71e3db2018-02-28 07:47:17 -08006897 av1_lookahead_peek(cpi->lookahead, cpi->previous_index);
6898 if (!previous_entry)
6899 cpi->common.cur_frame_force_integer_mv = 0;
6900 else
6901 cpi->common.cur_frame_force_integer_mv = is_integer_mv(
6902 cpi, cpi->source, &previous_entry->img, cpi->previous_hash_table);
RogerZhou3b635242017-09-19 10:06:46 -07006903 } else {
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00006904 cpi->common.cur_frame_force_integer_mv =
6905 cpi->common.seq_params.force_integer_mv;
RogerZhou3b635242017-09-19 10:06:46 -07006906 }
6907 } else {
RogerZhou10a03802017-10-26 11:49:48 -07006908 cpi->common.cur_frame_force_integer_mv = 0;
RogerZhou3b635242017-09-19 10:06:46 -07006909 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01006910
Yue Chen7cae98f2018-08-24 10:43:16 -07006911 if (cpi->twopass.gf_group.index == 1 && cpi->oxcf.enable_tpl_model) {
6912 set_frame_size(cpi, cm->width, cm->height);
6913 setup_tpl_stats(cpi);
6914 }
6915
Yaowu Xuc27fc142016-08-22 16:08:15 -07006916 if (oxcf->pass == 1) {
6917 cpi->td.mb.e_mbd.lossless[0] = is_lossless_requested(oxcf);
Yaowu Xuf883b422016-08-30 14:01:10 -07006918 av1_first_pass(cpi, source);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006919 } else if (oxcf->pass == 2) {
Tom Finegane4099e32018-01-23 12:01:51 -08006920 if (Pass2Encode(cpi, size, dest, frame_flags) != AOM_CODEC_OK)
6921 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006922 } else {
6923 // One pass encode
David Turner056f7cd2019-01-07 17:48:13 +00006924 if (Pass0Encode(cpi, size, dest, frame_flags) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08006925 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006926 }
RogerZhoucc5d35d2017-08-07 22:20:15 -07006927 if (oxcf->pass != 1 && cpi->common.allow_screen_content_tools) {
Debargha Mukherjeee41a6672018-02-27 11:56:31 -08006928 cpi->previous_hash_table = &cm->cur_frame->hash_table;
RogerZhou3b635242017-09-19 10:06:46 -07006929 {
6930 int l;
6931 for (l = -MAX_PRE_FRAMES; l < cpi->lookahead->max_sz; l++) {
6932 if ((cpi->lookahead->buf + l) == source) {
Debargha Mukherjeee41a6672018-02-27 11:56:31 -08006933 cpi->previous_index = l;
RogerZhou3b635242017-09-19 10:06:46 -07006934 break;
6935 }
6936 }
6937
6938 if (l == cpi->lookahead->max_sz) {
6939 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
6940 "Failed to find last frame original buffer");
6941 }
6942 }
RogerZhoucc5d35d2017-08-07 22:20:15 -07006943 }
6944
Yunqing Wang267e3272017-11-09 14:23:22 -08006945 if (!cm->large_scale_tile) {
David Turner1bcefb32018-11-19 17:54:00 +00006946 cm->cur_frame->frame_context = *cm->fc;
Yunqing Wang267e3272017-11-09 14:23:22 -08006947 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006948
Yunqing Wangb041d8a2017-11-15 12:31:18 -08006949#define EXT_TILE_DEBUG 0
6950#if EXT_TILE_DEBUG
6951 if (cm->large_scale_tile && oxcf->pass == 2) {
6952 char fn[20] = "./fc";
David Turnerd2a592e2018-11-16 14:59:31 +00006953 fn[4] = current_frame->frame_number / 100 + '0';
6954 fn[5] = (current_frame->frame_number % 100) / 10 + '0';
6955 fn[6] = (current_frame->frame_number % 10) + '0';
Yunqing Wangb041d8a2017-11-15 12:31:18 -08006956 fn[7] = '\0';
6957 av1_print_frame_contexts(cm->fc, fn);
6958 }
6959#endif // EXT_TILE_DEBUG
6960#undef EXT_TILE_DEBUG
Yaowu Xuc7119a72018-03-29 09:59:37 -07006961
Dominic Symesd4929012018-01-31 17:32:01 +01006962 cm->showable_frame = !cm->show_frame && cm->showable_frame;
Yunqing Wangb041d8a2017-11-15 12:31:18 -08006963
Yaowu Xuc27fc142016-08-22 16:08:15 -07006964 // No frame encoded, or frame was dropped, release scaled references.
6965 if ((*size == 0) && (frame_is_intra_only(cm) == 0)) {
6966 release_scaled_references(cpi);
6967 }
6968
6969 if (*size > 0) {
Debargha Mukherjee8adee102018-09-25 11:01:00 -07006970 cpi->droppable = is_frame_droppable(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006971 }
6972
Yaowu Xuf883b422016-08-30 14:01:10 -07006973 aom_usec_timer_mark(&cmptimer);
6974 cpi->time_compress_data += aom_usec_timer_elapsed(&cmptimer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006975
6976 if (cpi->b_calculate_psnr && oxcf->pass != 1 && cm->show_frame)
6977 generate_psnr_packet(cpi);
6978
6979#if CONFIG_INTERNAL_STATS
6980 if (oxcf->pass != 1) {
Angie Chiang08a22a62017-07-17 17:29:17 -07006981 compute_internal_stats(cpi, (int)(*size));
Yaowu Xuc27fc142016-08-22 16:08:15 -07006982 }
6983#endif // CONFIG_INTERNAL_STATS
Debargha Mukherjee0857e662019-01-04 16:22:09 -08006984#if CONFIG_SPEED_STATS
6985 if (cpi->oxcf.pass != 1) {
6986 cpi->tx_search_count += cpi->td.mb.tx_search_count;
6987 cpi->td.mb.tx_search_count = 0;
6988 }
6989#endif // CONFIG_SPEED_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07006990
Yaowu Xuf883b422016-08-30 14:01:10 -07006991 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006992
6993 return 0;
6994}
6995
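// Returns the most recently coded frame buffer for preview, with the display
// dimensions filled in. Returns -1 if no displayable frame is available.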
Yaowu Xuf883b422016-08-30 14:01:10 -07006996int av1_get_preview_raw_frame(AV1_COMP *cpi, YV12_BUFFER_CONFIG *dest) {
6997 AV1_COMMON *cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006998 if (!cm->show_frame) {
6999 return -1;
7000 } else {
7001 int ret;
David Turnerc29e1a92018-12-06 14:10:14 +00007002 if (cm->cur_frame != NULL) {
7003 *dest = cm->cur_frame->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007004 dest->y_width = cm->width;
7005 dest->y_height = cm->height;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07007006 dest->uv_width = cm->width >> cm->seq_params.subsampling_x;
7007 dest->uv_height = cm->height >> cm->seq_params.subsampling_y;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007008 ret = 0;
7009 } else {
7010 ret = -1;
7011 }
Yaowu Xuf883b422016-08-30 14:01:10 -07007012 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07007013 return ret;
7014 }
7015}
7016
Yaowu Xuf883b422016-08-30 14:01:10 -07007017int av1_get_last_show_frame(AV1_COMP *cpi, YV12_BUFFER_CONFIG *frame) {
David Turnere7ebf902018-12-04 14:04:55 +00007018 if (cpi->last_show_frame_buf == NULL) return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007019
David Turnere7ebf902018-12-04 14:04:55 +00007020 *frame = cpi->last_show_frame_buf->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007021 return 0;
7022}
7023
Yunqing Wangff9bfca2018-06-06 11:46:08 -07007024static int equal_dimensions_and_border(const YV12_BUFFER_CONFIG *a,
7025 const YV12_BUFFER_CONFIG *b) {
7026 return a->y_height == b->y_height && a->y_width == b->y_width &&
7027 a->uv_height == b->uv_height && a->uv_width == b->uv_width &&
7028 a->y_stride == b->y_stride && a->uv_stride == b->uv_stride &&
7029 a->border == b->border &&
7030 (a->flags & YV12_FLAG_HIGHBITDEPTH) ==
7031 (b->flags & YV12_FLAG_HIGHBITDEPTH);
7032}
7033
Yunqing Wang93b18f32018-06-08 21:08:29 -07007034aom_codec_err_t av1_copy_new_frame_enc(AV1_COMMON *cm,
7035 YV12_BUFFER_CONFIG *new_frame,
7036 YV12_BUFFER_CONFIG *sd) {
Yunqing Wangff9bfca2018-06-06 11:46:08 -07007037 const int num_planes = av1_num_planes(cm);
7038 if (!equal_dimensions_and_border(new_frame, sd))
7039 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
7040 "Incorrect buffer dimensions");
7041 else
7042 aom_yv12_copy_frame(new_frame, sd, num_planes);
7043
7044 return cm->error.error_code;
7045}
7046
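// Applies the requested horizontal and vertical downscaling ratios to the
// configured frame size, rounding up, and records the result as the pending
// resize dimensions. Returns -1 for an unsupported scaling mode.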
Yaowu Xuf883b422016-08-30 14:01:10 -07007047int av1_set_internal_size(AV1_COMP *cpi, AOM_SCALING horiz_mode,
7048 AOM_SCALING vert_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07007049 int hr = 0, hs = 0, vr = 0, vs = 0;
7050
7051 if (horiz_mode > ONETWO || vert_mode > ONETWO) return -1;
7052
7053 Scale2Ratio(horiz_mode, &hr, &hs);
7054 Scale2Ratio(vert_mode, &vr, &vs);
7055
7056 // always go to the next whole number
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07007057 cpi->resize_pending_width = (hs - 1 + cpi->oxcf.width * hr) / hs;
7058 cpi->resize_pending_height = (vs - 1 + cpi->oxcf.height * vr) / vs;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007059
7060 return 0;
7061}
7062
Yaowu Xuf883b422016-08-30 14:01:10 -07007063int av1_get_quantizer(AV1_COMP *cpi) { return cpi->common.base_qindex; }
Yaowu Xuc27fc142016-08-22 16:08:15 -07007064
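// Rewrites a frame's OBUs from the low-overhead bitstream format (section 5,
// obu_has_size_field set) to the length-delimited layout: each OBU becomes a
// leb128-coded size followed by the OBU header (with the size-field flag
// cleared) and its payload. *frame_size is updated to the new length.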
Soo-Chul Han29c46fb2018-03-23 16:02:00 -04007065int av1_convert_sect5obus_to_annexb(uint8_t *buffer, size_t *frame_size) {
7066 size_t output_size = 0;
7067 size_t total_bytes_read = 0;
7068 size_t remaining_size = *frame_size;
7069 uint8_t *buff_ptr = buffer;
7070
7071 // go through each OBUs
7072  // Go through each OBU.
7073 uint8_t saved_obu_header[2];
7074 uint64_t obu_payload_size;
7075 size_t length_of_payload_size;
7076 size_t length_of_obu_size;
7077 uint32_t obu_header_size = (buff_ptr[0] >> 2) & 0x1 ? 2 : 1;
7078 size_t obu_bytes_read = obu_header_size; // bytes read for current obu
7079
7080 // save the obu header (1 or 2 bytes)
7081 memmove(saved_obu_header, buff_ptr, obu_header_size);
7082 // clear the obu_has_size_field
7083 saved_obu_header[0] = saved_obu_header[0] & (~0x2);
7084
7085 // get the payload_size and length of payload_size
7086 if (aom_uleb_decode(buff_ptr + obu_header_size, remaining_size,
7087 &obu_payload_size, &length_of_payload_size) != 0) {
7088 return AOM_CODEC_ERROR;
7089 }
7090 obu_bytes_read += length_of_payload_size;
7091
7092    // calculate the length of the size field for the obu header plus payload
7093 length_of_obu_size =
7094 aom_uleb_size_in_bytes((uint64_t)(obu_header_size + obu_payload_size));
7095
7096 // move the rest of data to new location
7097 memmove(buff_ptr + length_of_obu_size + obu_header_size,
7098 buff_ptr + obu_bytes_read, remaining_size - obu_bytes_read);
Yaowu Xu9e494202018-04-03 11:19:49 -07007099 obu_bytes_read += (size_t)obu_payload_size;
Soo-Chul Han29c46fb2018-03-23 16:02:00 -04007100
7101 // write the new obu size
7102 const uint64_t obu_size = obu_header_size + obu_payload_size;
7103 size_t coded_obu_size;
7104 if (aom_uleb_encode(obu_size, sizeof(obu_size), buff_ptr,
7105 &coded_obu_size) != 0) {
7106 return AOM_CODEC_ERROR;
7107 }
7108
7109 // write the saved (modified) obu_header following obu size
7110 memmove(buff_ptr + length_of_obu_size, saved_obu_header, obu_header_size);
7111
7112 total_bytes_read += obu_bytes_read;
7113 remaining_size -= obu_bytes_read;
7114 buff_ptr += length_of_obu_size + obu_size;
Yaowu Xu9e494202018-04-03 11:19:49 -07007115 output_size += length_of_obu_size + (size_t)obu_size;
Soo-Chul Han29c46fb2018-03-23 16:02:00 -04007116 }
7117
7118 *frame_size = output_size;
7119 return AOM_CODEC_OK;
7120}
7121
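// Translates the per-frame encoding flags supplied by the application into
// the encoder's internal reference usage, reference refresh, error
// resilience, S-frame and entropy-update settings.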
Yaowu Xuf883b422016-08-30 14:01:10 -07007122void av1_apply_encoding_flags(AV1_COMP *cpi, aom_enc_frame_flags_t flags) {
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007123  // TODO(yunqingwang): The external encoding flags that select which
7124  // references to use should be consistent with the internal reference
7125  // frame selection. Need to ensure that there is no conflict between the
7126  // two. In the AV1 encoder, the priority ranking of the 7 reference frames
7127  // is: LAST, ALTREF, LAST2, LAST3, GOLDEN, BWDREF, ALTREF2. If only one
7128  // reference frame is used, it must be LAST.
Yunqing Wangf2e7a392017-11-08 00:27:21 -08007129 cpi->ext_ref_frame_flags = AOM_REFFRAME_ALL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007130 if (flags &
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007131 (AOM_EFLAG_NO_REF_LAST | AOM_EFLAG_NO_REF_LAST2 | AOM_EFLAG_NO_REF_LAST3 |
7132 AOM_EFLAG_NO_REF_GF | AOM_EFLAG_NO_REF_ARF | AOM_EFLAG_NO_REF_BWD |
7133 AOM_EFLAG_NO_REF_ARF2)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07007134 if (flags & AOM_EFLAG_NO_REF_LAST) {
Yunqing Wangf2e7a392017-11-08 00:27:21 -08007135 cpi->ext_ref_frame_flags = 0;
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007136 } else {
7137 int ref = AOM_REFFRAME_ALL;
7138
7139 if (flags & AOM_EFLAG_NO_REF_LAST2) ref ^= AOM_LAST2_FLAG;
7140 if (flags & AOM_EFLAG_NO_REF_LAST3) ref ^= AOM_LAST3_FLAG;
7141
7142 if (flags & AOM_EFLAG_NO_REF_GF) ref ^= AOM_GOLD_FLAG;
7143
7144 if (flags & AOM_EFLAG_NO_REF_ARF) {
7145 ref ^= AOM_ALT_FLAG;
7146 ref ^= AOM_BWD_FLAG;
7147 ref ^= AOM_ALT2_FLAG;
7148 } else {
7149 if (flags & AOM_EFLAG_NO_REF_BWD) ref ^= AOM_BWD_FLAG;
7150 if (flags & AOM_EFLAG_NO_REF_ARF2) ref ^= AOM_ALT2_FLAG;
7151 }
7152
7153 av1_use_as_reference(cpi, ref);
Yaowu Xuc27fc142016-08-22 16:08:15 -07007154 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07007155 }
7156
7157 if (flags &
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007158 (AOM_EFLAG_NO_UPD_LAST | AOM_EFLAG_NO_UPD_GF | AOM_EFLAG_NO_UPD_ARF)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07007159 int upd = AOM_REFFRAME_ALL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007160
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007161 // Refreshing LAST/LAST2/LAST3 is handled by 1 common flag.
7162 if (flags & AOM_EFLAG_NO_UPD_LAST) upd ^= AOM_LAST_FLAG;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007163
Yaowu Xuf883b422016-08-30 14:01:10 -07007164 if (flags & AOM_EFLAG_NO_UPD_GF) upd ^= AOM_GOLD_FLAG;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007165
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007166 if (flags & AOM_EFLAG_NO_UPD_ARF) {
7167 upd ^= AOM_ALT_FLAG;
7168 upd ^= AOM_BWD_FLAG;
7169 upd ^= AOM_ALT2_FLAG;
7170 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07007171
Yaowu Xuf883b422016-08-30 14:01:10 -07007172 av1_update_reference(cpi, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07007173 }
7174
sarahparker21dbca42018-03-30 17:43:44 -07007175 cpi->ext_use_ref_frame_mvs = cpi->oxcf.allow_ref_frame_mvs &
7176 ((flags & AOM_EFLAG_NO_REF_FRAME_MVS) == 0);
sarahparker27d686a2018-03-30 17:43:44 -07007177 cpi->ext_use_error_resilient = cpi->oxcf.error_resilient_mode |
7178 ((flags & AOM_EFLAG_ERROR_RESILIENT) != 0);
sarahparker9806fed2018-03-30 17:43:44 -07007179 cpi->ext_use_s_frame =
7180 cpi->oxcf.s_frame_mode | ((flags & AOM_EFLAG_SET_S_FRAME) != 0);
Sarah Parker50b6d6e2018-04-11 19:21:54 -07007181 cpi->ext_use_primary_ref_none = (flags & AOM_EFLAG_SET_PRIMARY_REF_NONE) != 0;
sarahparker21dbca42018-03-30 17:43:44 -07007182
Yaowu Xuf883b422016-08-30 14:01:10 -07007183 if (flags & AOM_EFLAG_NO_UPD_ENTROPY) {
7184 av1_update_entropy(cpi, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07007185 }
7186}
Andrey Norkin795ba872018-03-06 13:24:14 -08007187
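// Conversion between the application's timebase units and the encoder's
// presentation-time ticks (TICKS_PER_SEC ticks per second). As a worked
// example, assuming TICKS_PER_SEC is 10,000,000 and a 1/30 timebase, frame
// number n = 3 maps to 3 * 10,000,000 * 1 / 30 = 1,000,000 ticks, and
// ticks_to_timebase_units() rounds that back to 3.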
Andrey Norkin795ba872018-03-06 13:24:14 -08007188int64_t timebase_units_to_ticks(const aom_rational_t *timebase, int64_t n) {
7189 return n * TICKS_PER_SEC * timebase->num / timebase->den;
7190}
7191
7192int64_t ticks_to_timebase_units(const aom_rational_t *timebase, int64_t n) {
7193 const int64_t round = TICKS_PER_SEC * timebase->num / 2 - 1;
7194 return (n * timebase->den + round) / timebase->num / TICKS_PER_SEC;
7195}
Tom Fineganf8d6a162018-08-21 10:47:55 -07007196
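// Packs the sequence header into a single OBU_SEQUENCE_HEADER (header byte,
// leb128 size field, payload) in a newly allocated aom_fixed_buf_t; the
// caller owns both the struct and its buffer. Returns NULL on failure.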
7197aom_fixed_buf_t *av1_get_global_headers(AV1_COMP *cpi) {
7198 if (!cpi) return NULL;
7199
7200 uint8_t header_buf[512] = { 0 };
7201 const uint32_t sequence_header_size =
7202 write_sequence_header_obu(cpi, &header_buf[0]);
7203 assert(sequence_header_size <= sizeof(header_buf));
7204 if (sequence_header_size == 0) return NULL;
7205
7206 const size_t obu_header_size = 1;
7207 const size_t size_field_size = aom_uleb_size_in_bytes(sequence_header_size);
7208 const size_t payload_offset = obu_header_size + size_field_size;
7209
7210 if (payload_offset + sequence_header_size > sizeof(header_buf)) return NULL;
7211 memmove(&header_buf[payload_offset], &header_buf[0], sequence_header_size);
7212
7213 if (write_obu_header(OBU_SEQUENCE_HEADER, 0, &header_buf[0]) !=
7214 obu_header_size) {
7215 return NULL;
7216 }
7217
7218 size_t coded_size_field_size = 0;
7219 if (aom_uleb_encode(sequence_header_size, size_field_size,
7220 &header_buf[obu_header_size],
7221 &coded_size_field_size) != 0) {
7222 return NULL;
7223 }
7224 assert(coded_size_field_size == size_field_size);
7225
7226 aom_fixed_buf_t *global_headers =
7227 (aom_fixed_buf_t *)malloc(sizeof(*global_headers));
7228 if (!global_headers) return NULL;
7229
7230 const size_t global_header_buf_size =
7231 obu_header_size + size_field_size + sequence_header_size;
7232
7233 global_headers->buf = malloc(global_header_buf_size);
7234 if (!global_headers->buf) {
7235 free(global_headers);
7236 return NULL;
7237 }
7238
7239 memcpy(global_headers->buf, &header_buf[0], global_header_buf_size);
7240 global_headers->sz = global_header_buf_size;
7241 return global_headers;
7242}