/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <limits.h>
#include <math.h>
#include <stdio.h>

#include "config/aom_config.h"
#include "config/aom_dsp_rtcd.h"
#include "config/aom_scale_rtcd.h"
#include "config/av1_rtcd.h"

#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/aom_filter.h"
#if CONFIG_DENOISE
#include "aom_dsp/grain_table.h"
#include "aom_dsp/noise_util.h"
#include "aom_dsp/noise_model.h"
#endif
#include "aom_dsp/psnr.h"
#if CONFIG_INTERNAL_STATS
#include "aom_dsp/ssim.h"
#endif
#include "aom_ports/aom_timer.h"
#include "aom_ports/mem.h"
#include "aom_ports/system_state.h"
#include "aom_scale/aom_scale.h"
#if CONFIG_BITSTREAM_DEBUG || CONFIG_MISMATCH_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG || CONFIG_MISMATCH_DEBUG

#include "av1/common/alloccommon.h"
#include "av1/common/cdef.h"
#include "av1/common/filter.h"
#include "av1/common/idct.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#include "av1/common/resize.h"
#include "av1/common/tile_common.h"

#include "av1/encoder/av1_multi_thread.h"
#include "av1/encoder/aq_complexity.h"
#include "av1/encoder/aq_cyclicrefresh.h"
#include "av1/encoder/aq_variance.h"
#include "av1/encoder/bitstream.h"
#include "av1/encoder/context_tree.h"
#include "av1/encoder/encodeframe.h"
#include "av1/encoder/encodemv.h"
#include "av1/encoder/encode_strategy.h"
#include "av1/encoder/encoder.h"
#include "av1/encoder/encodetxb.h"
#include "av1/encoder/ethread.h"
#include "av1/encoder/firstpass.h"
#include "av1/encoder/grain_test_vectors.h"
#include "av1/encoder/hash_motion.h"
#include "av1/encoder/mbgraph.h"
#include "av1/encoder/picklpf.h"
#include "av1/encoder/pickrst.h"
#include "av1/encoder/random.h"
#include "av1/encoder/ratectrl.h"
#include "av1/encoder/rd.h"
#include "av1/encoder/rdopt.h"
#include "av1/encoder/segmentation.h"
#include "av1/encoder/speed_features.h"
#include "av1/encoder/temporal_filter.h"
#include "av1/encoder/reconinter_enc.h"

#define DEFAULT_EXPLICIT_ORDER_HINT_BITS 7

// av1 uses 10,000,000 ticks/second as time stamp
#define TICKS_PER_SEC 10000000LL

#if CONFIG_ENTROPY_STATS
FRAME_COUNTS aggregate_fc;
#endif  // CONFIG_ENTROPY_STATS

#define AM_SEGMENT_ID_INACTIVE 7
#define AM_SEGMENT_ID_ACTIVE 0

// Whether to use high precision mv for altref computation.
#define ALTREF_HIGH_PRECISION_MV 1

// Q threshold for high precision mv. Choose a very high value for now so that
// HIGH_PRECISION is always chosen.
#define HIGH_PRECISION_MV_QTHRESH 200

// #define OUTPUT_YUV_REC
#ifdef OUTPUT_YUV_SKINMAP
FILE *yuv_skinmap_file = NULL;
#endif
#ifdef OUTPUT_YUV_REC
FILE *yuv_rec_file;
#define FILE_NAME_LEN 100
#endif

// Estimate if the source frame is screen content, based on the portion of
// blocks that have no more than 4 (experimentally selected) luma colors.
static int is_screen_content(const uint8_t *src, int use_hbd, int bd,
                             int stride, int width, int height) {
  assert(src != NULL);
  int counts = 0;
  const int blk_w = 16;
  const int blk_h = 16;
  const int limit = 4;
  for (int r = 0; r + blk_h <= height; r += blk_h) {
    for (int c = 0; c + blk_w <= width; c += blk_w) {
      int count_buf[1 << 12];  // Maximum (1 << 12) color levels.
      const int n_colors =
          use_hbd ? av1_count_colors_highbd(src + r * stride + c, stride, blk_w,
                                            blk_h, bd, count_buf)
                  : av1_count_colors(src + r * stride + c, stride, blk_w, blk_h,
                                     count_buf);
      if (n_colors > 1 && n_colors <= limit) counts++;
    }
  }
  // The threshold is 10%.
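  // Each qualifying 16x16 block covers blk_w * blk_h luma samples, so the
  // integer comparison below is equivalent to asking whether more than 10% of
  // the frame area lies in low-color-count blocks.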
  return counts * blk_h * blk_w * 10 > width * height;
}

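// Maps an AOM_SCALING mode to a scaling ratio expressed as the fraction
// *hr / *hs (e.g. FOURFIVE -> 4/5).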
static INLINE void Scale2Ratio(AOM_SCALING mode, int *hr, int *hs) {
  switch (mode) {
    case NORMAL:
      *hr = 1;
      *hs = 1;
      break;
    case FOURFIVE:
      *hr = 4;
      *hs = 5;
      break;
    case THREEFIVE:
      *hr = 3;
      *hs = 5;
      break;
    case ONETWO:
      *hr = 1;
      *hs = 2;
      break;
    default:
      *hr = 1;
      *hs = 1;
      assert(0);
      break;
  }
}

// Mark all inactive blocks as active. Other segmentation features may be set
// so memset cannot be used; instead, only inactive blocks should be reset.
static void suppress_active_map(AV1_COMP *cpi) {
  unsigned char *const seg_map = cpi->segmentation_map;
  int i;
  if (cpi->active_map.enabled || cpi->active_map.update)
    for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
      if (seg_map[i] == AM_SEGMENT_ID_INACTIVE)
        seg_map[i] = AM_SEGMENT_ID_ACTIVE;
}

static void apply_active_map(AV1_COMP *cpi) {
  struct segmentation *const seg = &cpi->common.seg;
  unsigned char *const seg_map = cpi->segmentation_map;
  const unsigned char *const active_map = cpi->active_map.map;
  int i;

  assert(AM_SEGMENT_ID_ACTIVE == CR_SEGMENT_ID_BASE);

  if (frame_is_intra_only(&cpi->common)) {
    cpi->active_map.enabled = 0;
    cpi->active_map.update = 1;
  }

  if (cpi->active_map.update) {
    if (cpi->active_map.enabled) {
      for (i = 0; i < cpi->common.mi_rows * cpi->common.mi_cols; ++i)
        if (seg_map[i] == AM_SEGMENT_ID_ACTIVE) seg_map[i] = active_map[i];
      av1_enable_segmentation(seg);
      av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
      av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_H);
      av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_V);
      av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_U);
      av1_enable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_V);

      av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_H,
                      -MAX_LOOP_FILTER);
      av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_V,
                      -MAX_LOOP_FILTER);
      av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_U,
                      -MAX_LOOP_FILTER);
      av1_set_segdata(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_V,
                      -MAX_LOOP_FILTER);
    } else {
      av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_SKIP);
      av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_H);
      av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_Y_V);
      av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_U);
      av1_disable_segfeature(seg, AM_SEGMENT_ID_INACTIVE, SEG_LVL_ALT_LF_V);
      if (seg->enabled) {
        seg->update_data = 1;
        seg->update_map = 1;
      }
    }
    cpi->active_map.update = 0;
  }
}

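// The active map is supplied by the application at 16x16 (macroblock)
// granularity, while the encoder's active map and segmentation map are stored
// per mi unit; the two helpers below translate between those resolutions.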
int av1_set_active_map(AV1_COMP *cpi, unsigned char *new_map_16x16, int rows,
                       int cols) {
  if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols) {
    unsigned char *const active_map_8x8 = cpi->active_map.map;
    const int mi_rows = cpi->common.mi_rows;
    const int mi_cols = cpi->common.mi_cols;
    const int row_scale = mi_size_high[BLOCK_16X16] == 2 ? 1 : 2;
    const int col_scale = mi_size_wide[BLOCK_16X16] == 2 ? 1 : 2;
    cpi->active_map.update = 1;
    if (new_map_16x16) {
      int r, c;
      for (r = 0; r < mi_rows; ++r) {
        for (c = 0; c < mi_cols; ++c) {
          active_map_8x8[r * mi_cols + c] =
              new_map_16x16[(r >> row_scale) * cols + (c >> col_scale)]
                  ? AM_SEGMENT_ID_ACTIVE
                  : AM_SEGMENT_ID_INACTIVE;
        }
      }
      cpi->active_map.enabled = 1;
    } else {
      cpi->active_map.enabled = 0;
    }
    return 0;
  } else {
    return -1;
  }
}

int av1_get_active_map(AV1_COMP *cpi, unsigned char *new_map_16x16, int rows,
                       int cols) {
  if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols &&
      new_map_16x16) {
    unsigned char *const seg_map_8x8 = cpi->segmentation_map;
    const int mi_rows = cpi->common.mi_rows;
    const int mi_cols = cpi->common.mi_cols;
    const int row_scale = mi_size_high[BLOCK_16X16] == 2 ? 1 : 2;
    const int col_scale = mi_size_wide[BLOCK_16X16] == 2 ? 1 : 2;

    memset(new_map_16x16, !cpi->active_map.enabled, rows * cols);
    if (cpi->active_map.enabled) {
      int r, c;
      for (r = 0; r < mi_rows; ++r) {
        for (c = 0; c < mi_cols; ++c) {
          // Cyclic refresh segments are considered active despite not having
          // AM_SEGMENT_ID_ACTIVE
          new_map_16x16[(r >> row_scale) * cols + (c >> col_scale)] |=
              seg_map_8x8[r * mi_cols + c] != AM_SEGMENT_ID_INACTIVE;
        }
      }
    }
    return 0;
  } else {
    return -1;
  }
}

// Compute the horizontal frequency components' energy in a frame
// by calculating the 16x4 Horizontal DCT. This is to be used to
// decide the superresolution parameters.
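// On return, energy[k] (for k = 1..15) holds the average energy of horizontal
// DCT frequencies k and above, accumulated over all 16x4 blocks processed; the
// DC term (k = 0) is skipped. If no blocks fit within the frame, the entries
// are filled with a very large sentinel value instead.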
void analyze_hor_freq(const AV1_COMP *cpi, double *energy) {
  uint64_t freq_energy[16] = { 0 };
  const YV12_BUFFER_CONFIG *buf = cpi->source;
  const int bd = cpi->td.mb.e_mbd.bd;
  const int width = buf->y_crop_width;
  const int height = buf->y_crop_height;
  DECLARE_ALIGNED(16, int32_t, coeff[16 * 4]);
  int n = 0;
  memset(freq_energy, 0, sizeof(freq_energy));
  if (buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    const int16_t *src16 = (const int16_t *)CONVERT_TO_SHORTPTR(buf->y_buffer);
    for (int i = 0; i < height - 4; i += 4) {
      for (int j = 0; j < width - 16; j += 16) {
        av1_fwd_txfm2d_16x4(src16 + i * buf->y_stride + j, coeff, buf->y_stride,
                            H_DCT, bd);
        for (int k = 1; k < 16; ++k) {
          const uint64_t this_energy =
              ((int64_t)coeff[k] * coeff[k]) +
              ((int64_t)coeff[k + 16] * coeff[k + 16]) +
              ((int64_t)coeff[k + 32] * coeff[k + 32]) +
              ((int64_t)coeff[k + 48] * coeff[k + 48]);
          freq_energy[k] += ROUND_POWER_OF_TWO(this_energy, 2 + 2 * (bd - 8));
        }
        n++;
      }
    }
  } else {
    assert(bd == 8);
    DECLARE_ALIGNED(16, int16_t, src16[16 * 4]);
    for (int i = 0; i < height - 4; i += 4) {
      for (int j = 0; j < width - 16; j += 16) {
        for (int ii = 0; ii < 4; ++ii)
          for (int jj = 0; jj < 16; ++jj)
            src16[ii * 16 + jj] =
                buf->y_buffer[(i + ii) * buf->y_stride + (j + jj)];
        av1_fwd_txfm2d_16x4(src16, coeff, 16, H_DCT, bd);
        for (int k = 1; k < 16; ++k) {
          const uint64_t this_energy =
              ((int64_t)coeff[k] * coeff[k]) +
              ((int64_t)coeff[k + 16] * coeff[k + 16]) +
              ((int64_t)coeff[k + 32] * coeff[k + 32]) +
              ((int64_t)coeff[k + 48] * coeff[k + 48]);
          freq_energy[k] += ROUND_POWER_OF_TWO(this_energy, 2);
        }
        n++;
      }
    }
  }
  if (n) {
    for (int k = 1; k < 16; ++k) energy[k] = (double)freq_energy[k] / n;
    // Convert to cumulative energy
    for (int k = 14; k > 0; --k) energy[k] += energy[k + 1];
  } else {
    for (int k = 1; k < 16; ++k) energy[k] = 1e+20;
  }
}

static void set_high_precision_mv(AV1_COMP *cpi, int allow_high_precision_mv,
                                  int cur_frame_force_integer_mv) {
  MACROBLOCK *const mb = &cpi->td.mb;
  cpi->common.allow_high_precision_mv =
      allow_high_precision_mv && cur_frame_force_integer_mv == 0;
  const int copy_hp =
      cpi->common.allow_high_precision_mv && cur_frame_force_integer_mv == 0;
  int *(*src)[2] = copy_hp ? &mb->nmvcost_hp : &mb->nmvcost;
  mb->mv_cost_stack = *src;
}

static BLOCK_SIZE select_sb_size(const AV1_COMP *const cpi) {
  const AV1_COMMON *const cm = &cpi->common;

  if (cpi->oxcf.superblock_size == AOM_SUPERBLOCK_SIZE_64X64)
    return BLOCK_64X64;
#if CONFIG_FILEOPTIONS
  if (cm->options && cm->options->ext_partition)
#endif
    if (cpi->oxcf.superblock_size == AOM_SUPERBLOCK_SIZE_128X128)
      return BLOCK_128X128;

  assert(cpi->oxcf.superblock_size == AOM_SUPERBLOCK_SIZE_DYNAMIC);

// TODO(any): Possibly could improve this with a heuristic.
#if CONFIG_FILEOPTIONS
  if (cm->options && !cm->options->ext_partition) return BLOCK_64X64;
#endif

  // When superres / resize is on, 'cm->width / height' can change between
  // calls, so we don't apply this heuristic there. Also, this heuristic gives
  // compression gain for speed >= 2 only.
  if (cpi->oxcf.superres_mode == SUPERRES_NONE &&
      cpi->oxcf.resize_mode == RESIZE_NONE && cpi->oxcf.speed >= 2) {
    return (cm->width >= 480 && cm->height >= 360) ? BLOCK_128X128
                                                   : BLOCK_64X64;
  }

  return BLOCK_128X128;
}

static int get_current_frame_ref_type(const AV1_COMP *const cpi) {
  const AV1_COMMON *const cm = &cpi->common;
  const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
  // We choose the reference "type" of this frame from the flags which indicate
  // which reference frames will be refreshed by it. More than one of these
  // flags may be set, so the order here implies an order of precedence.

  if (frame_is_intra_only(cm) || cm->error_resilient_mode ||
      cm->force_primary_ref_none)
    return REGULAR_FRAME;
  else if (gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE)
    return EXT_ARF_FRAME;
  else if (cpi->refresh_alt_ref_frame)
    return ARF_FRAME;
  else if (cpi->rc.is_src_frame_alt_ref)
    return OVERLAY_FRAME;
  else if (cpi->refresh_golden_frame)
    return GLD_FRAME;
  else if (cpi->refresh_bwd_ref_frame)
    return BRF_FRAME;
  else
    return REGULAR_FRAME;
}

static void setup_frame(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  // Set up entropy context depending on frame type. The decoder mandates
  // the use of the default context, index 0, for keyframes and inter
  // frames where the error_resilient_mode or intra_only flag is set. For
  // other inter-frames the encoder currently uses only two contexts;
  // context 1 for ALTREF frames and context 0 for the others.

  cm->primary_ref_frame = PRIMARY_REF_NONE;
  if (frame_is_intra_only(cm) || cm->error_resilient_mode ||
      cm->force_primary_ref_none) {
    av1_setup_past_independence(cm);
    for (int i = 0; i < REF_FRAMES; i++) {
      cpi->fb_of_context_type[i] = -1;
    }
    cpi->fb_of_context_type[REGULAR_FRAME] =
        cm->show_frame ? get_ref_frame_map_idx(cm, GOLDEN_FRAME)
                       : get_ref_frame_map_idx(cm, ALTREF_FRAME);
  } else {
    int wanted_fb = cpi->fb_of_context_type[get_current_frame_ref_type(cpi)];
    for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
      int fb = get_ref_frame_map_idx(cm, ref_frame);
      if (fb == wanted_fb) {
        cm->primary_ref_frame = ref_frame - LAST_FRAME;
      }
    }
  }

  if (cm->current_frame.frame_type == KEY_FRAME && cm->show_frame) {
    cpi->refresh_golden_frame = 1;
    cpi->refresh_alt_ref_frame = 1;
    av1_zero(cpi->interp_filter_selected);
    set_sb_size(&cm->seq_params, select_sb_size(cpi));
  } else if (frame_is_sframe(cm)) {
    cpi->refresh_golden_frame = 1;
    cpi->refresh_alt_ref_frame = 1;
    av1_zero(cpi->interp_filter_selected);
    set_sb_size(&cm->seq_params, select_sb_size(cpi));
  } else {
    const RefCntBuffer *const primary_ref_buf = get_primary_ref_frame_buf(cm);
    if (primary_ref_buf == NULL) {
      av1_setup_past_independence(cm);
      cm->seg.update_map = 1;
      cm->seg.update_data = 1;
    } else {
      *cm->fc = primary_ref_buf->frame_context;
    }
    av1_zero(cpi->interp_filter_selected[0]);
  }

  cm->prev_frame = get_primary_ref_frame_buf(cm);
  cpi->vaq_refresh = 0;
}

static void enc_setup_mi(AV1_COMMON *cm) {
  int i;
  int mi_rows_sb_aligned = calc_mi_size(cm->mi_rows);
  cm->mi = cm->mip;
  memset(cm->mip, 0, cm->mi_stride * mi_rows_sb_aligned * sizeof(*cm->mip));
  cm->prev_mi = cm->prev_mip;
  // Clear top border row
  memset(cm->prev_mip, 0, sizeof(*cm->prev_mip) * cm->mi_stride);
  // Clear left border column
  for (i = 0; i < mi_rows_sb_aligned; ++i)
    memset(&cm->prev_mip[i * cm->mi_stride], 0, sizeof(*cm->prev_mip));
  cm->mi_grid_visible = cm->mi_grid_base;
  cm->prev_mi_grid_visible = cm->prev_mi_grid_base;

  memset(cm->mi_grid_base, 0,
         cm->mi_stride * mi_rows_sb_aligned * sizeof(*cm->mi_grid_base));
}

static int enc_alloc_mi(AV1_COMMON *cm, int mi_size) {
  cm->mip = aom_calloc(mi_size, sizeof(*cm->mip));
  if (!cm->mip) return 1;
  cm->prev_mip = aom_calloc(mi_size, sizeof(*cm->prev_mip));
  if (!cm->prev_mip) return 1;
  cm->mi_alloc_size = mi_size;

  cm->mi_grid_base =
      (MB_MODE_INFO **)aom_calloc(mi_size, sizeof(MB_MODE_INFO *));
  if (!cm->mi_grid_base) return 1;
  cm->prev_mi_grid_base =
      (MB_MODE_INFO **)aom_calloc(mi_size, sizeof(MB_MODE_INFO *));
  if (!cm->prev_mi_grid_base) return 1;

  return 0;
}

static void enc_free_mi(AV1_COMMON *cm) {
  aom_free(cm->mip);
  cm->mip = NULL;
  aom_free(cm->prev_mip);
  cm->prev_mip = NULL;
  aom_free(cm->mi_grid_base);
  cm->mi_grid_base = NULL;
  aom_free(cm->prev_mi_grid_base);
  cm->prev_mi_grid_base = NULL;
  cm->mi_alloc_size = 0;
}

static void swap_mi_and_prev_mi(AV1_COMMON *cm) {
  // Current mip will be the prev_mip for the next frame.
  MB_MODE_INFO **temp_base = cm->prev_mi_grid_base;
  MB_MODE_INFO *temp = cm->prev_mip;
  cm->prev_mip = cm->mip;
  cm->mip = temp;

  // Update the upper left visible macroblock ptrs.
  cm->mi = cm->mip;
  cm->prev_mi = cm->prev_mip;

  cm->prev_mi_grid_base = cm->mi_grid_base;
  cm->mi_grid_base = temp_base;
  cm->mi_grid_visible = cm->mi_grid_base;
  cm->prev_mi_grid_visible = cm->prev_mi_grid_base;
}

void av1_initialize_enc(void) {
  av1_rtcd();
  aom_dsp_rtcd();
  aom_scale_rtcd();
  av1_init_intra_predictors();
  av1_init_me_luts();
  av1_rc_init_minq_luts();
  av1_init_wedge_masks();
}

static void dealloc_context_buffers_ext(AV1_COMP *cpi) {
  if (cpi->mbmi_ext_base) {
    aom_free(cpi->mbmi_ext_base);
    cpi->mbmi_ext_base = NULL;
  }
}

static void alloc_context_buffers_ext(AV1_COMP *cpi) {
  AV1_COMMON *cm = &cpi->common;
  int mi_size = cm->mi_cols * cm->mi_rows;

  dealloc_context_buffers_ext(cpi);
  CHECK_MEM_ERROR(cm, cpi->mbmi_ext_base,
                  aom_calloc(mi_size, sizeof(*cpi->mbmi_ext_base)));
}

static void update_film_grain_parameters(struct AV1_COMP *cpi,
                                         const AV1EncoderConfig *oxcf) {
  AV1_COMMON *const cm = &cpi->common;
  cpi->oxcf = *oxcf;

  if (cpi->film_grain_table) {
    aom_film_grain_table_free(cpi->film_grain_table);
    aom_free(cpi->film_grain_table);
    cpi->film_grain_table = NULL;
  }

  if (oxcf->film_grain_test_vector) {
    cm->seq_params.film_grain_params_present = 1;
    if (cm->current_frame.frame_type == KEY_FRAME) {
      memcpy(&cm->film_grain_params,
             film_grain_test_vectors + oxcf->film_grain_test_vector - 1,
             sizeof(cm->film_grain_params));

      cm->film_grain_params.bit_depth = cm->seq_params.bit_depth;
      if (cm->seq_params.color_range == AOM_CR_FULL_RANGE) {
        cm->film_grain_params.clip_to_restricted_range = 0;
      }
    }
  } else if (oxcf->film_grain_table_filename) {
    cpi->film_grain_table = aom_malloc(sizeof(*cpi->film_grain_table));
    memset(cpi->film_grain_table, 0, sizeof(aom_film_grain_table_t));

    aom_film_grain_table_read(cpi->film_grain_table,
                              oxcf->film_grain_table_filename, &cm->error);
  } else {
    cm->seq_params.film_grain_params_present = 0;
    memset(&cm->film_grain_params, 0, sizeof(cm->film_grain_params));
  }
}

static void dealloc_compressor_data(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const int num_planes = av1_num_planes(cm);

  dealloc_context_buffers_ext(cpi);

  aom_free(cpi->tile_data);
  cpi->tile_data = NULL;

  // Delete segmentation map
  aom_free(cpi->segmentation_map);
  cpi->segmentation_map = NULL;

  av1_cyclic_refresh_free(cpi->cyclic_refresh);
  cpi->cyclic_refresh = NULL;

  aom_free(cpi->active_map.map);
  cpi->active_map.map = NULL;

  aom_free(cpi->td.mb.above_pred_buf);
  cpi->td.mb.above_pred_buf = NULL;

  aom_free(cpi->td.mb.left_pred_buf);
  cpi->td.mb.left_pred_buf = NULL;

  aom_free(cpi->td.mb.wsrc_buf);
  cpi->td.mb.wsrc_buf = NULL;

#if CONFIG_COLLECT_INTER_MODE_RD_STATS
  aom_free(cpi->td.mb.inter_modes_info);
  cpi->td.mb.inter_modes_info = NULL;
#endif

  for (int i = 0; i < 2; i++)
    for (int j = 0; j < 2; j++) {
      aom_free(cpi->td.mb.hash_value_buffer[i][j]);
      cpi->td.mb.hash_value_buffer[i][j] = NULL;
    }
  aom_free(cpi->td.mb.mask_buf);
  cpi->td.mb.mask_buf = NULL;

  aom_free(cm->tpl_mvs);
  cm->tpl_mvs = NULL;

  av1_free_ref_frame_buffers(cm->buffer_pool);
  av1_free_txb_buf(cpi);
  av1_free_context_buffers(cm);

  aom_free_frame_buffer(&cpi->last_frame_uf);
  av1_free_restoration_buffers(cm);
  aom_free_frame_buffer(&cpi->trial_frame_rst);
  aom_free_frame_buffer(&cpi->scaled_source);
  aom_free_frame_buffer(&cpi->scaled_last_source);
  aom_free_frame_buffer(&cpi->alt_ref_buffer);
  av1_lookahead_destroy(cpi->lookahead);

  aom_free(cpi->tile_tok[0][0]);
  cpi->tile_tok[0][0] = 0;

  aom_free(cpi->tplist[0][0]);
  cpi->tplist[0][0] = NULL;

  av1_free_pc_tree(&cpi->td, num_planes);

  aom_free(cpi->td.mb.palette_buffer);

  aom_free(cpi->td.mb.tmp_conv_dst);
  for (int j = 0; j < 2; ++j) {
    aom_free(cpi->td.mb.tmp_obmc_bufs[j]);
  }

#if CONFIG_DENOISE
  if (cpi->denoise_and_model) {
    aom_denoise_and_model_free(cpi->denoise_and_model);
    cpi->denoise_and_model = NULL;
  }
#endif
  if (cpi->film_grain_table) {
    aom_film_grain_table_free(cpi->film_grain_table);
    cpi->film_grain_table = NULL;
  }
}

static void save_coding_context(AV1_COMP *cpi) {
  CODING_CONTEXT *const cc = &cpi->coding_context;
  AV1_COMMON *cm = &cpi->common;

  // Stores a snapshot of key state variables which can subsequently be
  // restored with a call to av1_restore_coding_context. These functions are
  // intended for use in a re-code loop in av1_compress_frame where the
  // quantizer value is adjusted between loop iterations.
  av1_copy(cc->nmv_vec_cost, cpi->td.mb.nmv_vec_cost);
  av1_copy(cc->nmv_costs, cpi->nmv_costs);
  av1_copy(cc->nmv_costs_hp, cpi->nmv_costs_hp);

  cc->fc = *cm->fc;
}

static void restore_coding_context(AV1_COMP *cpi) {
  CODING_CONTEXT *const cc = &cpi->coding_context;
  AV1_COMMON *cm = &cpi->common;

  // Restore key state variables to the snapshot state stored in the
  // previous call to av1_save_coding_context.
  av1_copy(cpi->td.mb.nmv_vec_cost, cc->nmv_vec_cost);
  av1_copy(cpi->nmv_costs, cc->nmv_costs);
  av1_copy(cpi->nmv_costs_hp, cc->nmv_costs_hp);

  *cm->fc = cc->fc;
}

static void configure_static_seg_features(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  struct segmentation *const seg = &cm->seg;

  int high_q = (int)(rc->avg_q > 48.0);
  int qi_delta;

  // Disable and clear down for KF
  if (cm->current_frame.frame_type == KEY_FRAME) {
    // Clear down the global segmentation map
    memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
    seg->update_map = 0;
    seg->update_data = 0;
    cpi->static_mb_pct = 0;

    // Disable segmentation
    av1_disable_segmentation(seg);

    // Clear down the segment features.
    av1_clearall_segfeatures(seg);
  } else if (cpi->refresh_alt_ref_frame) {
    // If this is an alt ref frame
    // Clear down the global segmentation map
    memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);
    seg->update_map = 0;
    seg->update_data = 0;
    cpi->static_mb_pct = 0;

    // Disable segmentation and individual segment features by default
    av1_disable_segmentation(seg);
    av1_clearall_segfeatures(seg);

    // Scan frames from current to arf frame.
    // This function re-enables segmentation if appropriate.
    av1_update_mbgraph_stats(cpi);

    // If segmentation was enabled set those features needed for the
    // arf itself.
    if (seg->enabled) {
      seg->update_map = 1;
      seg->update_data = 1;

      qi_delta = av1_compute_qdelta(rc, rc->avg_q, rc->avg_q * 0.875,
                                    cm->seq_params.bit_depth);
      av1_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta - 2);
      av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_H, -2);
      av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_V, -2);
      av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_U, -2);
      av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_V, -2);

      av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_H);
      av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_V);
      av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_U);
      av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_V);

      av1_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);
    }
  } else if (seg->enabled) {
    // All other frames if segmentation has been enabled

    // First normal frame in a valid gf or alt ref group
    if (rc->frames_since_golden == 0) {
      // Set up segment features for normal frames in an arf group
      if (rc->source_alt_ref_active) {
        seg->update_map = 0;
        seg->update_data = 1;

        qi_delta = av1_compute_qdelta(rc, rc->avg_q, rc->avg_q * 1.125,
                                      cm->seq_params.bit_depth);
        av1_set_segdata(seg, 1, SEG_LVL_ALT_Q, qi_delta + 2);
        av1_enable_segfeature(seg, 1, SEG_LVL_ALT_Q);

        av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_H, -2);
        av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_Y_V, -2);
        av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_U, -2);
        av1_set_segdata(seg, 1, SEG_LVL_ALT_LF_V, -2);

        av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_H);
        av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_Y_V);
        av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_U);
        av1_enable_segfeature(seg, 1, SEG_LVL_ALT_LF_V);

        // Segment coding disabled for compred testing
        if (high_q || (cpi->static_mb_pct == 100)) {
          av1_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);
          av1_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);
          av1_enable_segfeature(seg, 1, SEG_LVL_SKIP);
        }
      } else {
        // Disable segmentation and clear down features if alt ref
        // is not active for this group

        av1_disable_segmentation(seg);

        memset(cpi->segmentation_map, 0, cm->mi_rows * cm->mi_cols);

        seg->update_map = 0;
        seg->update_data = 0;

        av1_clearall_segfeatures(seg);
      }
    } else if (rc->is_src_frame_alt_ref) {
      // Special case where we are coding over the top of a previous
      // alt ref frame.
      // Segment coding disabled for compred testing

      // Enable ref frame features for segment 0 as well
      av1_enable_segfeature(seg, 0, SEG_LVL_REF_FRAME);
      av1_enable_segfeature(seg, 1, SEG_LVL_REF_FRAME);

      // All mbs should use ALTREF_FRAME
      av1_clear_segdata(seg, 0, SEG_LVL_REF_FRAME);
      av1_set_segdata(seg, 0, SEG_LVL_REF_FRAME, ALTREF_FRAME);
      av1_clear_segdata(seg, 1, SEG_LVL_REF_FRAME);
      av1_set_segdata(seg, 1, SEG_LVL_REF_FRAME, ALTREF_FRAME);

      // Skip all MBs if high Q (0,0 mv and skip coeffs)
      if (high_q) {
        av1_enable_segfeature(seg, 0, SEG_LVL_SKIP);
        av1_enable_segfeature(seg, 1, SEG_LVL_SKIP);
      }
      // Enable data update
      seg->update_data = 1;
    } else {
      // All other frames.

      // No updates.. leave things as they are.
      seg->update_map = 0;
      seg->update_data = 0;
    }
  }
}

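// Copy the per-mi-unit segment ids chosen for this frame into the current
// frame buffer's seg_map so they remain available once the frame becomes a
// reference.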
static void update_reference_segmentation_map(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  MB_MODE_INFO **mi_4x4_ptr = cm->mi_grid_visible;
  uint8_t *cache_ptr = cm->cur_frame->seg_map;
  int row, col;

  for (row = 0; row < cm->mi_rows; row++) {
    MB_MODE_INFO **mi_4x4 = mi_4x4_ptr;
    uint8_t *cache = cache_ptr;
    for (col = 0; col < cm->mi_cols; col++, mi_4x4++, cache++)
      cache[0] = mi_4x4[0]->segment_id;
    mi_4x4_ptr += cm->mi_stride;
    cache_ptr += cm->mi_cols;
  }
}

static void alloc_raw_frame_buffers(AV1_COMP *cpi) {
  AV1_COMMON *cm = &cpi->common;
  const SequenceHeader *const seq_params = &cm->seq_params;
  const AV1EncoderConfig *oxcf = &cpi->oxcf;

  if (!cpi->lookahead)
    cpi->lookahead = av1_lookahead_init(
        oxcf->width, oxcf->height, seq_params->subsampling_x,
        seq_params->subsampling_y, seq_params->use_highbitdepth,
        oxcf->lag_in_frames, oxcf->border_in_pixels);
  if (!cpi->lookahead)
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate lag buffers");

  // TODO(agrange) Check if ARF is enabled and skip allocation if not.
  if (aom_realloc_frame_buffer(
          &cpi->alt_ref_buffer, oxcf->width, oxcf->height,
          seq_params->subsampling_x, seq_params->subsampling_y,
          seq_params->use_highbitdepth, oxcf->border_in_pixels,
          cm->byte_alignment, NULL, NULL, NULL))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate altref buffer");
}

static void alloc_util_frame_buffers(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const SequenceHeader *const seq_params = &cm->seq_params;
  if (aom_realloc_frame_buffer(
          &cpi->last_frame_uf, cm->width, cm->height, seq_params->subsampling_x,
          seq_params->subsampling_y, seq_params->use_highbitdepth,
          cpi->oxcf.border_in_pixels, cm->byte_alignment, NULL, NULL, NULL))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate last frame buffer");

  if (aom_realloc_frame_buffer(
          &cpi->trial_frame_rst, cm->superres_upscaled_width,
          cm->superres_upscaled_height, seq_params->subsampling_x,
          seq_params->subsampling_y, seq_params->use_highbitdepth,
          AOM_RESTORATION_FRAME_BORDER, cm->byte_alignment, NULL, NULL, NULL))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate trial restored frame buffer");

  if (aom_realloc_frame_buffer(
          &cpi->scaled_source, cm->width, cm->height, seq_params->subsampling_x,
          seq_params->subsampling_y, seq_params->use_highbitdepth,
          cpi->oxcf.border_in_pixels, cm->byte_alignment, NULL, NULL, NULL))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate scaled source buffer");

  if (aom_realloc_frame_buffer(
          &cpi->scaled_last_source, cm->width, cm->height,
          seq_params->subsampling_x, seq_params->subsampling_y,
          seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
          cm->byte_alignment, NULL, NULL, NULL))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate scaled last source buffer");
}

static void alloc_compressor_data(AV1_COMP *cpi) {
  AV1_COMMON *cm = &cpi->common;
  const int num_planes = av1_num_planes(cm);

  av1_alloc_context_buffers(cm, cm->width, cm->height);

  int mi_rows_aligned_to_sb =
      ALIGN_POWER_OF_TWO(cm->mi_rows, cm->seq_params.mib_size_log2);
  int sb_rows = mi_rows_aligned_to_sb >> cm->seq_params.mib_size_log2;

  av1_alloc_txb_buf(cpi);

  alloc_context_buffers_ext(cpi);

  aom_free(cpi->tile_tok[0][0]);

  {
    unsigned int tokens =
        get_token_alloc(cm->mb_rows, cm->mb_cols, MAX_SB_SIZE_LOG2, num_planes);
    CHECK_MEM_ERROR(cm, cpi->tile_tok[0][0],
                    aom_calloc(tokens, sizeof(*cpi->tile_tok[0][0])));
  }
  aom_free(cpi->tplist[0][0]);

  CHECK_MEM_ERROR(cm, cpi->tplist[0][0],
                  aom_calloc(sb_rows * MAX_TILE_ROWS * MAX_TILE_COLS,
                             sizeof(*cpi->tplist[0][0])));

  av1_setup_pc_tree(&cpi->common, &cpi->td);
}

void av1_new_framerate(AV1_COMP *cpi, double framerate) {
  cpi->framerate = framerate < 0.1 ? 30 : framerate;
  av1_rc_update_framerate(cpi, cpi->common.width, cpi->common.height);
}

static void set_tile_info(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  int i, start_sb;

  av1_get_tile_limits(cm);

  // configure tile columns
  if (cpi->oxcf.tile_width_count == 0 || cpi->oxcf.tile_height_count == 0) {
    cm->uniform_tile_spacing_flag = 1;
    cm->log2_tile_cols = AOMMAX(cpi->oxcf.tile_columns, cm->min_log2_tile_cols);
    cm->log2_tile_cols = AOMMIN(cm->log2_tile_cols, cm->max_log2_tile_cols);
  } else {
    int mi_cols = ALIGN_POWER_OF_TWO(cm->mi_cols, cm->seq_params.mib_size_log2);
    int sb_cols = mi_cols >> cm->seq_params.mib_size_log2;
    int size_sb, j = 0;
    cm->uniform_tile_spacing_flag = 0;
    for (i = 0, start_sb = 0; start_sb < sb_cols && i < MAX_TILE_COLS; i++) {
      cm->tile_col_start_sb[i] = start_sb;
      size_sb = cpi->oxcf.tile_widths[j++];
      if (j >= cpi->oxcf.tile_width_count) j = 0;
      start_sb += AOMMIN(size_sb, cm->max_tile_width_sb);
    }
    cm->tile_cols = i;
    cm->tile_col_start_sb[i] = sb_cols;
  }
  av1_calculate_tile_cols(cm);

  // configure tile rows
  if (cm->uniform_tile_spacing_flag) {
    cm->log2_tile_rows = AOMMAX(cpi->oxcf.tile_rows, cm->min_log2_tile_rows);
    cm->log2_tile_rows = AOMMIN(cm->log2_tile_rows, cm->max_log2_tile_rows);
  } else {
    int mi_rows = ALIGN_POWER_OF_TWO(cm->mi_rows, cm->seq_params.mib_size_log2);
    int sb_rows = mi_rows >> cm->seq_params.mib_size_log2;
    int size_sb, j = 0;
    for (i = 0, start_sb = 0; start_sb < sb_rows && i < MAX_TILE_ROWS; i++) {
      cm->tile_row_start_sb[i] = start_sb;
      size_sb = cpi->oxcf.tile_heights[j++];
      if (j >= cpi->oxcf.tile_height_count) j = 0;
      start_sb += AOMMIN(size_sb, cm->max_tile_height_sb);
    }
    cm->tile_rows = i;
    cm->tile_row_start_sb[i] = sb_rows;
  }
  av1_calculate_tile_rows(cm);
}

static void update_frame_size(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;

  av1_set_mb_mi(cm, cm->width, cm->height);
  av1_init_context_buffers(cm);
  av1_init_macroblockd(cm, xd, NULL);
  memset(cpi->mbmi_ext_base, 0,
         cm->mi_rows * cm->mi_cols * sizeof(*cpi->mbmi_ext_base));
  set_tile_info(cpi);
}

static void init_buffer_indices(AV1_COMP *cpi) {
  int fb_idx;
  for (fb_idx = 0; fb_idx < REF_FRAMES; ++fb_idx)
    cpi->common.remapped_ref_idx[fb_idx] = fb_idx;
  cpi->rate_index = 0;
  cpi->rate_size = 0;
}

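// Returns 1 if a stream of the given width x height at fps frames/second fits
// within the level defined by lvl_width x lvl_height at lvl_fps, allowing each
// dimension to exceed the level's nominal size by up to lvl_dim_mult as long
// as the total luma sample count and display sample rate stay within bounds.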
Debargha Mukherjee57498692018-05-11 13:29:31 -0700995static INLINE int does_level_match(int width, int height, double fps,
996 int lvl_width, int lvl_height,
997 double lvl_fps, int lvl_dim_mult) {
998 const int64_t lvl_luma_pels = lvl_width * lvl_height;
999 const double lvl_display_sample_rate = lvl_luma_pels * lvl_fps;
1000 const int64_t luma_pels = width * height;
1001 const double display_sample_rate = luma_pels * fps;
1002 return luma_pels <= lvl_luma_pels &&
1003 display_sample_rate <= lvl_display_sample_rate &&
1004 width <= lvl_width * lvl_dim_mult &&
1005 height <= lvl_height * lvl_dim_mult;
1006}
1007
Andrey Norkin26495512018-06-20 17:13:11 -07001008static void set_bitstream_level_tier(SequenceHeader *seq, AV1_COMMON *cm,
Andrey Norkinf481d982018-05-15 12:05:31 -07001009 const AV1EncoderConfig *oxcf) {
Debargha Mukherjee57498692018-05-11 13:29:31 -07001010 // TODO(any): This is a placeholder function that only addresses dimensions
1011 // and max display sample rates.
1012 // Need to add checks for max bit rate, max decoded luma sample rate, header
1013 // rate, etc. that are not covered by this function.
Debargha Mukherjeeea675402018-05-10 16:10:41 -07001014 (void)oxcf;
Debargha Mukherjee57498692018-05-11 13:29:31 -07001015 BitstreamLevel bl = { 9, 3 };
1016 if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate, 512,
1017 288, 30.0, 4)) {
1018 bl.major = 2;
1019 bl.minor = 0;
1020 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1021 704, 396, 30.0, 4)) {
1022 bl.major = 2;
1023 bl.minor = 1;
1024 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1025 1088, 612, 30.0, 4)) {
1026 bl.major = 3;
1027 bl.minor = 0;
1028 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1029 1376, 774, 30.0, 4)) {
1030 bl.major = 3;
1031 bl.minor = 1;
1032 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1033 2048, 1152, 30.0, 3)) {
1034 bl.major = 4;
1035 bl.minor = 0;
1036 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1037 2048, 1152, 60.0, 3)) {
1038 bl.major = 4;
1039 bl.minor = 1;
1040 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1041 4096, 2176, 30.0, 2)) {
1042 bl.major = 5;
1043 bl.minor = 0;
1044 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1045 4096, 2176, 60.0, 2)) {
1046 bl.major = 5;
1047 bl.minor = 1;
1048 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1049 4096, 2176, 120.0, 2)) {
1050 bl.major = 5;
1051 bl.minor = 2;
1052 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1053 8192, 4352, 30.0, 2)) {
1054 bl.major = 6;
1055 bl.minor = 0;
1056 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1057 8192, 4352, 60.0, 2)) {
1058 bl.major = 6;
1059 bl.minor = 1;
1060 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1061 8192, 4352, 120.0, 2)) {
1062 bl.major = 6;
1063 bl.minor = 2;
1064 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1065 16384, 8704, 30.0, 2)) {
1066 bl.major = 7;
1067 bl.minor = 0;
1068 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1069 16384, 8704, 60.0, 2)) {
1070 bl.major = 7;
1071 bl.minor = 1;
1072 } else if (does_level_match(oxcf->width, oxcf->height, oxcf->init_framerate,
1073 16384, 8704, 120.0, 2)) {
1074 bl.major = 7;
1075 bl.minor = 2;
1076 }
Debargha Mukherjeeea675402018-05-10 16:10:41 -07001077 for (int i = 0; i < MAX_NUM_OPERATING_POINTS; ++i) {
Debargha Mukherjee57498692018-05-11 13:29:31 -07001078 seq->level[i] = bl;
Andrey Norkinf481d982018-05-15 12:05:31 -07001079 seq->tier[i] = 0; // setting main tier by default
Andrey Norkin26495512018-06-20 17:13:11 -07001080 // Set the maximum parameters for bitrate and buffer size for this profile,
1081 // level, and tier
1082 cm->op_params[i].bitrate = max_level_bitrate(
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001083 cm->seq_params.profile, major_minor_to_seq_level_idx(seq->level[i]),
1084 seq->tier[i]);
Andrey Norkinc7511de2018-06-22 12:31:06 -07001085 // Level with seq_level_idx = 31 returns a high "dummy" bitrate to pass the
1086 // check
Andrey Norkin26495512018-06-20 17:13:11 -07001087 if (cm->op_params[i].bitrate == 0)
1088 aom_internal_error(
1089 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
1090 "AV1 does not support this combination of profile, level, and tier.");
Andrey Norkinc7511de2018-06-22 12:31:06 -07001091 // Buffer size in bits/s is bitrate in bits/s * 1 s
Andrey Norkin26495512018-06-20 17:13:11 -07001092 cm->op_params[i].buffer_size = cm->op_params[i].bitrate;
Debargha Mukherjeeea675402018-05-10 16:10:41 -07001093 }
1094}
1095
Andrey Norkin26495512018-06-20 17:13:11 -07001096static void init_seq_coding_tools(SequenceHeader *seq, AV1_COMMON *cm,
1097 const AV1EncoderConfig *oxcf) {
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001098 seq->still_picture = (oxcf->limit == 1);
1099 seq->reduced_still_picture_hdr = seq->still_picture;
Debargha Mukherjee9713ccb2018-04-08 19:09:17 -07001100 seq->reduced_still_picture_hdr &= !oxcf->full_still_picture_hdr;
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001101 seq->force_screen_content_tools = 2;
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001102 seq->force_integer_mv = 2;
David Turnerebf96f42018-11-14 16:57:57 +00001103 seq->order_hint_info.enable_order_hint = oxcf->enable_order_hint;
David Turner936235c2018-11-28 13:42:01 +00001104 seq->frame_id_numbers_present_flag =
1105 !(seq->still_picture && seq->reduced_still_picture_hdr) &&
1106 !oxcf->large_scale_tile && oxcf->error_resilient_mode;
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001107 if (seq->still_picture && seq->reduced_still_picture_hdr) {
David Turnerebf96f42018-11-14 16:57:57 +00001108 seq->order_hint_info.enable_order_hint = 0;
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001109 seq->force_screen_content_tools = 2;
1110 seq->force_integer_mv = 2;
1111 }
David Turnerebf96f42018-11-14 16:57:57 +00001112 seq->order_hint_info.order_hint_bits_minus_1 =
1113 seq->order_hint_info.enable_order_hint
1114 ? DEFAULT_EXPLICIT_ORDER_HINT_BITS - 1
1115 : -1;
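// -1 acts as the "not coded" sentinel when order hints are disabled;
// otherwise DEFAULT_EXPLICIT_ORDER_HINT_BITS selects how many order hint bits
// are written to the sequence header.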
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001116
David Turner760a2f42018-12-07 15:25:36 +00001117 seq->max_frame_width =
1118 oxcf->forced_max_frame_width ? oxcf->forced_max_frame_width : oxcf->width;
1119 seq->max_frame_height = oxcf->forced_max_frame_height
1120 ? oxcf->forced_max_frame_height
1121 : oxcf->height;
1122 seq->num_bits_width =
1123 (seq->max_frame_width > 1) ? get_msb(seq->max_frame_width - 1) + 1 : 1;
1124 seq->num_bits_height =
1125 (seq->max_frame_height > 1) ? get_msb(seq->max_frame_height - 1) + 1 : 1;
1126 assert(seq->num_bits_width <= 16);
1127 assert(seq->num_bits_height <= 16);
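// Illustrative example: for a 1920x1080 maximum frame size, get_msb(1919) and
// get_msb(1079) are both 10 (assuming get_msb() returns the index of the most
// significant set bit), so width - 1 and height - 1 are each coded with 11
// bits, comfortably inside the 16-bit ceiling asserted above.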
1128
1129 seq->frame_id_length = FRAME_ID_LENGTH;
1130 seq->delta_frame_id_length = DELTA_FRAME_ID_LENGTH;
1131
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07001132 seq->enable_dual_filter = oxcf->enable_dual_filter;
Debargha Mukherjee7ac3eb12018-12-12 10:26:50 -08001133 seq->order_hint_info.enable_dist_wtd_comp = oxcf->enable_dist_wtd_comp;
1134 seq->order_hint_info.enable_dist_wtd_comp &=
David Turnerebf96f42018-11-14 16:57:57 +00001135 seq->order_hint_info.enable_order_hint;
1136 seq->order_hint_info.enable_ref_frame_mvs = oxcf->enable_ref_frame_mvs;
1137 seq->order_hint_info.enable_ref_frame_mvs &=
1138 seq->order_hint_info.enable_order_hint;
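// Distance-weighted compound prediction and ref-frame MV projection both rely
// on frame order hints, so they are masked off above whenever order hints
// themselves are disabled.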
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001139 seq->enable_superres = oxcf->enable_superres;
1140 seq->enable_cdef = oxcf->enable_cdef;
1141 seq->enable_restoration = oxcf->enable_restoration;
Debargha Mukherjee37df9162018-03-25 12:48:24 -07001142 seq->enable_warped_motion = oxcf->enable_warped_motion;
Debargha Mukherjee16ea6ba2018-12-10 12:01:38 -08001143 seq->enable_interintra_compound = oxcf->enable_interintra_comp;
1144 seq->enable_masked_compound = oxcf->enable_masked_comp;
Debargha Mukherjee03c43ba2018-12-14 13:08:08 -08001145 seq->enable_intra_edge_filter = oxcf->enable_intra_edge_filter;
Yue Chen8f9ca582018-12-12 15:11:47 -08001146 seq->enable_filter_intra = oxcf->enable_filter_intra;
Debargha Mukherjee57498692018-05-11 13:29:31 -07001147
Andrey Norkin26495512018-06-20 17:13:11 -07001148 set_bitstream_level_tier(seq, cm, oxcf);
Adrian Grangec56f6ec2018-05-31 14:19:32 -07001149
1150 if (seq->operating_points_cnt_minus_1 == 0) {
1151 seq->operating_point_idc[0] = 0;
1152 } else {
1153 // Set operating_point_idc[] such that for the i-th operating point the
1154 // first (operating_points_cnt-i) spatial layers and the first temporal
1155 // layer are decoded. Note that the highest quality operating point should
1156 // come first.
1157 for (int i = 0; i < seq->operating_points_cnt_minus_1 + 1; i++)
1158 seq->operating_point_idc[i] =
1159 (~(~0u << (seq->operating_points_cnt_minus_1 + 1 - i)) << 8) | 1;
1160 }
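// Illustrative expansion of the loop above for
// operating_points_cnt_minus_1 == 2:
//   i == 0: (~(~0u << 3) << 8) | 1 == 0x0701  (spatial layers 0-2)
//   i == 1: (~(~0u << 2) << 8) | 1 == 0x0301  (spatial layers 0-1)
//   i == 2: (~(~0u << 1) << 8) | 1 == 0x0101  (spatial layer 0 only)
// Bits 8 and up form the spatial-layer mask; bit 0 enables the first temporal
// layer.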
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07001161}
1162
Yaowu Xuf883b422016-08-30 14:01:10 -07001163static void init_config(struct AV1_COMP *cpi, AV1EncoderConfig *oxcf) {
1164 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001165
1166 cpi->oxcf = *oxcf;
1167 cpi->framerate = oxcf->init_framerate;
1168
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001169 cm->seq_params.profile = oxcf->profile;
1170 cm->seq_params.bit_depth = oxcf->bit_depth;
1171 cm->seq_params.use_highbitdepth = oxcf->use_highbitdepth;
1172 cm->seq_params.color_primaries = oxcf->color_primaries;
1173 cm->seq_params.transfer_characteristics = oxcf->transfer_characteristics;
1174 cm->seq_params.matrix_coefficients = oxcf->matrix_coefficients;
Debargha Mukherjeef340fec2018-01-10 18:12:22 -08001175 cm->seq_params.monochrome = oxcf->monochrome;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001176 cm->seq_params.chroma_sample_position = oxcf->chroma_sample_position;
1177 cm->seq_params.color_range = oxcf->color_range;
Andrey Norkin28e9ce22018-01-08 10:11:21 -08001178 cm->timing_info_present = oxcf->timing_info_present;
Andrey Norkin795ba872018-03-06 13:24:14 -08001179 cm->timing_info.num_units_in_display_tick =
1180 oxcf->timing_info.num_units_in_display_tick;
1181 cm->timing_info.time_scale = oxcf->timing_info.time_scale;
1182 cm->timing_info.equal_picture_interval =
1183 oxcf->timing_info.equal_picture_interval;
1184 cm->timing_info.num_ticks_per_picture =
1185 oxcf->timing_info.num_ticks_per_picture;
1186
Andrey Norkin26495512018-06-20 17:13:11 -07001187 cm->seq_params.display_model_info_present_flag =
1188 oxcf->display_model_info_present_flag;
Adrian Grangec56f6ec2018-05-31 14:19:32 -07001189 cm->seq_params.decoder_model_info_present_flag =
1190 oxcf->decoder_model_info_present_flag;
Andrey Norkin795ba872018-03-06 13:24:14 -08001191 if (oxcf->decoder_model_info_present_flag) {
Andrey Norkin26495512018-06-20 17:13:11 -07001192 // set the decoder model parameters in schedule mode
Andrey Norkin795ba872018-03-06 13:24:14 -08001193 cm->buffer_model.num_units_in_decoding_tick =
1194 oxcf->buffer_model.num_units_in_decoding_tick;
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07001195 cm->buffer_removal_time_present = 1;
Andrey Norkin795ba872018-03-06 13:24:14 -08001196 set_aom_dec_model_info(&cm->buffer_model);
Andrey Norkin26495512018-06-20 17:13:11 -07001197 set_dec_model_op_parameters(&cm->op_params[0]);
1198 } else if (cm->timing_info_present &&
1199 cm->timing_info.equal_picture_interval &&
1200 !cm->seq_params.decoder_model_info_present_flag) {
1201 // set the decoder model parameters in resource availability mode
1202 set_resource_availability_parameters(&cm->op_params[0]);
Andrey Norkinc7511de2018-06-22 12:31:06 -07001203 } else {
1204 cm->op_params[0].initial_display_delay =
1205 10; // Default value (not signaled)
Andrey Norkin795ba872018-03-06 13:24:14 -08001206 }
Andrey Norkinc7511de2018-06-22 12:31:06 -07001207
Tom Fineganf8d6a162018-08-21 10:47:55 -07001208 if (cm->seq_params.monochrome) {
1209 cm->seq_params.subsampling_x = 1;
1210 cm->seq_params.subsampling_y = 1;
1211 } else if (cm->seq_params.color_primaries == AOM_CICP_CP_BT_709 &&
1212 cm->seq_params.transfer_characteristics == AOM_CICP_TC_SRGB &&
1213 cm->seq_params.matrix_coefficients == AOM_CICP_MC_IDENTITY) {
1214 cm->seq_params.subsampling_x = 0;
1215 cm->seq_params.subsampling_y = 0;
1216 } else {
1217 if (cm->seq_params.profile == 0) {
1218 cm->seq_params.subsampling_x = 1;
1219 cm->seq_params.subsampling_y = 1;
1220 } else if (cm->seq_params.profile == 1) {
1221 cm->seq_params.subsampling_x = 0;
1222 cm->seq_params.subsampling_y = 0;
1223 } else {
1224 if (cm->seq_params.bit_depth == AOM_BITS_12) {
1225 cm->seq_params.subsampling_x = oxcf->chroma_subsampling_x;
1226 cm->seq_params.subsampling_y = oxcf->chroma_subsampling_y;
1227 } else {
1228 cm->seq_params.subsampling_x = 1;
1229 cm->seq_params.subsampling_y = 0;
1230 }
1231 }
Tom Finegan02b2a842018-08-24 13:50:00 -07001232 }
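// Net effect of the chain above: monochrome streams are signalled with
// subsampling (1, 1) plus the monochrome flag (4:0:0), content marked as sRGB
// (BT.709 primaries, sRGB transfer, identity matrix) and profile 1 use 4:4:4,
// profile 0 uses 4:2:0, and profile 2 uses the configured subsampling at
// 12-bit or 4:2:2 otherwise.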
1233
Yaowu Xuc27fc142016-08-22 16:08:15 -07001234 cm->width = oxcf->width;
1235 cm->height = oxcf->height;
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001236 set_sb_size(&cm->seq_params,
1237 select_sb_size(cpi)); // set sb size before allocations
Cheng Chen46f30c72017-09-07 11:13:33 -07001238 alloc_compressor_data(cpi);
Yaowu Xuc7119a72018-03-29 09:59:37 -07001239
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08001240 update_film_grain_parameters(cpi, oxcf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001241
1242 // Single thread case: use counts in common.
Yue Chencc6a6ef2018-05-21 16:21:05 -07001243 cpi->td.counts = &cpi->counts;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001244
1245 // av1_change_config() handles all jointly dependent configuration.
Yaowu Xuf883b422016-08-30 14:01:10 -07001246 av1_change_config(cpi, oxcf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001247
1248 cpi->static_mb_pct = 0;
1249 cpi->ref_frame_flags = 0;
1250
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07001251 // Reset resize pending flags
1252 cpi->resize_pending_width = 0;
1253 cpi->resize_pending_height = 0;
1254
Yaowu Xuc27fc142016-08-22 16:08:15 -07001255 init_buffer_indices(cpi);
1256}
1257
1258static void set_rc_buffer_sizes(RATE_CONTROL *rc,
Yaowu Xuf883b422016-08-30 14:01:10 -07001259 const AV1EncoderConfig *oxcf) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001260 const int64_t bandwidth = oxcf->target_bandwidth;
1261 const int64_t starting = oxcf->starting_buffer_level_ms;
1262 const int64_t optimal = oxcf->optimal_buffer_level_ms;
1263 const int64_t maximum = oxcf->maximum_buffer_size_ms;
1264
1265 rc->starting_buffer_level = starting * bandwidth / 1000;
1266 rc->optimal_buffer_level =
1267 (optimal == 0) ? bandwidth / 8 : optimal * bandwidth / 1000;
1268 rc->maximum_buffer_size =
1269 (maximum == 0) ? bandwidth / 8 : maximum * bandwidth / 1000;
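// Worked example (illustrative numbers): with target_bandwidth = 1,000,000
// bps, starting_buffer_level_ms = 4000 and the other two settings left at 0,
// this yields starting = 4,000,000 bits and optimal == maximum == 125,000
// bits (bandwidth / 8, i.e. 125 ms of data at the target rate).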
1270}
1271
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001272#define HIGHBD_BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX4DF, JSDAF, JSVAF) \
1273 cpi->fn_ptr[BT].sdf = SDF; \
1274 cpi->fn_ptr[BT].sdaf = SDAF; \
1275 cpi->fn_ptr[BT].vf = VF; \
1276 cpi->fn_ptr[BT].svf = SVF; \
1277 cpi->fn_ptr[BT].svaf = SVAF; \
1278 cpi->fn_ptr[BT].sdx4df = SDX4DF; \
1279 cpi->fn_ptr[BT].jsdaf = JSDAF; \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001280 cpi->fn_ptr[BT].jsvaf = JSVAF;
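// The fn_ptr slots filled by HIGHBD_BFP are, by libaom convention: sdf = SAD,
// sdaf = SAD with second-prediction averaging, vf = variance, svf = sub-pixel
// variance, svaf = sub-pixel averaging variance, sdx4df = SAD against four
// reference blocks at once, and jsdaf / jsvaf = the distance-weighted
// (dist_wtd) compound counterparts that take DIST_WTD_COMP_PARAMS.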
Yaowu Xuc27fc142016-08-22 16:08:15 -07001281
1282#define MAKE_BFP_SAD_WRAPPER(fnname) \
1283 static unsigned int fnname##_bits8(const uint8_t *src_ptr, \
1284 int source_stride, \
1285 const uint8_t *ref_ptr, int ref_stride) { \
1286 return fnname(src_ptr, source_stride, ref_ptr, ref_stride); \
1287 } \
1288 static unsigned int fnname##_bits10( \
1289 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1290 int ref_stride) { \
1291 return fnname(src_ptr, source_stride, ref_ptr, ref_stride) >> 2; \
1292 } \
1293 static unsigned int fnname##_bits12( \
1294 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1295 int ref_stride) { \
1296 return fnname(src_ptr, source_stride, ref_ptr, ref_stride) >> 4; \
1297 }
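// The _bits10 and _bits12 variants shift the raw SAD right by 2 and 4 because
// per-pixel absolute differences grow by roughly 4x and 16x at 10- and 12-bit
// depth; the shift keeps block SAD costs on a comparable 8-bit scale for the
// motion search. For example, MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x16)
// expands to aom_highbd_sad16x16_bits8/_bits10/_bits12, which are plugged
// into the HIGHBD_BFP(BLOCK_16X16, ...) entries further down. The _avg, x4d,
// dist_wtd, masked and OBMC wrapper macros below apply the same
// normalization.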
1298
1299#define MAKE_BFP_SADAVG_WRAPPER(fnname) \
1300 static unsigned int fnname##_bits8( \
1301 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1302 int ref_stride, const uint8_t *second_pred) { \
1303 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred); \
1304 } \
1305 static unsigned int fnname##_bits10( \
1306 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1307 int ref_stride, const uint8_t *second_pred) { \
1308 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred) >> \
1309 2; \
1310 } \
1311 static unsigned int fnname##_bits12( \
1312 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1313 int ref_stride, const uint8_t *second_pred) { \
1314 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred) >> \
1315 4; \
1316 }
1317
Yaowu Xuc27fc142016-08-22 16:08:15 -07001318#define MAKE_BFP_SAD4D_WRAPPER(fnname) \
1319 static void fnname##_bits8(const uint8_t *src_ptr, int source_stride, \
1320 const uint8_t *const ref_ptr[], int ref_stride, \
1321 unsigned int *sad_array) { \
1322 fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
1323 } \
1324 static void fnname##_bits10(const uint8_t *src_ptr, int source_stride, \
1325 const uint8_t *const ref_ptr[], int ref_stride, \
1326 unsigned int *sad_array) { \
1327 int i; \
1328 fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
1329 for (i = 0; i < 4; i++) sad_array[i] >>= 2; \
1330 } \
1331 static void fnname##_bits12(const uint8_t *src_ptr, int source_stride, \
1332 const uint8_t *const ref_ptr[], int ref_stride, \
1333 unsigned int *sad_array) { \
1334 int i; \
1335 fnname(src_ptr, source_stride, ref_ptr, ref_stride, sad_array); \
1336 for (i = 0; i < 4; i++) sad_array[i] >>= 4; \
1337 }
1338
Cheng Chenbf3d4962017-11-01 14:48:52 -07001339#define MAKE_BFP_JSADAVG_WRAPPER(fnname) \
1340 static unsigned int fnname##_bits8( \
1341 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1342 int ref_stride, const uint8_t *second_pred, \
Debargha Mukherjeef90004a2018-12-20 13:35:06 -08001343 const DIST_WTD_COMP_PARAMS *jcp_param) { \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001344 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred, \
1345 jcp_param); \
1346 } \
1347 static unsigned int fnname##_bits10( \
1348 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1349 int ref_stride, const uint8_t *second_pred, \
Debargha Mukherjeef90004a2018-12-20 13:35:06 -08001350 const DIST_WTD_COMP_PARAMS *jcp_param) { \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001351 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred, \
1352 jcp_param) >> \
1353 2; \
1354 } \
1355 static unsigned int fnname##_bits12( \
1356 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1357 int ref_stride, const uint8_t *second_pred, \
Debargha Mukherjeef90004a2018-12-20 13:35:06 -08001358 const DIST_WTD_COMP_PARAMS *jcp_param) { \
Cheng Chenbf3d4962017-11-01 14:48:52 -07001359 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, second_pred, \
1360 jcp_param) >> \
1361 4; \
1362 }
Cheng Chenbf3d4962017-11-01 14:48:52 -07001363
Yaowu Xuf883b422016-08-30 14:01:10 -07001364MAKE_BFP_SAD_WRAPPER(aom_highbd_sad128x128)
1365MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad128x128_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001366MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad128x128x4d)
1367MAKE_BFP_SAD_WRAPPER(aom_highbd_sad128x64)
1368MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad128x64_avg)
1369MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad128x64x4d)
1370MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x128)
1371MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x128_avg)
1372MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x128x4d)
Yaowu Xuf883b422016-08-30 14:01:10 -07001373MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x16)
1374MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x16_avg)
1375MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x16x4d)
1376MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x32)
1377MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x32_avg)
1378MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x32x4d)
1379MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x32)
1380MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x32_avg)
1381MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x32x4d)
1382MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x64)
1383MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x64_avg)
1384MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x64x4d)
1385MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x32)
1386MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x32_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001387MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x32x4d)
1388MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x64)
1389MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x64_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001390MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x64x4d)
1391MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x16)
1392MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x16_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001393MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x16x4d)
1394MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x8)
1395MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x8_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001396MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x8x4d)
1397MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x16)
1398MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x16_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001399MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x16x4d)
1400MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x8)
1401MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x8_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001402MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x8x4d)
1403MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x4)
1404MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x4_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001405MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x4x4d)
1406MAKE_BFP_SAD_WRAPPER(aom_highbd_sad4x8)
1407MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad4x8_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001408MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad4x8x4d)
1409MAKE_BFP_SAD_WRAPPER(aom_highbd_sad4x4)
1410MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad4x4_avg)
Yaowu Xuf883b422016-08-30 14:01:10 -07001411MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad4x4x4d)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001412
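// Wrappers for the extended 1:4 / 4:1 partition sizes (4x16, 16x4, 8x32,
// 32x8, 16x64, 64x16).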
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001413MAKE_BFP_SAD_WRAPPER(aom_highbd_sad4x16)
1414MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad4x16_avg)
1415MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad4x16x4d)
1416MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x4)
1417MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x4_avg)
1418MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x4x4d)
1419MAKE_BFP_SAD_WRAPPER(aom_highbd_sad8x32)
1420MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad8x32_avg)
1421MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad8x32x4d)
1422MAKE_BFP_SAD_WRAPPER(aom_highbd_sad32x8)
1423MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad32x8_avg)
1424MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad32x8x4d)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001425MAKE_BFP_SAD_WRAPPER(aom_highbd_sad16x64)
1426MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad16x64_avg)
1427MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad16x64x4d)
1428MAKE_BFP_SAD_WRAPPER(aom_highbd_sad64x16)
1429MAKE_BFP_SADAVG_WRAPPER(aom_highbd_sad64x16_avg)
1430MAKE_BFP_SAD4D_WRAPPER(aom_highbd_sad64x16x4d)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001431
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001432MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad128x128_avg)
1433MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad128x64_avg)
1434MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x128_avg)
1435MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x16_avg)
1436MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x32_avg)
1437MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x32_avg)
1438MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x64_avg)
1439MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x32_avg)
1440MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x64_avg)
1441MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x16_avg)
1442MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x8_avg)
1443MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x16_avg)
1444MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x8_avg)
1445MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x4_avg)
1446MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad4x8_avg)
1447MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad4x4_avg)
1448MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad4x16_avg)
1449MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x4_avg)
1450MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad8x32_avg)
1451MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad32x8_avg)
1452MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad16x64_avg)
1453MAKE_BFP_JSADAVG_WRAPPER(aom_highbd_dist_wtd_sad64x16_avg)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001454
David Barker0f3c94e2017-05-16 15:21:50 +01001455#define HIGHBD_MBFP(BT, MCSDF, MCSVF) \
David Barkerf19f35f2017-05-22 16:33:22 +01001456 cpi->fn_ptr[BT].msdf = MCSDF; \
1457 cpi->fn_ptr[BT].msvf = MCSVF;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001458
David Barkerc155e012017-05-11 13:54:54 +01001459#define MAKE_MBFP_COMPOUND_SAD_WRAPPER(fnname) \
1460 static unsigned int fnname##_bits8( \
1461 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1462 int ref_stride, const uint8_t *second_pred_ptr, const uint8_t *m, \
1463 int m_stride, int invert_mask) { \
1464 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
1465 second_pred_ptr, m, m_stride, invert_mask); \
1466 } \
1467 static unsigned int fnname##_bits10( \
1468 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1469 int ref_stride, const uint8_t *second_pred_ptr, const uint8_t *m, \
1470 int m_stride, int invert_mask) { \
1471 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
1472 second_pred_ptr, m, m_stride, invert_mask) >> \
1473 2; \
1474 } \
1475 static unsigned int fnname##_bits12( \
1476 const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, \
1477 int ref_stride, const uint8_t *second_pred_ptr, const uint8_t *m, \
1478 int m_stride, int invert_mask) { \
1479 return fnname(src_ptr, source_stride, ref_ptr, ref_stride, \
1480 second_pred_ptr, m, m_stride, invert_mask) >> \
1481 4; \
1482 }
1483
David Barkerf19f35f2017-05-22 16:33:22 +01001484MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad128x128)
1485MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad128x64)
1486MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01001487MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x64)
1488MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x32)
1489MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x64)
1490MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x32)
1491MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x16)
1492MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x32)
1493MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x16)
1494MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x8)
1495MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x16)
1496MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x8)
1497MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x4)
1498MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad4x8)
1499MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001500MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad4x16)
1501MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x4)
1502MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad8x32)
1503MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001504MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad16x64)
1505MAKE_MBFP_COMPOUND_SAD_WRAPPER(aom_highbd_masked_sad64x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001506
Yaowu Xuc27fc142016-08-22 16:08:15 -07001507#define HIGHBD_OBFP(BT, OSDF, OVF, OSVF) \
1508 cpi->fn_ptr[BT].osdf = OSDF; \
1509 cpi->fn_ptr[BT].ovf = OVF; \
1510 cpi->fn_ptr[BT].osvf = OSVF;
1511
1512#define MAKE_OBFP_SAD_WRAPPER(fnname) \
1513 static unsigned int fnname##_bits8(const uint8_t *ref, int ref_stride, \
1514 const int32_t *wsrc, \
1515 const int32_t *msk) { \
1516 return fnname(ref, ref_stride, wsrc, msk); \
1517 } \
1518 static unsigned int fnname##_bits10(const uint8_t *ref, int ref_stride, \
1519 const int32_t *wsrc, \
1520 const int32_t *msk) { \
1521 return fnname(ref, ref_stride, wsrc, msk) >> 2; \
1522 } \
1523 static unsigned int fnname##_bits12(const uint8_t *ref, int ref_stride, \
1524 const int32_t *wsrc, \
1525 const int32_t *msk) { \
1526 return fnname(ref, ref_stride, wsrc, msk) >> 4; \
1527 }
1528
Yaowu Xuf883b422016-08-30 14:01:10 -07001529MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad128x128)
1530MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad128x64)
1531MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07001532MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x64)
1533MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x32)
1534MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x64)
1535MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x32)
1536MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x16)
1537MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x32)
1538MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x16)
1539MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x8)
1540MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x16)
1541MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x8)
1542MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x4)
1543MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad4x8)
1544MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001545MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad4x16)
1546MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x4)
1547MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad8x32)
1548MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001549MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad16x64)
1550MAKE_OBFP_SAD_WRAPPER(aom_highbd_obmc_sad64x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001551
Yaowu Xuf883b422016-08-30 14:01:10 -07001552static void highbd_set_var_fns(AV1_COMP *const cpi) {
1553 AV1_COMMON *const cm = &cpi->common;
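// Rebind the encoder's SAD/variance function table (cpi->fn_ptr) to the
// _bits8 / _bits10 / _bits12 wrappers matching the coded bit depth, so the
// downstream motion search and RD code can stay bit-depth agnostic.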
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001554 if (cm->seq_params.use_highbitdepth) {
1555 switch (cm->seq_params.bit_depth) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001556 case AOM_BITS_8:
Cheng Chenbf3d4962017-11-01 14:48:52 -07001557 HIGHBD_BFP(BLOCK_64X16, aom_highbd_sad64x16_bits8,
1558 aom_highbd_sad64x16_avg_bits8, aom_highbd_8_variance64x16,
1559 aom_highbd_8_sub_pixel_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001560 aom_highbd_8_sub_pixel_avg_variance64x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001561 aom_highbd_sad64x16x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001562 aom_highbd_dist_wtd_sad64x16_avg_bits8,
1563 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001564
1565 HIGHBD_BFP(BLOCK_16X64, aom_highbd_sad16x64_bits8,
1566 aom_highbd_sad16x64_avg_bits8, aom_highbd_8_variance16x64,
1567 aom_highbd_8_sub_pixel_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001568 aom_highbd_8_sub_pixel_avg_variance16x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001569 aom_highbd_sad16x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001570 aom_highbd_dist_wtd_sad16x64_avg_bits8,
1571 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001572
1573 HIGHBD_BFP(
1574 BLOCK_32X8, aom_highbd_sad32x8_bits8, aom_highbd_sad32x8_avg_bits8,
1575 aom_highbd_8_variance32x8, aom_highbd_8_sub_pixel_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001576 aom_highbd_8_sub_pixel_avg_variance32x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001577 aom_highbd_sad32x8x4d_bits8, aom_highbd_dist_wtd_sad32x8_avg_bits8,
1578 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x8)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001579
1580 HIGHBD_BFP(
1581 BLOCK_8X32, aom_highbd_sad8x32_bits8, aom_highbd_sad8x32_avg_bits8,
1582 aom_highbd_8_variance8x32, aom_highbd_8_sub_pixel_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001583 aom_highbd_8_sub_pixel_avg_variance8x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001584 aom_highbd_sad8x32x4d_bits8, aom_highbd_dist_wtd_sad8x32_avg_bits8,
1585 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001586
1587 HIGHBD_BFP(
1588 BLOCK_16X4, aom_highbd_sad16x4_bits8, aom_highbd_sad16x4_avg_bits8,
1589 aom_highbd_8_variance16x4, aom_highbd_8_sub_pixel_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001590 aom_highbd_8_sub_pixel_avg_variance16x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001591 aom_highbd_sad16x4x4d_bits8, aom_highbd_dist_wtd_sad16x4_avg_bits8,
1592 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x4)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001593
1594 HIGHBD_BFP(
1595 BLOCK_4X16, aom_highbd_sad4x16_bits8, aom_highbd_sad4x16_avg_bits8,
1596 aom_highbd_8_variance4x16, aom_highbd_8_sub_pixel_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001597 aom_highbd_8_sub_pixel_avg_variance4x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001598 aom_highbd_sad4x16x4d_bits8, aom_highbd_dist_wtd_sad4x16_avg_bits8,
1599 aom_highbd_8_dist_wtd_sub_pixel_avg_variance4x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001600
1601 HIGHBD_BFP(BLOCK_32X16, aom_highbd_sad32x16_bits8,
1602 aom_highbd_sad32x16_avg_bits8, aom_highbd_8_variance32x16,
1603 aom_highbd_8_sub_pixel_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001604 aom_highbd_8_sub_pixel_avg_variance32x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001605 aom_highbd_sad32x16x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001606 aom_highbd_dist_wtd_sad32x16_avg_bits8,
1607 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001608
1609 HIGHBD_BFP(BLOCK_16X32, aom_highbd_sad16x32_bits8,
1610 aom_highbd_sad16x32_avg_bits8, aom_highbd_8_variance16x32,
1611 aom_highbd_8_sub_pixel_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001612 aom_highbd_8_sub_pixel_avg_variance16x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001613 aom_highbd_sad16x32x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001614 aom_highbd_dist_wtd_sad16x32_avg_bits8,
1615 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001616
1617 HIGHBD_BFP(BLOCK_64X32, aom_highbd_sad64x32_bits8,
1618 aom_highbd_sad64x32_avg_bits8, aom_highbd_8_variance64x32,
1619 aom_highbd_8_sub_pixel_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001620 aom_highbd_8_sub_pixel_avg_variance64x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001621 aom_highbd_sad64x32x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001622 aom_highbd_dist_wtd_sad64x32_avg_bits8,
1623 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001624
1625 HIGHBD_BFP(BLOCK_32X64, aom_highbd_sad32x64_bits8,
1626 aom_highbd_sad32x64_avg_bits8, aom_highbd_8_variance32x64,
1627 aom_highbd_8_sub_pixel_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001628 aom_highbd_8_sub_pixel_avg_variance32x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001629 aom_highbd_sad32x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001630 aom_highbd_dist_wtd_sad32x64_avg_bits8,
1631 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001632
1633 HIGHBD_BFP(BLOCK_32X32, aom_highbd_sad32x32_bits8,
1634 aom_highbd_sad32x32_avg_bits8, aom_highbd_8_variance32x32,
1635 aom_highbd_8_sub_pixel_variance32x32,
1636 aom_highbd_8_sub_pixel_avg_variance32x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001637 aom_highbd_sad32x32x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001638 aom_highbd_dist_wtd_sad32x32_avg_bits8,
1639 aom_highbd_8_dist_wtd_sub_pixel_avg_variance32x32)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001640
1641 HIGHBD_BFP(BLOCK_64X64, aom_highbd_sad64x64_bits8,
1642 aom_highbd_sad64x64_avg_bits8, aom_highbd_8_variance64x64,
1643 aom_highbd_8_sub_pixel_variance64x64,
1644 aom_highbd_8_sub_pixel_avg_variance64x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001645 aom_highbd_sad64x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001646 aom_highbd_dist_wtd_sad64x64_avg_bits8,
1647 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001648
1649 HIGHBD_BFP(BLOCK_16X16, aom_highbd_sad16x16_bits8,
1650 aom_highbd_sad16x16_avg_bits8, aom_highbd_8_variance16x16,
1651 aom_highbd_8_sub_pixel_variance16x16,
1652 aom_highbd_8_sub_pixel_avg_variance16x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001653 aom_highbd_sad16x16x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001654 aom_highbd_dist_wtd_sad16x16_avg_bits8,
1655 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001656
1657 HIGHBD_BFP(
1658 BLOCK_16X8, aom_highbd_sad16x8_bits8, aom_highbd_sad16x8_avg_bits8,
1659 aom_highbd_8_variance16x8, aom_highbd_8_sub_pixel_variance16x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001660 aom_highbd_8_sub_pixel_avg_variance16x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001661 aom_highbd_sad16x8x4d_bits8, aom_highbd_dist_wtd_sad16x8_avg_bits8,
1662 aom_highbd_8_dist_wtd_sub_pixel_avg_variance16x8)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001663
1664 HIGHBD_BFP(
1665 BLOCK_8X16, aom_highbd_sad8x16_bits8, aom_highbd_sad8x16_avg_bits8,
1666 aom_highbd_8_variance8x16, aom_highbd_8_sub_pixel_variance8x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001667 aom_highbd_8_sub_pixel_avg_variance8x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001668 aom_highbd_sad8x16x4d_bits8, aom_highbd_dist_wtd_sad8x16_avg_bits8,
1669 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x16)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001670
Cheng Chenbf3d4962017-11-01 14:48:52 -07001671 HIGHBD_BFP(
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001672 BLOCK_8X8, aom_highbd_sad8x8_bits8, aom_highbd_sad8x8_avg_bits8,
1673 aom_highbd_8_variance8x8, aom_highbd_8_sub_pixel_variance8x8,
1674 aom_highbd_8_sub_pixel_avg_variance8x8, aom_highbd_sad8x8x4d_bits8,
1675 aom_highbd_dist_wtd_sad8x8_avg_bits8,
1676 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x8)
1677
1678 HIGHBD_BFP(
1679 BLOCK_8X4, aom_highbd_sad8x4_bits8, aom_highbd_sad8x4_avg_bits8,
1680 aom_highbd_8_variance8x4, aom_highbd_8_sub_pixel_variance8x4,
1681 aom_highbd_8_sub_pixel_avg_variance8x4, aom_highbd_sad8x4x4d_bits8,
1682 aom_highbd_dist_wtd_sad8x4_avg_bits8,
1683 aom_highbd_8_dist_wtd_sub_pixel_avg_variance8x4)
1684
1685 HIGHBD_BFP(
1686 BLOCK_4X8, aom_highbd_sad4x8_bits8, aom_highbd_sad4x8_avg_bits8,
1687 aom_highbd_8_variance4x8, aom_highbd_8_sub_pixel_variance4x8,
1688 aom_highbd_8_sub_pixel_avg_variance4x8, aom_highbd_sad4x8x4d_bits8,
1689 aom_highbd_dist_wtd_sad4x8_avg_bits8,
1690 aom_highbd_8_dist_wtd_sub_pixel_avg_variance4x8)
1691
1692 HIGHBD_BFP(
1693 BLOCK_4X4, aom_highbd_sad4x4_bits8, aom_highbd_sad4x4_avg_bits8,
1694 aom_highbd_8_variance4x4, aom_highbd_8_sub_pixel_variance4x4,
1695 aom_highbd_8_sub_pixel_avg_variance4x4, aom_highbd_sad4x4x4d_bits8,
1696 aom_highbd_dist_wtd_sad4x4_avg_bits8,
1697 aom_highbd_8_dist_wtd_sub_pixel_avg_variance4x4)
1698
1699 HIGHBD_BFP(BLOCK_128X128, aom_highbd_sad128x128_bits8,
1700 aom_highbd_sad128x128_avg_bits8,
1701 aom_highbd_8_variance128x128,
1702 aom_highbd_8_sub_pixel_variance128x128,
1703 aom_highbd_8_sub_pixel_avg_variance128x128,
1704 aom_highbd_sad128x128x4d_bits8,
1705 aom_highbd_dist_wtd_sad128x128_avg_bits8,
1706 aom_highbd_8_dist_wtd_sub_pixel_avg_variance128x128)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001707
1708 HIGHBD_BFP(BLOCK_128X64, aom_highbd_sad128x64_bits8,
1709 aom_highbd_sad128x64_avg_bits8, aom_highbd_8_variance128x64,
1710 aom_highbd_8_sub_pixel_variance128x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001711 aom_highbd_8_sub_pixel_avg_variance128x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001712 aom_highbd_sad128x64x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001713 aom_highbd_dist_wtd_sad128x64_avg_bits8,
1714 aom_highbd_8_dist_wtd_sub_pixel_avg_variance128x64)
Cheng Chenbf3d4962017-11-01 14:48:52 -07001715
1716 HIGHBD_BFP(BLOCK_64X128, aom_highbd_sad64x128_bits8,
1717 aom_highbd_sad64x128_avg_bits8, aom_highbd_8_variance64x128,
1718 aom_highbd_8_sub_pixel_variance64x128,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001719 aom_highbd_8_sub_pixel_avg_variance64x128,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001720 aom_highbd_sad64x128x4d_bits8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001721 aom_highbd_dist_wtd_sad64x128_avg_bits8,
1722 aom_highbd_8_dist_wtd_sub_pixel_avg_variance64x128)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001723
David Barkerf19f35f2017-05-22 16:33:22 +01001724 HIGHBD_MBFP(BLOCK_128X128, aom_highbd_masked_sad128x128_bits8,
1725 aom_highbd_8_masked_sub_pixel_variance128x128)
1726 HIGHBD_MBFP(BLOCK_128X64, aom_highbd_masked_sad128x64_bits8,
1727 aom_highbd_8_masked_sub_pixel_variance128x64)
1728 HIGHBD_MBFP(BLOCK_64X128, aom_highbd_masked_sad64x128_bits8,
1729 aom_highbd_8_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01001730 HIGHBD_MBFP(BLOCK_64X64, aom_highbd_masked_sad64x64_bits8,
1731 aom_highbd_8_masked_sub_pixel_variance64x64)
1732 HIGHBD_MBFP(BLOCK_64X32, aom_highbd_masked_sad64x32_bits8,
1733 aom_highbd_8_masked_sub_pixel_variance64x32)
1734 HIGHBD_MBFP(BLOCK_32X64, aom_highbd_masked_sad32x64_bits8,
1735 aom_highbd_8_masked_sub_pixel_variance32x64)
1736 HIGHBD_MBFP(BLOCK_32X32, aom_highbd_masked_sad32x32_bits8,
1737 aom_highbd_8_masked_sub_pixel_variance32x32)
1738 HIGHBD_MBFP(BLOCK_32X16, aom_highbd_masked_sad32x16_bits8,
1739 aom_highbd_8_masked_sub_pixel_variance32x16)
1740 HIGHBD_MBFP(BLOCK_16X32, aom_highbd_masked_sad16x32_bits8,
1741 aom_highbd_8_masked_sub_pixel_variance16x32)
1742 HIGHBD_MBFP(BLOCK_16X16, aom_highbd_masked_sad16x16_bits8,
1743 aom_highbd_8_masked_sub_pixel_variance16x16)
1744 HIGHBD_MBFP(BLOCK_8X16, aom_highbd_masked_sad8x16_bits8,
1745 aom_highbd_8_masked_sub_pixel_variance8x16)
1746 HIGHBD_MBFP(BLOCK_16X8, aom_highbd_masked_sad16x8_bits8,
1747 aom_highbd_8_masked_sub_pixel_variance16x8)
1748 HIGHBD_MBFP(BLOCK_8X8, aom_highbd_masked_sad8x8_bits8,
1749 aom_highbd_8_masked_sub_pixel_variance8x8)
1750 HIGHBD_MBFP(BLOCK_4X8, aom_highbd_masked_sad4x8_bits8,
1751 aom_highbd_8_masked_sub_pixel_variance4x8)
1752 HIGHBD_MBFP(BLOCK_8X4, aom_highbd_masked_sad8x4_bits8,
1753 aom_highbd_8_masked_sub_pixel_variance8x4)
1754 HIGHBD_MBFP(BLOCK_4X4, aom_highbd_masked_sad4x4_bits8,
1755 aom_highbd_8_masked_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001756 HIGHBD_MBFP(BLOCK_64X16, aom_highbd_masked_sad64x16_bits8,
1757 aom_highbd_8_masked_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001758 HIGHBD_MBFP(BLOCK_16X64, aom_highbd_masked_sad16x64_bits8,
1759 aom_highbd_8_masked_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001760 HIGHBD_MBFP(BLOCK_32X8, aom_highbd_masked_sad32x8_bits8,
1761 aom_highbd_8_masked_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001762 HIGHBD_MBFP(BLOCK_8X32, aom_highbd_masked_sad8x32_bits8,
1763 aom_highbd_8_masked_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001764 HIGHBD_MBFP(BLOCK_16X4, aom_highbd_masked_sad16x4_bits8,
1765 aom_highbd_8_masked_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001766 HIGHBD_MBFP(BLOCK_4X16, aom_highbd_masked_sad4x16_bits8,
1767 aom_highbd_8_masked_sub_pixel_variance4x16)
Yaowu Xuf883b422016-08-30 14:01:10 -07001768 HIGHBD_OBFP(BLOCK_128X128, aom_highbd_obmc_sad128x128_bits8,
1769 aom_highbd_obmc_variance128x128,
1770 aom_highbd_obmc_sub_pixel_variance128x128)
1771 HIGHBD_OBFP(BLOCK_128X64, aom_highbd_obmc_sad128x64_bits8,
1772 aom_highbd_obmc_variance128x64,
1773 aom_highbd_obmc_sub_pixel_variance128x64)
1774 HIGHBD_OBFP(BLOCK_64X128, aom_highbd_obmc_sad64x128_bits8,
1775 aom_highbd_obmc_variance64x128,
1776 aom_highbd_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07001777 HIGHBD_OBFP(BLOCK_64X64, aom_highbd_obmc_sad64x64_bits8,
1778 aom_highbd_obmc_variance64x64,
1779 aom_highbd_obmc_sub_pixel_variance64x64)
1780 HIGHBD_OBFP(BLOCK_64X32, aom_highbd_obmc_sad64x32_bits8,
1781 aom_highbd_obmc_variance64x32,
1782 aom_highbd_obmc_sub_pixel_variance64x32)
1783 HIGHBD_OBFP(BLOCK_32X64, aom_highbd_obmc_sad32x64_bits8,
1784 aom_highbd_obmc_variance32x64,
1785 aom_highbd_obmc_sub_pixel_variance32x64)
1786 HIGHBD_OBFP(BLOCK_32X32, aom_highbd_obmc_sad32x32_bits8,
1787 aom_highbd_obmc_variance32x32,
1788 aom_highbd_obmc_sub_pixel_variance32x32)
1789 HIGHBD_OBFP(BLOCK_32X16, aom_highbd_obmc_sad32x16_bits8,
1790 aom_highbd_obmc_variance32x16,
1791 aom_highbd_obmc_sub_pixel_variance32x16)
1792 HIGHBD_OBFP(BLOCK_16X32, aom_highbd_obmc_sad16x32_bits8,
1793 aom_highbd_obmc_variance16x32,
1794 aom_highbd_obmc_sub_pixel_variance16x32)
1795 HIGHBD_OBFP(BLOCK_16X16, aom_highbd_obmc_sad16x16_bits8,
1796 aom_highbd_obmc_variance16x16,
1797 aom_highbd_obmc_sub_pixel_variance16x16)
1798 HIGHBD_OBFP(BLOCK_8X16, aom_highbd_obmc_sad8x16_bits8,
1799 aom_highbd_obmc_variance8x16,
1800 aom_highbd_obmc_sub_pixel_variance8x16)
1801 HIGHBD_OBFP(BLOCK_16X8, aom_highbd_obmc_sad16x8_bits8,
1802 aom_highbd_obmc_variance16x8,
1803 aom_highbd_obmc_sub_pixel_variance16x8)
1804 HIGHBD_OBFP(BLOCK_8X8, aom_highbd_obmc_sad8x8_bits8,
1805 aom_highbd_obmc_variance8x8,
1806 aom_highbd_obmc_sub_pixel_variance8x8)
1807 HIGHBD_OBFP(BLOCK_4X8, aom_highbd_obmc_sad4x8_bits8,
1808 aom_highbd_obmc_variance4x8,
1809 aom_highbd_obmc_sub_pixel_variance4x8)
1810 HIGHBD_OBFP(BLOCK_8X4, aom_highbd_obmc_sad8x4_bits8,
1811 aom_highbd_obmc_variance8x4,
1812 aom_highbd_obmc_sub_pixel_variance8x4)
1813 HIGHBD_OBFP(BLOCK_4X4, aom_highbd_obmc_sad4x4_bits8,
1814 aom_highbd_obmc_variance4x4,
1815 aom_highbd_obmc_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001816 HIGHBD_OBFP(BLOCK_64X16, aom_highbd_obmc_sad64x16_bits8,
1817 aom_highbd_obmc_variance64x16,
1818 aom_highbd_obmc_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01001819 HIGHBD_OBFP(BLOCK_16X64, aom_highbd_obmc_sad16x64_bits8,
1820 aom_highbd_obmc_variance16x64,
1821 aom_highbd_obmc_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001822 HIGHBD_OBFP(BLOCK_32X8, aom_highbd_obmc_sad32x8_bits8,
1823 aom_highbd_obmc_variance32x8,
1824 aom_highbd_obmc_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001825 HIGHBD_OBFP(BLOCK_8X32, aom_highbd_obmc_sad8x32_bits8,
1826 aom_highbd_obmc_variance8x32,
1827 aom_highbd_obmc_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001828 HIGHBD_OBFP(BLOCK_16X4, aom_highbd_obmc_sad16x4_bits8,
1829 aom_highbd_obmc_variance16x4,
1830 aom_highbd_obmc_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001831 HIGHBD_OBFP(BLOCK_4X16, aom_highbd_obmc_sad4x16_bits8,
1832 aom_highbd_obmc_variance4x16,
1833 aom_highbd_obmc_sub_pixel_variance4x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001834 break;
1835
Yaowu Xuf883b422016-08-30 14:01:10 -07001836 case AOM_BITS_10:
Cheng Chenbf3d4962017-11-01 14:48:52 -07001837 HIGHBD_BFP(BLOCK_64X16, aom_highbd_sad64x16_bits10,
1838 aom_highbd_sad64x16_avg_bits10, aom_highbd_10_variance64x16,
1839 aom_highbd_10_sub_pixel_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001840 aom_highbd_10_sub_pixel_avg_variance64x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001841 aom_highbd_sad64x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001842 aom_highbd_dist_wtd_sad64x16_avg_bits10,
1843 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001844
1845 HIGHBD_BFP(BLOCK_16X64, aom_highbd_sad16x64_bits10,
1846 aom_highbd_sad16x64_avg_bits10, aom_highbd_10_variance16x64,
1847 aom_highbd_10_sub_pixel_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001848 aom_highbd_10_sub_pixel_avg_variance16x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001849 aom_highbd_sad16x64x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001850 aom_highbd_dist_wtd_sad16x64_avg_bits10,
1851 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001852
1853 HIGHBD_BFP(BLOCK_32X8, aom_highbd_sad32x8_bits10,
1854 aom_highbd_sad32x8_avg_bits10, aom_highbd_10_variance32x8,
1855 aom_highbd_10_sub_pixel_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001856 aom_highbd_10_sub_pixel_avg_variance32x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001857 aom_highbd_sad32x8x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001858 aom_highbd_dist_wtd_sad32x8_avg_bits10,
1859 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001860
1861 HIGHBD_BFP(BLOCK_8X32, aom_highbd_sad8x32_bits10,
1862 aom_highbd_sad8x32_avg_bits10, aom_highbd_10_variance8x32,
1863 aom_highbd_10_sub_pixel_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001864 aom_highbd_10_sub_pixel_avg_variance8x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001865 aom_highbd_sad8x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001866 aom_highbd_dist_wtd_sad8x32_avg_bits10,
1867 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001868
1869 HIGHBD_BFP(BLOCK_16X4, aom_highbd_sad16x4_bits10,
1870 aom_highbd_sad16x4_avg_bits10, aom_highbd_10_variance16x4,
1871 aom_highbd_10_sub_pixel_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001872 aom_highbd_10_sub_pixel_avg_variance16x4,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001873 aom_highbd_sad16x4x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001874 aom_highbd_dist_wtd_sad16x4_avg_bits10,
1875 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001876
1877 HIGHBD_BFP(BLOCK_4X16, aom_highbd_sad4x16_bits10,
1878 aom_highbd_sad4x16_avg_bits10, aom_highbd_10_variance4x16,
1879 aom_highbd_10_sub_pixel_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001880 aom_highbd_10_sub_pixel_avg_variance4x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001881 aom_highbd_sad4x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001882 aom_highbd_dist_wtd_sad4x16_avg_bits10,
1883 aom_highbd_10_dist_wtd_sub_pixel_avg_variance4x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001884
1885 HIGHBD_BFP(BLOCK_32X16, aom_highbd_sad32x16_bits10,
1886 aom_highbd_sad32x16_avg_bits10, aom_highbd_10_variance32x16,
1887 aom_highbd_10_sub_pixel_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001888 aom_highbd_10_sub_pixel_avg_variance32x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001889 aom_highbd_sad32x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001890 aom_highbd_dist_wtd_sad32x16_avg_bits10,
1891 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001892
1893 HIGHBD_BFP(BLOCK_16X32, aom_highbd_sad16x32_bits10,
1894 aom_highbd_sad16x32_avg_bits10, aom_highbd_10_variance16x32,
1895 aom_highbd_10_sub_pixel_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001896 aom_highbd_10_sub_pixel_avg_variance16x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001897 aom_highbd_sad16x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001898 aom_highbd_dist_wtd_sad16x32_avg_bits10,
1899 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001900
1901 HIGHBD_BFP(BLOCK_64X32, aom_highbd_sad64x32_bits10,
1902 aom_highbd_sad64x32_avg_bits10, aom_highbd_10_variance64x32,
1903 aom_highbd_10_sub_pixel_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001904 aom_highbd_10_sub_pixel_avg_variance64x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001905 aom_highbd_sad64x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001906 aom_highbd_dist_wtd_sad64x32_avg_bits10,
1907 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001908
1909 HIGHBD_BFP(BLOCK_32X64, aom_highbd_sad32x64_bits10,
1910 aom_highbd_sad32x64_avg_bits10, aom_highbd_10_variance32x64,
1911 aom_highbd_10_sub_pixel_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001912 aom_highbd_10_sub_pixel_avg_variance32x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001913 aom_highbd_sad32x64x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001914 aom_highbd_dist_wtd_sad32x64_avg_bits10,
1915 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001916
1917 HIGHBD_BFP(BLOCK_32X32, aom_highbd_sad32x32_bits10,
1918 aom_highbd_sad32x32_avg_bits10, aom_highbd_10_variance32x32,
1919 aom_highbd_10_sub_pixel_variance32x32,
1920 aom_highbd_10_sub_pixel_avg_variance32x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001921 aom_highbd_sad32x32x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001922 aom_highbd_dist_wtd_sad32x32_avg_bits10,
1923 aom_highbd_10_dist_wtd_sub_pixel_avg_variance32x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001924
1925 HIGHBD_BFP(BLOCK_64X64, aom_highbd_sad64x64_bits10,
1926 aom_highbd_sad64x64_avg_bits10, aom_highbd_10_variance64x64,
1927 aom_highbd_10_sub_pixel_variance64x64,
1928 aom_highbd_10_sub_pixel_avg_variance64x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001929 aom_highbd_sad64x64x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001930 aom_highbd_dist_wtd_sad64x64_avg_bits10,
1931 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001932
1933 HIGHBD_BFP(BLOCK_16X16, aom_highbd_sad16x16_bits10,
1934 aom_highbd_sad16x16_avg_bits10, aom_highbd_10_variance16x16,
1935 aom_highbd_10_sub_pixel_variance16x16,
1936 aom_highbd_10_sub_pixel_avg_variance16x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001937 aom_highbd_sad16x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001938 aom_highbd_dist_wtd_sad16x16_avg_bits10,
1939 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001940
1941 HIGHBD_BFP(BLOCK_16X8, aom_highbd_sad16x8_bits10,
1942 aom_highbd_sad16x8_avg_bits10, aom_highbd_10_variance16x8,
1943 aom_highbd_10_sub_pixel_variance16x8,
1944 aom_highbd_10_sub_pixel_avg_variance16x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001945 aom_highbd_sad16x8x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001946 aom_highbd_dist_wtd_sad16x8_avg_bits10,
1947 aom_highbd_10_dist_wtd_sub_pixel_avg_variance16x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001948
1949 HIGHBD_BFP(BLOCK_8X16, aom_highbd_sad8x16_bits10,
1950 aom_highbd_sad8x16_avg_bits10, aom_highbd_10_variance8x16,
1951 aom_highbd_10_sub_pixel_variance8x16,
1952 aom_highbd_10_sub_pixel_avg_variance8x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07001953 aom_highbd_sad8x16x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001954 aom_highbd_dist_wtd_sad8x16_avg_bits10,
1955 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001956
1957 HIGHBD_BFP(
1958 BLOCK_8X8, aom_highbd_sad8x8_bits10, aom_highbd_sad8x8_avg_bits10,
1959 aom_highbd_10_variance8x8, aom_highbd_10_sub_pixel_variance8x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001960 aom_highbd_10_sub_pixel_avg_variance8x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001961 aom_highbd_sad8x8x4d_bits10, aom_highbd_dist_wtd_sad8x8_avg_bits10,
1962 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001963
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001964 HIGHBD_BFP(
1965 BLOCK_8X4, aom_highbd_sad8x4_bits10, aom_highbd_sad8x4_avg_bits10,
1966 aom_highbd_10_variance8x4, aom_highbd_10_sub_pixel_variance8x4,
1967 aom_highbd_10_sub_pixel_avg_variance8x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001968 aom_highbd_sad8x4x4d_bits10, aom_highbd_dist_wtd_sad8x4_avg_bits10,
1969 aom_highbd_10_dist_wtd_sub_pixel_avg_variance8x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001970
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001971 HIGHBD_BFP(
1972 BLOCK_4X8, aom_highbd_sad4x8_bits10, aom_highbd_sad4x8_avg_bits10,
1973 aom_highbd_10_variance4x8, aom_highbd_10_sub_pixel_variance4x8,
1974 aom_highbd_10_sub_pixel_avg_variance4x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001975 aom_highbd_sad4x8x4d_bits10, aom_highbd_dist_wtd_sad4x8_avg_bits10,
1976 aom_highbd_10_dist_wtd_sub_pixel_avg_variance4x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001977
1978 HIGHBD_BFP(
1979 BLOCK_4X4, aom_highbd_sad4x4_bits10, aom_highbd_sad4x4_avg_bits10,
1980 aom_highbd_10_variance4x4, aom_highbd_10_sub_pixel_variance4x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001981 aom_highbd_10_sub_pixel_avg_variance4x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001982 aom_highbd_sad4x4x4d_bits10, aom_highbd_dist_wtd_sad4x4_avg_bits10,
1983 aom_highbd_10_dist_wtd_sub_pixel_avg_variance4x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001984
Kyle Siefringef6e2df2018-04-10 14:51:35 -04001985 HIGHBD_BFP(BLOCK_128X128, aom_highbd_sad128x128_bits10,
1986 aom_highbd_sad128x128_avg_bits10,
1987 aom_highbd_10_variance128x128,
1988 aom_highbd_10_sub_pixel_variance128x128,
1989 aom_highbd_10_sub_pixel_avg_variance128x128,
1990 aom_highbd_sad128x128x4d_bits10,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001991 aom_highbd_dist_wtd_sad128x128_avg_bits10,
1992 aom_highbd_10_dist_wtd_sub_pixel_avg_variance128x128);
Cheng Chenbf3d4962017-11-01 14:48:52 -07001993
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08001994 HIGHBD_BFP(BLOCK_128X64, aom_highbd_sad128x64_bits10,
1995 aom_highbd_sad128x64_avg_bits10,
1996 aom_highbd_10_variance128x64,
1997 aom_highbd_10_sub_pixel_variance128x64,
1998 aom_highbd_10_sub_pixel_avg_variance128x64,
1999 aom_highbd_sad128x64x4d_bits10,
2000 aom_highbd_dist_wtd_sad128x64_avg_bits10,
2001 aom_highbd_10_dist_wtd_sub_pixel_avg_variance128x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002002
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002003 HIGHBD_BFP(BLOCK_64X128, aom_highbd_sad64x128_bits10,
2004 aom_highbd_sad64x128_avg_bits10,
2005 aom_highbd_10_variance64x128,
2006 aom_highbd_10_sub_pixel_variance64x128,
2007 aom_highbd_10_sub_pixel_avg_variance64x128,
2008 aom_highbd_sad64x128x4d_bits10,
2009 aom_highbd_dist_wtd_sad64x128_avg_bits10,
2010 aom_highbd_10_dist_wtd_sub_pixel_avg_variance64x128);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002011
David Barkerf19f35f2017-05-22 16:33:22 +01002012 HIGHBD_MBFP(BLOCK_128X128, aom_highbd_masked_sad128x128_bits10,
2013 aom_highbd_10_masked_sub_pixel_variance128x128)
2014 HIGHBD_MBFP(BLOCK_128X64, aom_highbd_masked_sad128x64_bits10,
2015 aom_highbd_10_masked_sub_pixel_variance128x64)
2016 HIGHBD_MBFP(BLOCK_64X128, aom_highbd_masked_sad64x128_bits10,
2017 aom_highbd_10_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01002018 HIGHBD_MBFP(BLOCK_64X64, aom_highbd_masked_sad64x64_bits10,
2019 aom_highbd_10_masked_sub_pixel_variance64x64)
2020 HIGHBD_MBFP(BLOCK_64X32, aom_highbd_masked_sad64x32_bits10,
2021 aom_highbd_10_masked_sub_pixel_variance64x32)
2022 HIGHBD_MBFP(BLOCK_32X64, aom_highbd_masked_sad32x64_bits10,
2023 aom_highbd_10_masked_sub_pixel_variance32x64)
2024 HIGHBD_MBFP(BLOCK_32X32, aom_highbd_masked_sad32x32_bits10,
2025 aom_highbd_10_masked_sub_pixel_variance32x32)
2026 HIGHBD_MBFP(BLOCK_32X16, aom_highbd_masked_sad32x16_bits10,
2027 aom_highbd_10_masked_sub_pixel_variance32x16)
2028 HIGHBD_MBFP(BLOCK_16X32, aom_highbd_masked_sad16x32_bits10,
2029 aom_highbd_10_masked_sub_pixel_variance16x32)
2030 HIGHBD_MBFP(BLOCK_16X16, aom_highbd_masked_sad16x16_bits10,
2031 aom_highbd_10_masked_sub_pixel_variance16x16)
2032 HIGHBD_MBFP(BLOCK_8X16, aom_highbd_masked_sad8x16_bits10,
2033 aom_highbd_10_masked_sub_pixel_variance8x16)
2034 HIGHBD_MBFP(BLOCK_16X8, aom_highbd_masked_sad16x8_bits10,
2035 aom_highbd_10_masked_sub_pixel_variance16x8)
2036 HIGHBD_MBFP(BLOCK_8X8, aom_highbd_masked_sad8x8_bits10,
2037 aom_highbd_10_masked_sub_pixel_variance8x8)
2038 HIGHBD_MBFP(BLOCK_4X8, aom_highbd_masked_sad4x8_bits10,
2039 aom_highbd_10_masked_sub_pixel_variance4x8)
2040 HIGHBD_MBFP(BLOCK_8X4, aom_highbd_masked_sad8x4_bits10,
2041 aom_highbd_10_masked_sub_pixel_variance8x4)
2042 HIGHBD_MBFP(BLOCK_4X4, aom_highbd_masked_sad4x4_bits10,
2043 aom_highbd_10_masked_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002044 HIGHBD_MBFP(BLOCK_64X16, aom_highbd_masked_sad64x16_bits10,
2045 aom_highbd_10_masked_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002046 HIGHBD_MBFP(BLOCK_16X64, aom_highbd_masked_sad16x64_bits10,
2047 aom_highbd_10_masked_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002048 HIGHBD_MBFP(BLOCK_32X8, aom_highbd_masked_sad32x8_bits10,
2049 aom_highbd_10_masked_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002050 HIGHBD_MBFP(BLOCK_8X32, aom_highbd_masked_sad8x32_bits10,
2051 aom_highbd_10_masked_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002052 HIGHBD_MBFP(BLOCK_16X4, aom_highbd_masked_sad16x4_bits10,
2053 aom_highbd_10_masked_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002054 HIGHBD_MBFP(BLOCK_4X16, aom_highbd_masked_sad4x16_bits10,
2055 aom_highbd_10_masked_sub_pixel_variance4x16)
Yaowu Xuf883b422016-08-30 14:01:10 -07002056 HIGHBD_OBFP(BLOCK_128X128, aom_highbd_obmc_sad128x128_bits10,
2057 aom_highbd_10_obmc_variance128x128,
2058 aom_highbd_10_obmc_sub_pixel_variance128x128)
2059 HIGHBD_OBFP(BLOCK_128X64, aom_highbd_obmc_sad128x64_bits10,
2060 aom_highbd_10_obmc_variance128x64,
2061 aom_highbd_10_obmc_sub_pixel_variance128x64)
2062 HIGHBD_OBFP(BLOCK_64X128, aom_highbd_obmc_sad64x128_bits10,
2063 aom_highbd_10_obmc_variance64x128,
2064 aom_highbd_10_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07002065 HIGHBD_OBFP(BLOCK_64X64, aom_highbd_obmc_sad64x64_bits10,
2066 aom_highbd_10_obmc_variance64x64,
2067 aom_highbd_10_obmc_sub_pixel_variance64x64)
2068 HIGHBD_OBFP(BLOCK_64X32, aom_highbd_obmc_sad64x32_bits10,
2069 aom_highbd_10_obmc_variance64x32,
2070 aom_highbd_10_obmc_sub_pixel_variance64x32)
2071 HIGHBD_OBFP(BLOCK_32X64, aom_highbd_obmc_sad32x64_bits10,
2072 aom_highbd_10_obmc_variance32x64,
2073 aom_highbd_10_obmc_sub_pixel_variance32x64)
2074 HIGHBD_OBFP(BLOCK_32X32, aom_highbd_obmc_sad32x32_bits10,
2075 aom_highbd_10_obmc_variance32x32,
2076 aom_highbd_10_obmc_sub_pixel_variance32x32)
2077 HIGHBD_OBFP(BLOCK_32X16, aom_highbd_obmc_sad32x16_bits10,
2078 aom_highbd_10_obmc_variance32x16,
2079 aom_highbd_10_obmc_sub_pixel_variance32x16)
2080 HIGHBD_OBFP(BLOCK_16X32, aom_highbd_obmc_sad16x32_bits10,
2081 aom_highbd_10_obmc_variance16x32,
2082 aom_highbd_10_obmc_sub_pixel_variance16x32)
2083 HIGHBD_OBFP(BLOCK_16X16, aom_highbd_obmc_sad16x16_bits10,
2084 aom_highbd_10_obmc_variance16x16,
2085 aom_highbd_10_obmc_sub_pixel_variance16x16)
2086 HIGHBD_OBFP(BLOCK_8X16, aom_highbd_obmc_sad8x16_bits10,
2087 aom_highbd_10_obmc_variance8x16,
2088 aom_highbd_10_obmc_sub_pixel_variance8x16)
2089 HIGHBD_OBFP(BLOCK_16X8, aom_highbd_obmc_sad16x8_bits10,
2090 aom_highbd_10_obmc_variance16x8,
2091 aom_highbd_10_obmc_sub_pixel_variance16x8)
2092 HIGHBD_OBFP(BLOCK_8X8, aom_highbd_obmc_sad8x8_bits10,
2093 aom_highbd_10_obmc_variance8x8,
2094 aom_highbd_10_obmc_sub_pixel_variance8x8)
2095 HIGHBD_OBFP(BLOCK_4X8, aom_highbd_obmc_sad4x8_bits10,
2096 aom_highbd_10_obmc_variance4x8,
2097 aom_highbd_10_obmc_sub_pixel_variance4x8)
2098 HIGHBD_OBFP(BLOCK_8X4, aom_highbd_obmc_sad8x4_bits10,
2099 aom_highbd_10_obmc_variance8x4,
2100 aom_highbd_10_obmc_sub_pixel_variance8x4)
2101 HIGHBD_OBFP(BLOCK_4X4, aom_highbd_obmc_sad4x4_bits10,
2102 aom_highbd_10_obmc_variance4x4,
2103 aom_highbd_10_obmc_sub_pixel_variance4x4)
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01002104
Rupert Swarbrick72678572017-08-02 12:05:26 +01002105 HIGHBD_OBFP(BLOCK_64X16, aom_highbd_obmc_sad64x16_bits10,
2106 aom_highbd_10_obmc_variance64x16,
2107 aom_highbd_10_obmc_sub_pixel_variance64x16)
2108
2109 HIGHBD_OBFP(BLOCK_16X64, aom_highbd_obmc_sad16x64_bits10,
2110 aom_highbd_10_obmc_variance16x64,
2111 aom_highbd_10_obmc_sub_pixel_variance16x64)
2112
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002113 HIGHBD_OBFP(BLOCK_32X8, aom_highbd_obmc_sad32x8_bits10,
2114 aom_highbd_10_obmc_variance32x8,
2115 aom_highbd_10_obmc_sub_pixel_variance32x8)
2116
2117 HIGHBD_OBFP(BLOCK_8X32, aom_highbd_obmc_sad8x32_bits10,
2118 aom_highbd_10_obmc_variance8x32,
2119 aom_highbd_10_obmc_sub_pixel_variance8x32)
2120
2121 HIGHBD_OBFP(BLOCK_16X4, aom_highbd_obmc_sad16x4_bits10,
2122 aom_highbd_10_obmc_variance16x4,
2123 aom_highbd_10_obmc_sub_pixel_variance16x4)
2124
2125 HIGHBD_OBFP(BLOCK_4X16, aom_highbd_obmc_sad4x16_bits10,
2126 aom_highbd_10_obmc_variance4x16,
2127 aom_highbd_10_obmc_sub_pixel_variance4x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002128 break;
2129
Yaowu Xuf883b422016-08-30 14:01:10 -07002130 case AOM_BITS_12:
Cheng Chenbf3d4962017-11-01 14:48:52 -07002131 HIGHBD_BFP(BLOCK_64X16, aom_highbd_sad64x16_bits12,
2132 aom_highbd_sad64x16_avg_bits12, aom_highbd_12_variance64x16,
2133 aom_highbd_12_sub_pixel_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002134 aom_highbd_12_sub_pixel_avg_variance64x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002135 aom_highbd_sad64x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002136 aom_highbd_dist_wtd_sad64x16_avg_bits12,
2137 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002138
2139 HIGHBD_BFP(BLOCK_16X64, aom_highbd_sad16x64_bits12,
2140 aom_highbd_sad16x64_avg_bits12, aom_highbd_12_variance16x64,
2141 aom_highbd_12_sub_pixel_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002142 aom_highbd_12_sub_pixel_avg_variance16x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002143 aom_highbd_sad16x64x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002144 aom_highbd_dist_wtd_sad16x64_avg_bits12,
2145 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002146
2147 HIGHBD_BFP(BLOCK_32X8, aom_highbd_sad32x8_bits12,
2148 aom_highbd_sad32x8_avg_bits12, aom_highbd_12_variance32x8,
2149 aom_highbd_12_sub_pixel_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002150 aom_highbd_12_sub_pixel_avg_variance32x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002151 aom_highbd_sad32x8x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002152 aom_highbd_dist_wtd_sad32x8_avg_bits12,
2153 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002154
2155 HIGHBD_BFP(BLOCK_8X32, aom_highbd_sad8x32_bits12,
2156 aom_highbd_sad8x32_avg_bits12, aom_highbd_12_variance8x32,
2157 aom_highbd_12_sub_pixel_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002158 aom_highbd_12_sub_pixel_avg_variance8x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002159 aom_highbd_sad8x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002160 aom_highbd_dist_wtd_sad8x32_avg_bits12,
2161 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002162
2163 HIGHBD_BFP(BLOCK_16X4, aom_highbd_sad16x4_bits12,
2164 aom_highbd_sad16x4_avg_bits12, aom_highbd_12_variance16x4,
2165 aom_highbd_12_sub_pixel_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002166 aom_highbd_12_sub_pixel_avg_variance16x4,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002167 aom_highbd_sad16x4x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002168 aom_highbd_dist_wtd_sad16x4_avg_bits12,
2169 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002170
2171 HIGHBD_BFP(BLOCK_4X16, aom_highbd_sad4x16_bits12,
2172 aom_highbd_sad4x16_avg_bits12, aom_highbd_12_variance4x16,
2173 aom_highbd_12_sub_pixel_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002174 aom_highbd_12_sub_pixel_avg_variance4x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002175 aom_highbd_sad4x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002176 aom_highbd_dist_wtd_sad4x16_avg_bits12,
2177 aom_highbd_12_dist_wtd_sub_pixel_avg_variance4x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002178
2179 HIGHBD_BFP(BLOCK_32X16, aom_highbd_sad32x16_bits12,
2180 aom_highbd_sad32x16_avg_bits12, aom_highbd_12_variance32x16,
2181 aom_highbd_12_sub_pixel_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002182 aom_highbd_12_sub_pixel_avg_variance32x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002183 aom_highbd_sad32x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002184 aom_highbd_dist_wtd_sad32x16_avg_bits12,
2185 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002186
2187 HIGHBD_BFP(BLOCK_16X32, aom_highbd_sad16x32_bits12,
2188 aom_highbd_sad16x32_avg_bits12, aom_highbd_12_variance16x32,
2189 aom_highbd_12_sub_pixel_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002190 aom_highbd_12_sub_pixel_avg_variance16x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002191 aom_highbd_sad16x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002192 aom_highbd_dist_wtd_sad16x32_avg_bits12,
2193 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002194
2195 HIGHBD_BFP(BLOCK_64X32, aom_highbd_sad64x32_bits12,
2196 aom_highbd_sad64x32_avg_bits12, aom_highbd_12_variance64x32,
2197 aom_highbd_12_sub_pixel_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002198 aom_highbd_12_sub_pixel_avg_variance64x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002199 aom_highbd_sad64x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002200 aom_highbd_dist_wtd_sad64x32_avg_bits12,
2201 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002202
2203 HIGHBD_BFP(BLOCK_32X64, aom_highbd_sad32x64_bits12,
2204 aom_highbd_sad32x64_avg_bits12, aom_highbd_12_variance32x64,
2205 aom_highbd_12_sub_pixel_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002206 aom_highbd_12_sub_pixel_avg_variance32x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002207 aom_highbd_sad32x64x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002208 aom_highbd_dist_wtd_sad32x64_avg_bits12,
2209 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002210
2211 HIGHBD_BFP(BLOCK_32X32, aom_highbd_sad32x32_bits12,
2212 aom_highbd_sad32x32_avg_bits12, aom_highbd_12_variance32x32,
2213 aom_highbd_12_sub_pixel_variance32x32,
2214 aom_highbd_12_sub_pixel_avg_variance32x32,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002215 aom_highbd_sad32x32x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002216 aom_highbd_dist_wtd_sad32x32_avg_bits12,
2217 aom_highbd_12_dist_wtd_sub_pixel_avg_variance32x32);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002218
2219 HIGHBD_BFP(BLOCK_64X64, aom_highbd_sad64x64_bits12,
2220 aom_highbd_sad64x64_avg_bits12, aom_highbd_12_variance64x64,
2221 aom_highbd_12_sub_pixel_variance64x64,
2222 aom_highbd_12_sub_pixel_avg_variance64x64,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002223 aom_highbd_sad64x64x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002224 aom_highbd_dist_wtd_sad64x64_avg_bits12,
2225 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002226
2227 HIGHBD_BFP(BLOCK_16X16, aom_highbd_sad16x16_bits12,
2228 aom_highbd_sad16x16_avg_bits12, aom_highbd_12_variance16x16,
2229 aom_highbd_12_sub_pixel_variance16x16,
2230 aom_highbd_12_sub_pixel_avg_variance16x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002231 aom_highbd_sad16x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002232 aom_highbd_dist_wtd_sad16x16_avg_bits12,
2233 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002234
2235 HIGHBD_BFP(BLOCK_16X8, aom_highbd_sad16x8_bits12,
2236 aom_highbd_sad16x8_avg_bits12, aom_highbd_12_variance16x8,
2237 aom_highbd_12_sub_pixel_variance16x8,
2238 aom_highbd_12_sub_pixel_avg_variance16x8,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002239 aom_highbd_sad16x8x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002240 aom_highbd_dist_wtd_sad16x8_avg_bits12,
2241 aom_highbd_12_dist_wtd_sub_pixel_avg_variance16x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002242
2243 HIGHBD_BFP(BLOCK_8X16, aom_highbd_sad8x16_bits12,
2244 aom_highbd_sad8x16_avg_bits12, aom_highbd_12_variance8x16,
2245 aom_highbd_12_sub_pixel_variance8x16,
2246 aom_highbd_12_sub_pixel_avg_variance8x16,
Cheng Chenbf3d4962017-11-01 14:48:52 -07002247 aom_highbd_sad8x16x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002248 aom_highbd_dist_wtd_sad8x16_avg_bits12,
2249 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x16);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002250
2251 HIGHBD_BFP(
2252 BLOCK_8X8, aom_highbd_sad8x8_bits12, aom_highbd_sad8x8_avg_bits12,
2253 aom_highbd_12_variance8x8, aom_highbd_12_sub_pixel_variance8x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002254 aom_highbd_12_sub_pixel_avg_variance8x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002255 aom_highbd_sad8x8x4d_bits12, aom_highbd_dist_wtd_sad8x8_avg_bits12,
2256 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002257
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002258 HIGHBD_BFP(
2259 BLOCK_8X4, aom_highbd_sad8x4_bits12, aom_highbd_sad8x4_avg_bits12,
2260 aom_highbd_12_variance8x4, aom_highbd_12_sub_pixel_variance8x4,
2261 aom_highbd_12_sub_pixel_avg_variance8x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002262 aom_highbd_sad8x4x4d_bits12, aom_highbd_dist_wtd_sad8x4_avg_bits12,
2263 aom_highbd_12_dist_wtd_sub_pixel_avg_variance8x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002264
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002265 HIGHBD_BFP(
2266 BLOCK_4X8, aom_highbd_sad4x8_bits12, aom_highbd_sad4x8_avg_bits12,
2267 aom_highbd_12_variance4x8, aom_highbd_12_sub_pixel_variance4x8,
2268 aom_highbd_12_sub_pixel_avg_variance4x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002269 aom_highbd_sad4x8x4d_bits12, aom_highbd_dist_wtd_sad4x8_avg_bits12,
2270 aom_highbd_12_dist_wtd_sub_pixel_avg_variance4x8);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002271
2272 HIGHBD_BFP(
2273 BLOCK_4X4, aom_highbd_sad4x4_bits12, aom_highbd_sad4x4_avg_bits12,
2274 aom_highbd_12_variance4x4, aom_highbd_12_sub_pixel_variance4x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002275 aom_highbd_12_sub_pixel_avg_variance4x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002276 aom_highbd_sad4x4x4d_bits12, aom_highbd_dist_wtd_sad4x4_avg_bits12,
2277 aom_highbd_12_dist_wtd_sub_pixel_avg_variance4x4);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002278
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002279 HIGHBD_BFP(BLOCK_128X128, aom_highbd_sad128x128_bits12,
2280 aom_highbd_sad128x128_avg_bits12,
2281 aom_highbd_12_variance128x128,
2282 aom_highbd_12_sub_pixel_variance128x128,
2283 aom_highbd_12_sub_pixel_avg_variance128x128,
2284 aom_highbd_sad128x128x4d_bits12,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002285 aom_highbd_dist_wtd_sad128x128_avg_bits12,
2286 aom_highbd_12_dist_wtd_sub_pixel_avg_variance128x128);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002287
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002288 HIGHBD_BFP(BLOCK_128X64, aom_highbd_sad128x64_bits12,
2289 aom_highbd_sad128x64_avg_bits12,
2290 aom_highbd_12_variance128x64,
2291 aom_highbd_12_sub_pixel_variance128x64,
2292 aom_highbd_12_sub_pixel_avg_variance128x64,
2293 aom_highbd_sad128x64x4d_bits12,
2294 aom_highbd_dist_wtd_sad128x64_avg_bits12,
2295 aom_highbd_12_dist_wtd_sub_pixel_avg_variance128x64);
Cheng Chenbf3d4962017-11-01 14:48:52 -07002296
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002297 HIGHBD_BFP(BLOCK_64X128, aom_highbd_sad64x128_bits12,
2298 aom_highbd_sad64x128_avg_bits12,
2299 aom_highbd_12_variance64x128,
2300 aom_highbd_12_sub_pixel_variance64x128,
2301 aom_highbd_12_sub_pixel_avg_variance64x128,
2302 aom_highbd_sad64x128x4d_bits12,
2303 aom_highbd_dist_wtd_sad64x128_avg_bits12,
2304 aom_highbd_12_dist_wtd_sub_pixel_avg_variance64x128);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002305
David Barkerf19f35f2017-05-22 16:33:22 +01002306 HIGHBD_MBFP(BLOCK_128X128, aom_highbd_masked_sad128x128_bits12,
2307 aom_highbd_12_masked_sub_pixel_variance128x128)
2308 HIGHBD_MBFP(BLOCK_128X64, aom_highbd_masked_sad128x64_bits12,
2309 aom_highbd_12_masked_sub_pixel_variance128x64)
2310 HIGHBD_MBFP(BLOCK_64X128, aom_highbd_masked_sad64x128_bits12,
2311 aom_highbd_12_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01002312 HIGHBD_MBFP(BLOCK_64X64, aom_highbd_masked_sad64x64_bits12,
2313 aom_highbd_12_masked_sub_pixel_variance64x64)
2314 HIGHBD_MBFP(BLOCK_64X32, aom_highbd_masked_sad64x32_bits12,
2315 aom_highbd_12_masked_sub_pixel_variance64x32)
2316 HIGHBD_MBFP(BLOCK_32X64, aom_highbd_masked_sad32x64_bits12,
2317 aom_highbd_12_masked_sub_pixel_variance32x64)
2318 HIGHBD_MBFP(BLOCK_32X32, aom_highbd_masked_sad32x32_bits12,
2319 aom_highbd_12_masked_sub_pixel_variance32x32)
2320 HIGHBD_MBFP(BLOCK_32X16, aom_highbd_masked_sad32x16_bits12,
2321 aom_highbd_12_masked_sub_pixel_variance32x16)
2322 HIGHBD_MBFP(BLOCK_16X32, aom_highbd_masked_sad16x32_bits12,
2323 aom_highbd_12_masked_sub_pixel_variance16x32)
2324 HIGHBD_MBFP(BLOCK_16X16, aom_highbd_masked_sad16x16_bits12,
2325 aom_highbd_12_masked_sub_pixel_variance16x16)
2326 HIGHBD_MBFP(BLOCK_8X16, aom_highbd_masked_sad8x16_bits12,
2327 aom_highbd_12_masked_sub_pixel_variance8x16)
2328 HIGHBD_MBFP(BLOCK_16X8, aom_highbd_masked_sad16x8_bits12,
2329 aom_highbd_12_masked_sub_pixel_variance16x8)
2330 HIGHBD_MBFP(BLOCK_8X8, aom_highbd_masked_sad8x8_bits12,
2331 aom_highbd_12_masked_sub_pixel_variance8x8)
2332 HIGHBD_MBFP(BLOCK_4X8, aom_highbd_masked_sad4x8_bits12,
2333 aom_highbd_12_masked_sub_pixel_variance4x8)
2334 HIGHBD_MBFP(BLOCK_8X4, aom_highbd_masked_sad8x4_bits12,
2335 aom_highbd_12_masked_sub_pixel_variance8x4)
2336 HIGHBD_MBFP(BLOCK_4X4, aom_highbd_masked_sad4x4_bits12,
2337 aom_highbd_12_masked_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002338 HIGHBD_MBFP(BLOCK_64X16, aom_highbd_masked_sad64x16_bits12,
2339 aom_highbd_12_masked_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002340 HIGHBD_MBFP(BLOCK_16X64, aom_highbd_masked_sad16x64_bits12,
2341 aom_highbd_12_masked_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002342 HIGHBD_MBFP(BLOCK_32X8, aom_highbd_masked_sad32x8_bits12,
2343 aom_highbd_12_masked_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002344 HIGHBD_MBFP(BLOCK_8X32, aom_highbd_masked_sad8x32_bits12,
2345 aom_highbd_12_masked_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002346 HIGHBD_MBFP(BLOCK_16X4, aom_highbd_masked_sad16x4_bits12,
2347 aom_highbd_12_masked_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002348 HIGHBD_MBFP(BLOCK_4X16, aom_highbd_masked_sad4x16_bits12,
2349 aom_highbd_12_masked_sub_pixel_variance4x16)
Yaowu Xuf883b422016-08-30 14:01:10 -07002350 HIGHBD_OBFP(BLOCK_128X128, aom_highbd_obmc_sad128x128_bits12,
2351 aom_highbd_12_obmc_variance128x128,
2352 aom_highbd_12_obmc_sub_pixel_variance128x128)
2353 HIGHBD_OBFP(BLOCK_128X64, aom_highbd_obmc_sad128x64_bits12,
2354 aom_highbd_12_obmc_variance128x64,
2355 aom_highbd_12_obmc_sub_pixel_variance128x64)
2356 HIGHBD_OBFP(BLOCK_64X128, aom_highbd_obmc_sad64x128_bits12,
2357 aom_highbd_12_obmc_variance64x128,
2358 aom_highbd_12_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07002359 HIGHBD_OBFP(BLOCK_64X64, aom_highbd_obmc_sad64x64_bits12,
2360 aom_highbd_12_obmc_variance64x64,
2361 aom_highbd_12_obmc_sub_pixel_variance64x64)
2362 HIGHBD_OBFP(BLOCK_64X32, aom_highbd_obmc_sad64x32_bits12,
2363 aom_highbd_12_obmc_variance64x32,
2364 aom_highbd_12_obmc_sub_pixel_variance64x32)
2365 HIGHBD_OBFP(BLOCK_32X64, aom_highbd_obmc_sad32x64_bits12,
2366 aom_highbd_12_obmc_variance32x64,
2367 aom_highbd_12_obmc_sub_pixel_variance32x64)
2368 HIGHBD_OBFP(BLOCK_32X32, aom_highbd_obmc_sad32x32_bits12,
2369 aom_highbd_12_obmc_variance32x32,
2370 aom_highbd_12_obmc_sub_pixel_variance32x32)
2371 HIGHBD_OBFP(BLOCK_32X16, aom_highbd_obmc_sad32x16_bits12,
2372 aom_highbd_12_obmc_variance32x16,
2373 aom_highbd_12_obmc_sub_pixel_variance32x16)
2374 HIGHBD_OBFP(BLOCK_16X32, aom_highbd_obmc_sad16x32_bits12,
2375 aom_highbd_12_obmc_variance16x32,
2376 aom_highbd_12_obmc_sub_pixel_variance16x32)
2377 HIGHBD_OBFP(BLOCK_16X16, aom_highbd_obmc_sad16x16_bits12,
2378 aom_highbd_12_obmc_variance16x16,
2379 aom_highbd_12_obmc_sub_pixel_variance16x16)
2380 HIGHBD_OBFP(BLOCK_8X16, aom_highbd_obmc_sad8x16_bits12,
2381 aom_highbd_12_obmc_variance8x16,
2382 aom_highbd_12_obmc_sub_pixel_variance8x16)
2383 HIGHBD_OBFP(BLOCK_16X8, aom_highbd_obmc_sad16x8_bits12,
2384 aom_highbd_12_obmc_variance16x8,
2385 aom_highbd_12_obmc_sub_pixel_variance16x8)
2386 HIGHBD_OBFP(BLOCK_8X8, aom_highbd_obmc_sad8x8_bits12,
2387 aom_highbd_12_obmc_variance8x8,
2388 aom_highbd_12_obmc_sub_pixel_variance8x8)
2389 HIGHBD_OBFP(BLOCK_4X8, aom_highbd_obmc_sad4x8_bits12,
2390 aom_highbd_12_obmc_variance4x8,
2391 aom_highbd_12_obmc_sub_pixel_variance4x8)
2392 HIGHBD_OBFP(BLOCK_8X4, aom_highbd_obmc_sad8x4_bits12,
2393 aom_highbd_12_obmc_variance8x4,
2394 aom_highbd_12_obmc_sub_pixel_variance8x4)
2395 HIGHBD_OBFP(BLOCK_4X4, aom_highbd_obmc_sad4x4_bits12,
2396 aom_highbd_12_obmc_variance4x4,
2397 aom_highbd_12_obmc_sub_pixel_variance4x4)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002398 HIGHBD_OBFP(BLOCK_64X16, aom_highbd_obmc_sad64x16_bits12,
2399 aom_highbd_12_obmc_variance64x16,
2400 aom_highbd_12_obmc_sub_pixel_variance64x16)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002401 HIGHBD_OBFP(BLOCK_16X64, aom_highbd_obmc_sad16x64_bits12,
2402 aom_highbd_12_obmc_variance16x64,
2403 aom_highbd_12_obmc_sub_pixel_variance16x64)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002404 HIGHBD_OBFP(BLOCK_32X8, aom_highbd_obmc_sad32x8_bits12,
2405 aom_highbd_12_obmc_variance32x8,
2406 aom_highbd_12_obmc_sub_pixel_variance32x8)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002407 HIGHBD_OBFP(BLOCK_8X32, aom_highbd_obmc_sad8x32_bits12,
2408 aom_highbd_12_obmc_variance8x32,
2409 aom_highbd_12_obmc_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002410 HIGHBD_OBFP(BLOCK_16X4, aom_highbd_obmc_sad16x4_bits12,
2411 aom_highbd_12_obmc_variance16x4,
2412 aom_highbd_12_obmc_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002413 HIGHBD_OBFP(BLOCK_4X16, aom_highbd_obmc_sad4x16_bits12,
2414 aom_highbd_12_obmc_variance4x16,
2415 aom_highbd_12_obmc_sub_pixel_variance4x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002416 break;
2417
2418 default:
2419 assert(0 &&
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002420 "cm->seq_params.bit_depth should be AOM_BITS_8, "
Yaowu Xuf883b422016-08-30 14:01:10 -07002421 "AOM_BITS_10 or AOM_BITS_12");
Yaowu Xuc27fc142016-08-22 16:08:15 -07002422 }
2423 }
2424}
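/* Editorial sketch (not in the original file): highbd_set_var_fns() above
 * swaps the 8-bit cost kernels in cpi->fn_ptr[] for bit-depth-specific
 * wrappers, so motion-search code stays bit-depth agnostic. Assuming the
 * usual aom_variance_fn_ptr_t field signatures (src/ref pointers and strides
 * below are placeholders), a lookup would read:
 *
 *   const aom_variance_fn_ptr_t *fn = &cpi->fn_ptr[BLOCK_16X16];
 *   unsigned int sad = fn->sdf(src, src_stride, ref, ref_stride);
 *   unsigned int sse;
 *   unsigned int var = fn->vf(src, src_stride, ref, ref_stride, &sse);
 */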
Yaowu Xuc27fc142016-08-22 16:08:15 -07002425
Yaowu Xuf883b422016-08-30 14:01:10 -07002426static void realloc_segmentation_maps(AV1_COMP *cpi) {
2427 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002428
2429 // Create the encoder segmentation map and set all entries to 0
Yaowu Xuf883b422016-08-30 14:01:10 -07002430 aom_free(cpi->segmentation_map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002431 CHECK_MEM_ERROR(cm, cpi->segmentation_map,
Yaowu Xuf883b422016-08-30 14:01:10 -07002432 aom_calloc(cm->mi_rows * cm->mi_cols, 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002433
2434 // Create a map used for cyclic background refresh.
Yaowu Xuf883b422016-08-30 14:01:10 -07002435 if (cpi->cyclic_refresh) av1_cyclic_refresh_free(cpi->cyclic_refresh);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002436 CHECK_MEM_ERROR(cm, cpi->cyclic_refresh,
Yaowu Xuf883b422016-08-30 14:01:10 -07002437 av1_cyclic_refresh_alloc(cm->mi_rows, cm->mi_cols));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002438
2439 // Create a map used to mark inactive areas.
Yaowu Xuf883b422016-08-30 14:01:10 -07002440 aom_free(cpi->active_map.map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002441 CHECK_MEM_ERROR(cm, cpi->active_map.map,
Yaowu Xuf883b422016-08-30 14:01:10 -07002442 aom_calloc(cm->mi_rows * cm->mi_cols, 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002443}
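/* Editorial note (not in the original file): each map allocated above holds
 * one byte per mode-info unit, i.e. cm->mi_rows * cm->mi_cols entries, so the
 * three buffers must be reallocated together whenever the mode-info grid
 * changes; av1_change_config() below therefore calls this function again when
 * the coded dimensions grow.
 */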
2444
Yaowu Xuf883b422016-08-30 14:01:10 -07002445void av1_change_config(struct AV1_COMP *cpi, const AV1EncoderConfig *oxcf) {
2446 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002447 SequenceHeader *const seq_params = &cm->seq_params;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002448 const int num_planes = av1_num_planes(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002449 RATE_CONTROL *const rc = &cpi->rc;
hui sud9a812b2017-07-06 14:34:37 -07002450 MACROBLOCK *const x = &cpi->td.mb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002451
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002452 if (seq_params->profile != oxcf->profile) seq_params->profile = oxcf->profile;
2453 seq_params->bit_depth = oxcf->bit_depth;
2454 seq_params->color_primaries = oxcf->color_primaries;
2455 seq_params->transfer_characteristics = oxcf->transfer_characteristics;
2456 seq_params->matrix_coefficients = oxcf->matrix_coefficients;
2457 seq_params->monochrome = oxcf->monochrome;
2458 seq_params->chroma_sample_position = oxcf->chroma_sample_position;
2459 seq_params->color_range = oxcf->color_range;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002460
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002461 assert(IMPLIES(seq_params->profile <= PROFILE_1,
2462 seq_params->bit_depth <= AOM_BITS_10));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002463
Andrey Norkin28e9ce22018-01-08 10:11:21 -08002464 cm->timing_info_present = oxcf->timing_info_present;
Andrey Norkin795ba872018-03-06 13:24:14 -08002465 cm->timing_info.num_units_in_display_tick =
2466 oxcf->timing_info.num_units_in_display_tick;
2467 cm->timing_info.time_scale = oxcf->timing_info.time_scale;
2468 cm->timing_info.equal_picture_interval =
2469 oxcf->timing_info.equal_picture_interval;
2470 cm->timing_info.num_ticks_per_picture =
2471 oxcf->timing_info.num_ticks_per_picture;
2472
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002473 seq_params->display_model_info_present_flag =
Andrey Norkin26495512018-06-20 17:13:11 -07002474 oxcf->display_model_info_present_flag;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002475 seq_params->decoder_model_info_present_flag =
Adrian Grangec56f6ec2018-05-31 14:19:32 -07002476 oxcf->decoder_model_info_present_flag;
Andrey Norkin795ba872018-03-06 13:24:14 -08002477 if (oxcf->decoder_model_info_present_flag) {
Andrey Norkin26495512018-06-20 17:13:11 -07002478 // set the decoder model parameters in schedule mode
Andrey Norkin795ba872018-03-06 13:24:14 -08002479 cm->buffer_model.num_units_in_decoding_tick =
2480 oxcf->buffer_model.num_units_in_decoding_tick;
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07002481 cm->buffer_removal_time_present = 1;
Andrey Norkin795ba872018-03-06 13:24:14 -08002482 set_aom_dec_model_info(&cm->buffer_model);
Andrey Norkin26495512018-06-20 17:13:11 -07002483 set_dec_model_op_parameters(&cm->op_params[0]);
2484 } else if (cm->timing_info_present &&
2485 cm->timing_info.equal_picture_interval &&
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002486 !seq_params->decoder_model_info_present_flag) {
Andrey Norkin26495512018-06-20 17:13:11 -07002487 // set the decoder model parameters in resource availability mode
2488 set_resource_availability_parameters(&cm->op_params[0]);
Andrey Norkinc7511de2018-06-22 12:31:06 -07002489 } else {
2490 cm->op_params[0].initial_display_delay =
2491 10; // Default value (not signaled)
Andrey Norkin795ba872018-03-06 13:24:14 -08002492 }
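  // Editorial note: the chain above selects one of three decoder-model
  // configurations: the full decoder model (schedule mode) when
  // decoder_model_info_present_flag is set, resource-availability mode when
  // only equal-interval timing info is signaled, and otherwise a default
  // initial_display_delay of 10 that is not written to the bitstream.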
Andrey Norkin28e9ce22018-01-08 10:11:21 -08002493
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002494 update_film_grain_parameters(cpi, oxcf);
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002495
Yaowu Xuc27fc142016-08-22 16:08:15 -07002496 cpi->oxcf = *oxcf;
Maxym Dmytrychenkocc6e0e12018-02-05 16:35:37 +01002497 cpi->common.options = oxcf->cfg;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002498 x->e_mbd.bd = (int)seq_params->bit_depth;
hui sud9a812b2017-07-06 14:34:37 -07002499 x->e_mbd.global_motion = cm->global_motion;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002500
Yaowu Xuf883b422016-08-30 14:01:10 -07002501 if ((oxcf->pass == 0) && (oxcf->rc_mode == AOM_Q)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002502 rc->baseline_gf_interval = FIXED_GF_INTERVAL;
2503 } else {
2504 rc->baseline_gf_interval = (MIN_GF_INTERVAL + MAX_GF_INTERVAL) / 2;
2505 }
2506
2507 cpi->refresh_last_frame = 1;
2508 cpi->refresh_golden_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002509 cpi->refresh_bwd_ref_frame = 0;
Zoe Liue9b15e22017-07-19 15:53:01 -07002510 cpi->refresh_alt2_ref_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002511
Debargha Mukherjee229fdc82018-03-10 07:45:33 -08002512 cm->refresh_frame_context = (oxcf->frame_parallel_decoding_mode)
2513 ? REFRESH_FRAME_CONTEXT_DISABLED
2514 : REFRESH_FRAME_CONTEXT_BACKWARD;
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01002515 if (oxcf->large_scale_tile)
James Zernf34dfc82018-02-23 16:53:33 -08002516 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_DISABLED;
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01002517
Alex Converse74ad0912017-07-18 10:22:58 -07002518 if (x->palette_buffer == NULL) {
hui sud9a812b2017-07-06 14:34:37 -07002519 CHECK_MEM_ERROR(cm, x->palette_buffer,
2520 aom_memalign(16, sizeof(*x->palette_buffer)));
2521 }
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002522
2523 if (x->tmp_conv_dst == NULL) {
2524 CHECK_MEM_ERROR(
2525 cm, x->tmp_conv_dst,
2526 aom_memalign(32, MAX_SB_SIZE * MAX_SB_SIZE * sizeof(*x->tmp_conv_dst)));
Urvang Joshie58f6ec2018-09-10 15:10:12 -07002527 x->e_mbd.tmp_conv_dst = x->tmp_conv_dst;
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002528 }
2529 for (int i = 0; i < 2; ++i) {
2530 if (x->tmp_obmc_bufs[i] == NULL) {
2531 CHECK_MEM_ERROR(cm, x->tmp_obmc_bufs[i],
wenyao.liu22d8ab32018-10-16 09:11:29 +08002532 aom_memalign(32, 2 * MAX_MB_PLANE * MAX_SB_SQUARE *
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002533 sizeof(*x->tmp_obmc_bufs[i])));
Urvang Joshie58f6ec2018-09-10 15:10:12 -07002534 x->e_mbd.tmp_obmc_bufs[i] = x->tmp_obmc_bufs[i];
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07002535 }
2536 }
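  // Editorial note: palette_buffer, tmp_conv_dst and tmp_obmc_bufs are lazily
  // allocated scratch areas: they are created only on the first call and then
  // aliased into x->e_mbd (the shared MACROBLOCKD view) so prediction code
  // that only sees the MACROBLOCKD can still reach them.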
2537
Yaowu Xuf883b422016-08-30 14:01:10 -07002538 av1_reset_segment_features(cm);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07002539 set_high_precision_mv(cpi, 1, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002540
Yaowu Xuc27fc142016-08-22 16:08:15 -07002541 set_rc_buffer_sizes(rc, &cpi->oxcf);
2542
2543 // Under a configuration change, where maximum_buffer_size may change,
2544 // keep buffer level clipped to the maximum allowed buffer size.
Yaowu Xuf883b422016-08-30 14:01:10 -07002545 rc->bits_off_target = AOMMIN(rc->bits_off_target, rc->maximum_buffer_size);
2546 rc->buffer_level = AOMMIN(rc->buffer_level, rc->maximum_buffer_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002547
2548  // Set up frame rate and related rate control parameters.
Yaowu Xuf883b422016-08-30 14:01:10 -07002549 av1_new_framerate(cpi, cpi->framerate);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002550
2551 // Set absolute upper and lower quality limits
2552 rc->worst_quality = cpi->oxcf.worst_allowed_q;
2553 rc->best_quality = cpi->oxcf.best_allowed_q;
2554
Urvang Joshib55cb5e2018-09-12 14:50:21 -07002555 cm->interp_filter = oxcf->large_scale_tile ? EIGHTTAP_REGULAR : SWITCHABLE;
Yue Chen5380cb52018-02-23 15:33:21 -08002556 cm->switchable_motion_mode = 1;
2557
Yaowu Xuc27fc142016-08-22 16:08:15 -07002558 if (cpi->oxcf.render_width > 0 && cpi->oxcf.render_height > 0) {
2559 cm->render_width = cpi->oxcf.render_width;
2560 cm->render_height = cpi->oxcf.render_height;
2561 } else {
2562 cm->render_width = cpi->oxcf.width;
2563 cm->render_height = cpi->oxcf.height;
2564 }
2565 cm->width = cpi->oxcf.width;
2566 cm->height = cpi->oxcf.height;
2567
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002568 int sb_size = seq_params->sb_size;
Urvang Joshie4530f82018-01-09 11:43:37 -08002569 // Superblock size should not be updated after the first key frame.
2570 if (!cpi->seq_params_locked) {
2571 set_sb_size(&cm->seq_params, select_sb_size(cpi));
2572 }
Dominic Symes917d6c02017-10-11 18:00:52 +02002573
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002574 if (cpi->initial_width || sb_size != seq_params->sb_size) {
Dominic Symes917d6c02017-10-11 18:00:52 +02002575 if (cm->width > cpi->initial_width || cm->height > cpi->initial_height ||
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002576 seq_params->sb_size != sb_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002577 av1_free_context_buffers(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002578 av1_free_pc_tree(&cpi->td, num_planes);
Cheng Chen46f30c72017-09-07 11:13:33 -07002579 alloc_compressor_data(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002580 realloc_segmentation_maps(cpi);
2581 cpi->initial_width = cpi->initial_height = 0;
2582 }
2583 }
2584 update_frame_size(cpi);
2585
2586 cpi->alt_ref_source = NULL;
2587 rc->is_src_frame_alt_ref = 0;
2588
Yaowu Xuc27fc142016-08-22 16:08:15 -07002589 rc->is_bwd_ref_frame = 0;
2590 rc->is_last_bipred_frame = 0;
2591 rc->is_bipred_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002592
Yaowu Xuc27fc142016-08-22 16:08:15 -07002593 set_tile_info(cpi);
2594
2595 cpi->ext_refresh_frame_flags_pending = 0;
2596 cpi->ext_refresh_frame_context_pending = 0;
2597
Yaowu Xuc27fc142016-08-22 16:08:15 -07002598 highbd_set_var_fns(cpi);
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00002599
Debargha Mukherjeeedd77252018-03-25 12:01:38 -07002600 // Init sequence level coding tools
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07002601 // This should not be called after the first key frame.
2602 if (!cpi->seq_params_locked) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002603 seq_params->operating_points_cnt_minus_1 =
Adrian Grangec56f6ec2018-05-31 14:19:32 -07002604 cm->number_spatial_layers > 1 ? cm->number_spatial_layers - 1 : 0;
Andrey Norkin26495512018-06-20 17:13:11 -07002605 init_seq_coding_tools(&cm->seq_params, cm, oxcf);
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07002606 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002607}
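/* Editorial sketch: av1_change_config() can be called again on a live
 * encoder, so mid-stream reconfiguration amounts to copying the current
 * config, editing it and handing it back. The helper below is hypothetical
 * (not part of libaom); target_bandwidth is in bits per second, matching its
 * use in the internal-stats code later in this file. */
#if 0 /* illustrative only, not compiled */
static void example_set_bitrate(AV1_COMP *cpi, int64_t new_bits_per_second) {
  AV1EncoderConfig new_cfg = cpi->oxcf;  // start from the live settings
  new_cfg.target_bandwidth = new_bits_per_second;
  av1_change_config(cpi, &new_cfg);  // re-derives RC limits, tiles, etc.
}
#endif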
2608
Yaowu Xuf883b422016-08-30 14:01:10 -07002609AV1_COMP *av1_create_compressor(AV1EncoderConfig *oxcf,
2610 BufferPool *const pool) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002611 unsigned int i;
Yaowu Xuf883b422016-08-30 14:01:10 -07002612 AV1_COMP *volatile const cpi = aom_memalign(32, sizeof(AV1_COMP));
2613 AV1_COMMON *volatile const cm = cpi != NULL ? &cpi->common : NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002614
2615 if (!cm) return NULL;
2616
Yaowu Xuf883b422016-08-30 14:01:10 -07002617 av1_zero(*cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002618
Wan-Teh Changa2fad3e2018-07-19 16:55:19 -07002619 // The jmp_buf is valid only for the duration of the function that calls
2620 // setjmp(). Therefore, this function must reset the 'setjmp' field to 0
2621 // before it returns.
Yaowu Xuc27fc142016-08-22 16:08:15 -07002622 if (setjmp(cm->error.jmp)) {
2623 cm->error.setjmp = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002624 av1_remove_compressor(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002625 return 0;
2626 }
2627
2628 cm->error.setjmp = 1;
Cheng Chen46f30c72017-09-07 11:13:33 -07002629 cm->alloc_mi = enc_alloc_mi;
2630 cm->free_mi = enc_free_mi;
2631 cm->setup_mi = enc_setup_mi;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002632
Angie Chianga5d96c42016-10-21 16:16:56 -07002633 CHECK_MEM_ERROR(cm, cm->fc,
2634 (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->fc)));
David Turner1bcefb32018-11-19 17:54:00 +00002635 CHECK_MEM_ERROR(
2636 cm, cm->default_frame_context,
2637 (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->default_frame_context)));
Angie Chianga5d96c42016-10-21 16:16:56 -07002638 memset(cm->fc, 0, sizeof(*cm->fc));
David Turner1bcefb32018-11-19 17:54:00 +00002639 memset(cm->default_frame_context, 0, sizeof(*cm->default_frame_context));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002640
2641 cpi->resize_state = 0;
2642 cpi->resize_avg_qp = 0;
2643 cpi->resize_buffer_underflow = 0;
Fergus Simpsonddc846e2017-04-24 18:09:13 -07002644
Yaowu Xuc27fc142016-08-22 16:08:15 -07002645 cpi->common.buffer_pool = pool;
2646
2647 init_config(cpi, oxcf);
Yaowu Xuf883b422016-08-30 14:01:10 -07002648 av1_rc_init(&cpi->oxcf, oxcf->pass, &cpi->rc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002649
David Turnerd2a592e2018-11-16 14:59:31 +00002650 cm->current_frame.frame_number = 0;
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07002651 cpi->seq_params_locked = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002652 cpi->partition_search_skippable_frame = 0;
2653 cpi->tile_data = NULL;
David Turnere7ebf902018-12-04 14:04:55 +00002654 cpi->last_show_frame_buf = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002655 realloc_segmentation_maps(cpi);
2656
Jingning Hanf050fc12018-03-09 14:53:33 -08002657 memset(cpi->nmv_costs, 0, sizeof(cpi->nmv_costs));
2658 memset(cpi->nmv_costs_hp, 0, sizeof(cpi->nmv_costs_hp));
James Zern01a9d702017-08-25 19:09:33 +00002659
Yaowu Xuc27fc142016-08-22 16:08:15 -07002660 for (i = 0; i < (sizeof(cpi->mbgraph_stats) / sizeof(cpi->mbgraph_stats[0]));
2661 i++) {
2662 CHECK_MEM_ERROR(
2663 cm, cpi->mbgraph_stats[i].mb_stats,
Yaowu Xuf883b422016-08-30 14:01:10 -07002664 aom_calloc(cm->MBs * sizeof(*cpi->mbgraph_stats[i].mb_stats), 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002665 }
2666
2667#if CONFIG_FP_MB_STATS
2668 cpi->use_fp_mb_stats = 0;
2669 if (cpi->use_fp_mb_stats) {
2670    // A placeholder used to store the first-pass mb stats.
2671 CHECK_MEM_ERROR(cm, cpi->twopass.frame_mb_stats_buf,
Yaowu Xuf883b422016-08-30 14:01:10 -07002672 aom_calloc(cm->MBs * sizeof(uint8_t), 1));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002673 } else {
2674 cpi->twopass.frame_mb_stats_buf = NULL;
2675 }
2676#endif
2677
2678 cpi->refresh_alt_ref_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002679
2680 cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
2681#if CONFIG_INTERNAL_STATS
2682 cpi->b_calculate_blockiness = 1;
2683 cpi->b_calculate_consistency = 1;
2684 cpi->total_inconsistency = 0;
2685 cpi->psnr.worst = 100.0;
2686 cpi->worst_ssim = 100.0;
2687
2688 cpi->count = 0;
2689 cpi->bytes = 0;
Debargha Mukherjee0857e662019-01-04 16:22:09 -08002690#if CONFIG_SPEED_STATS
2691 cpi->tx_search_count = 0;
2692#endif // CONFIG_SPEED_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002693
2694 if (cpi->b_calculate_psnr) {
2695 cpi->total_sq_error = 0;
2696 cpi->total_samples = 0;
2697 cpi->tot_recode_hits = 0;
2698 cpi->summed_quality = 0;
2699 cpi->summed_weights = 0;
2700 }
2701
2702 cpi->fastssim.worst = 100.0;
2703 cpi->psnrhvs.worst = 100.0;
2704
2705 if (cpi->b_calculate_blockiness) {
2706 cpi->total_blockiness = 0;
2707 cpi->worst_blockiness = 0.0;
2708 }
2709
2710 if (cpi->b_calculate_consistency) {
2711 CHECK_MEM_ERROR(cm, cpi->ssim_vars,
Yaowu Xuf883b422016-08-30 14:01:10 -07002712 aom_malloc(sizeof(*cpi->ssim_vars) * 4 *
Yaowu Xuc27fc142016-08-22 16:08:15 -07002713 cpi->common.mi_rows * cpi->common.mi_cols));
2714 cpi->worst_consistency = 100.0;
2715 }
2716#endif
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08002717#if CONFIG_ENTROPY_STATS
2718 av1_zero(aggregate_fc);
2719#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07002720
2721 cpi->first_time_stamp_ever = INT64_MAX;
2722
Jingning Hanf050fc12018-03-09 14:53:33 -08002723 cpi->td.mb.nmvcost[0] = &cpi->nmv_costs[0][MV_MAX];
2724 cpi->td.mb.nmvcost[1] = &cpi->nmv_costs[1][MV_MAX];
2725 cpi->td.mb.nmvcost_hp[0] = &cpi->nmv_costs_hp[0][MV_MAX];
2726 cpi->td.mb.nmvcost_hp[1] = &cpi->nmv_costs_hp[1][MV_MAX];
James Zern01a9d702017-08-25 19:09:33 +00002727
Yaowu Xuc27fc142016-08-22 16:08:15 -07002728#ifdef OUTPUT_YUV_SKINMAP
2729 yuv_skinmap_file = fopen("skinmap.yuv", "ab");
2730#endif
2731#ifdef OUTPUT_YUV_REC
2732 yuv_rec_file = fopen("rec.yuv", "wb");
2733#endif
2734
Yaowu Xuc27fc142016-08-22 16:08:15 -07002735 if (oxcf->pass == 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002736 av1_init_first_pass(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002737 } else if (oxcf->pass == 2) {
2738 const size_t packet_sz = sizeof(FIRSTPASS_STATS);
2739 const int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
2740
2741#if CONFIG_FP_MB_STATS
2742 if (cpi->use_fp_mb_stats) {
2743 const size_t psz = cpi->common.MBs * sizeof(uint8_t);
2744 const int ps = (int)(oxcf->firstpass_mb_stats_in.sz / psz);
2745
2746 cpi->twopass.firstpass_mb_stats.mb_stats_start =
2747 oxcf->firstpass_mb_stats_in.buf;
2748 cpi->twopass.firstpass_mb_stats.mb_stats_end =
2749 cpi->twopass.firstpass_mb_stats.mb_stats_start +
2750 (ps - 1) * cpi->common.MBs * sizeof(uint8_t);
2751 }
2752#endif
2753
2754 cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
2755 cpi->twopass.stats_in = cpi->twopass.stats_in_start;
2756 cpi->twopass.stats_in_end = &cpi->twopass.stats_in[packets - 1];
2757
Yaowu Xuf883b422016-08-30 14:01:10 -07002758 av1_init_second_pass(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002759 }
2760
Jingning Hand064cf02017-06-01 10:00:39 -07002761 CHECK_MEM_ERROR(
2762 cm, cpi->td.mb.above_pred_buf,
Yue Chen1a799252018-03-01 16:47:41 -08002763 (uint8_t *)aom_memalign(16, MAX_MB_PLANE * MAX_SB_SQUARE *
Johannb0ef6ff2018-02-08 14:32:21 -08002764 sizeof(*cpi->td.mb.above_pred_buf)));
Jingning Hand064cf02017-06-01 10:00:39 -07002765 CHECK_MEM_ERROR(
2766 cm, cpi->td.mb.left_pred_buf,
Yue Chen1a799252018-03-01 16:47:41 -08002767 (uint8_t *)aom_memalign(16, MAX_MB_PLANE * MAX_SB_SQUARE *
Johannb0ef6ff2018-02-08 14:32:21 -08002768 sizeof(*cpi->td.mb.left_pred_buf)));
Jingning Hand064cf02017-06-01 10:00:39 -07002769
2770 CHECK_MEM_ERROR(cm, cpi->td.mb.wsrc_buf,
2771 (int32_t *)aom_memalign(
2772 16, MAX_SB_SQUARE * sizeof(*cpi->td.mb.wsrc_buf)));
2773
Ravi Chaudhary5d970f42018-09-25 11:25:32 +05302774#if CONFIG_COLLECT_INTER_MODE_RD_STATS
2775 CHECK_MEM_ERROR(
2776 cm, cpi->td.mb.inter_modes_info,
2777 (InterModesInfo *)aom_malloc(sizeof(*cpi->td.mb.inter_modes_info)));
2778#endif
2779
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05302780 for (int x = 0; x < 2; x++)
2781 for (int y = 0; y < 2; y++)
2782 CHECK_MEM_ERROR(
2783 cm, cpi->td.mb.hash_value_buffer[x][y],
2784 (uint32_t *)aom_malloc(AOM_BUFFER_SIZE_FOR_BLOCK_HASH *
2785 sizeof(*cpi->td.mb.hash_value_buffer[0][0])));
2786
2787 cpi->td.mb.g_crc_initialized = 0;
2788
Jingning Hand064cf02017-06-01 10:00:39 -07002789 CHECK_MEM_ERROR(cm, cpi->td.mb.mask_buf,
2790 (int32_t *)aom_memalign(
2791 16, MAX_SB_SQUARE * sizeof(*cpi->td.mb.mask_buf)));
2792
Yaowu Xuf883b422016-08-30 14:01:10 -07002793 av1_set_speed_features_framesize_independent(cpi);
2794 av1_set_speed_features_framesize_dependent(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002795
Yue Chen7cae98f2018-08-24 10:43:16 -07002796 for (int frame = 0; frame < MAX_LAG_BUFFERS; ++frame) {
2797 int mi_cols = ALIGN_POWER_OF_TWO(cm->mi_cols, MAX_MIB_SIZE_LOG2);
2798 int mi_rows = ALIGN_POWER_OF_TWO(cm->mi_rows, MAX_MIB_SIZE_LOG2);
2799
2800 CHECK_MEM_ERROR(cm, cpi->tpl_stats[frame].tpl_stats_ptr,
2801 aom_calloc(mi_rows * mi_cols,
2802 sizeof(*cpi->tpl_stats[frame].tpl_stats_ptr)));
2803 cpi->tpl_stats[frame].is_valid = 0;
2804 cpi->tpl_stats[frame].width = mi_cols;
2805 cpi->tpl_stats[frame].height = mi_rows;
2806 cpi->tpl_stats[frame].stride = mi_cols;
2807 cpi->tpl_stats[frame].mi_rows = cm->mi_rows;
2808 cpi->tpl_stats[frame].mi_cols = cm->mi_cols;
2809 }
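  // Editorial note: one TPL stats grid is kept per lag buffer, with
  // mi_rows/mi_cols rounded up to whole superblocks via ALIGN_POWER_OF_TWO so
  // each lookahead frame's stats cover the padded mode-info grid.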
2810
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002811#define BFP(BT, SDF, SDAF, VF, SVF, SVAF, SDX4DF, JSDAF, JSVAF) \
2812 cpi->fn_ptr[BT].sdf = SDF; \
2813 cpi->fn_ptr[BT].sdaf = SDAF; \
2814 cpi->fn_ptr[BT].vf = VF; \
2815 cpi->fn_ptr[BT].svf = SVF; \
2816 cpi->fn_ptr[BT].svaf = SVAF; \
2817 cpi->fn_ptr[BT].sdx4df = SDX4DF; \
2818 cpi->fn_ptr[BT].jsdaf = JSDAF; \
Cheng Chenf78632e2017-10-20 15:30:51 -07002819 cpi->fn_ptr[BT].jsvaf = JSVAF;
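  /* Editorial note: per block size, BFP wires up the plain SAD (sdf), SAD
   * against an averaged second predictor (sdaf), variance (vf), sub-pixel
   * variance (svf), sub-pixel averaging variance (svaf), the 4-reference
   * batched SAD (sdx4df) and the dist-wtd compound variants (jsdaf, jsvaf).
   * Illustrative use, assuming the standard SAD signature and placeholder
   * buffers:
   *
   *   unsigned int cost = cpi->fn_ptr[bsize].sdf(src, src_stride,
   *                                              ref, ref_stride);
   */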
Yaowu Xuc27fc142016-08-22 16:08:15 -07002820
Cheng Chenf78632e2017-10-20 15:30:51 -07002821 BFP(BLOCK_4X16, aom_sad4x16, aom_sad4x16_avg, aom_variance4x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002822 aom_sub_pixel_variance4x16, aom_sub_pixel_avg_variance4x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002823 aom_sad4x16x4d, aom_dist_wtd_sad4x16_avg,
2824 aom_dist_wtd_sub_pixel_avg_variance4x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002825
2826 BFP(BLOCK_16X4, aom_sad16x4, aom_sad16x4_avg, aom_variance16x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002827 aom_sub_pixel_variance16x4, aom_sub_pixel_avg_variance16x4,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002828 aom_sad16x4x4d, aom_dist_wtd_sad16x4_avg,
2829 aom_dist_wtd_sub_pixel_avg_variance16x4)
Cheng Chenf78632e2017-10-20 15:30:51 -07002830
2831 BFP(BLOCK_8X32, aom_sad8x32, aom_sad8x32_avg, aom_variance8x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002832 aom_sub_pixel_variance8x32, aom_sub_pixel_avg_variance8x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002833 aom_sad8x32x4d, aom_dist_wtd_sad8x32_avg,
2834 aom_dist_wtd_sub_pixel_avg_variance8x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002835
2836 BFP(BLOCK_32X8, aom_sad32x8, aom_sad32x8_avg, aom_variance32x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002837 aom_sub_pixel_variance32x8, aom_sub_pixel_avg_variance32x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002838 aom_sad32x8x4d, aom_dist_wtd_sad32x8_avg,
2839 aom_dist_wtd_sub_pixel_avg_variance32x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002840
2841 BFP(BLOCK_16X64, aom_sad16x64, aom_sad16x64_avg, aom_variance16x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002842 aom_sub_pixel_variance16x64, aom_sub_pixel_avg_variance16x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002843 aom_sad16x64x4d, aom_dist_wtd_sad16x64_avg,
2844 aom_dist_wtd_sub_pixel_avg_variance16x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002845
2846 BFP(BLOCK_64X16, aom_sad64x16, aom_sad64x16_avg, aom_variance64x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002847 aom_sub_pixel_variance64x16, aom_sub_pixel_avg_variance64x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002848 aom_sad64x16x4d, aom_dist_wtd_sad64x16_avg,
2849 aom_dist_wtd_sub_pixel_avg_variance64x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002850
Cheng Chenf78632e2017-10-20 15:30:51 -07002851 BFP(BLOCK_128X128, aom_sad128x128, aom_sad128x128_avg, aom_variance128x128,
2852 aom_sub_pixel_variance128x128, aom_sub_pixel_avg_variance128x128,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002853 aom_sad128x128x4d, aom_dist_wtd_sad128x128_avg,
2854 aom_dist_wtd_sub_pixel_avg_variance128x128)
Cheng Chenf78632e2017-10-20 15:30:51 -07002855
2856 BFP(BLOCK_128X64, aom_sad128x64, aom_sad128x64_avg, aom_variance128x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002857 aom_sub_pixel_variance128x64, aom_sub_pixel_avg_variance128x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002858 aom_sad128x64x4d, aom_dist_wtd_sad128x64_avg,
2859 aom_dist_wtd_sub_pixel_avg_variance128x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002860
2861 BFP(BLOCK_64X128, aom_sad64x128, aom_sad64x128_avg, aom_variance64x128,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002862 aom_sub_pixel_variance64x128, aom_sub_pixel_avg_variance64x128,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002863 aom_sad64x128x4d, aom_dist_wtd_sad64x128_avg,
2864 aom_dist_wtd_sub_pixel_avg_variance64x128)
Cheng Chenf78632e2017-10-20 15:30:51 -07002865
2866 BFP(BLOCK_32X16, aom_sad32x16, aom_sad32x16_avg, aom_variance32x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002867 aom_sub_pixel_variance32x16, aom_sub_pixel_avg_variance32x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002868 aom_sad32x16x4d, aom_dist_wtd_sad32x16_avg,
2869 aom_dist_wtd_sub_pixel_avg_variance32x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002870
2871 BFP(BLOCK_16X32, aom_sad16x32, aom_sad16x32_avg, aom_variance16x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002872 aom_sub_pixel_variance16x32, aom_sub_pixel_avg_variance16x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002873 aom_sad16x32x4d, aom_dist_wtd_sad16x32_avg,
2874 aom_dist_wtd_sub_pixel_avg_variance16x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002875
2876 BFP(BLOCK_64X32, aom_sad64x32, aom_sad64x32_avg, aom_variance64x32,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002877 aom_sub_pixel_variance64x32, aom_sub_pixel_avg_variance64x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002878 aom_sad64x32x4d, aom_dist_wtd_sad64x32_avg,
2879 aom_dist_wtd_sub_pixel_avg_variance64x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002880
2881 BFP(BLOCK_32X64, aom_sad32x64, aom_sad32x64_avg, aom_variance32x64,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002882 aom_sub_pixel_variance32x64, aom_sub_pixel_avg_variance32x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002883 aom_sad32x64x4d, aom_dist_wtd_sad32x64_avg,
2884 aom_dist_wtd_sub_pixel_avg_variance32x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002885
2886 BFP(BLOCK_32X32, aom_sad32x32, aom_sad32x32_avg, aom_variance32x32,
2887 aom_sub_pixel_variance32x32, aom_sub_pixel_avg_variance32x32,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002888 aom_sad32x32x4d, aom_dist_wtd_sad32x32_avg,
2889 aom_dist_wtd_sub_pixel_avg_variance32x32)
Cheng Chenf78632e2017-10-20 15:30:51 -07002890
2891 BFP(BLOCK_64X64, aom_sad64x64, aom_sad64x64_avg, aom_variance64x64,
2892 aom_sub_pixel_variance64x64, aom_sub_pixel_avg_variance64x64,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002893 aom_sad64x64x4d, aom_dist_wtd_sad64x64_avg,
2894 aom_dist_wtd_sub_pixel_avg_variance64x64)
Cheng Chenf78632e2017-10-20 15:30:51 -07002895
2896 BFP(BLOCK_16X16, aom_sad16x16, aom_sad16x16_avg, aom_variance16x16,
2897 aom_sub_pixel_variance16x16, aom_sub_pixel_avg_variance16x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002898 aom_sad16x16x4d, aom_dist_wtd_sad16x16_avg,
2899 aom_dist_wtd_sub_pixel_avg_variance16x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002900
2901 BFP(BLOCK_16X8, aom_sad16x8, aom_sad16x8_avg, aom_variance16x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002902 aom_sub_pixel_variance16x8, aom_sub_pixel_avg_variance16x8,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002903 aom_sad16x8x4d, aom_dist_wtd_sad16x8_avg,
2904 aom_dist_wtd_sub_pixel_avg_variance16x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002905
2906 BFP(BLOCK_8X16, aom_sad8x16, aom_sad8x16_avg, aom_variance8x16,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002907 aom_sub_pixel_variance8x16, aom_sub_pixel_avg_variance8x16,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002908 aom_sad8x16x4d, aom_dist_wtd_sad8x16_avg,
2909 aom_dist_wtd_sub_pixel_avg_variance8x16)
Cheng Chenf78632e2017-10-20 15:30:51 -07002910
2911 BFP(BLOCK_8X8, aom_sad8x8, aom_sad8x8_avg, aom_variance8x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002912 aom_sub_pixel_variance8x8, aom_sub_pixel_avg_variance8x8, aom_sad8x8x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002913 aom_dist_wtd_sad8x8_avg, aom_dist_wtd_sub_pixel_avg_variance8x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002914
2915 BFP(BLOCK_8X4, aom_sad8x4, aom_sad8x4_avg, aom_variance8x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002916 aom_sub_pixel_variance8x4, aom_sub_pixel_avg_variance8x4, aom_sad8x4x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002917 aom_dist_wtd_sad8x4_avg, aom_dist_wtd_sub_pixel_avg_variance8x4)
Cheng Chenf78632e2017-10-20 15:30:51 -07002918
2919 BFP(BLOCK_4X8, aom_sad4x8, aom_sad4x8_avg, aom_variance4x8,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002920 aom_sub_pixel_variance4x8, aom_sub_pixel_avg_variance4x8, aom_sad4x8x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002921 aom_dist_wtd_sad4x8_avg, aom_dist_wtd_sub_pixel_avg_variance4x8)
Cheng Chenf78632e2017-10-20 15:30:51 -07002922
2923 BFP(BLOCK_4X4, aom_sad4x4, aom_sad4x4_avg, aom_variance4x4,
Kyle Siefringef6e2df2018-04-10 14:51:35 -04002924 aom_sub_pixel_variance4x4, aom_sub_pixel_avg_variance4x4, aom_sad4x4x4d,
Debargha Mukherjee0c96c112018-12-20 16:04:18 -08002925 aom_dist_wtd_sad4x4_avg, aom_dist_wtd_sub_pixel_avg_variance4x4)
Cheng Chenf78632e2017-10-20 15:30:51 -07002926
Yaowu Xuc27fc142016-08-22 16:08:15 -07002927#define OBFP(BT, OSDF, OVF, OSVF) \
2928 cpi->fn_ptr[BT].osdf = OSDF; \
2929 cpi->fn_ptr[BT].ovf = OVF; \
2930 cpi->fn_ptr[BT].osvf = OSVF;
2931
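  // Editorial note: the OBMC (overlapped block motion compensation) cost
  // functions registered below are expected to consume the 32-bit weighted
  // source (wsrc_buf) and mask (mask_buf) scratch buffers allocated earlier
  // in this function, which is why those buffers are int32_t.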
Yaowu Xuf883b422016-08-30 14:01:10 -07002932 OBFP(BLOCK_128X128, aom_obmc_sad128x128, aom_obmc_variance128x128,
2933 aom_obmc_sub_pixel_variance128x128)
2934 OBFP(BLOCK_128X64, aom_obmc_sad128x64, aom_obmc_variance128x64,
2935 aom_obmc_sub_pixel_variance128x64)
2936 OBFP(BLOCK_64X128, aom_obmc_sad64x128, aom_obmc_variance64x128,
2937 aom_obmc_sub_pixel_variance64x128)
Yaowu Xuf883b422016-08-30 14:01:10 -07002938 OBFP(BLOCK_64X64, aom_obmc_sad64x64, aom_obmc_variance64x64,
2939 aom_obmc_sub_pixel_variance64x64)
2940 OBFP(BLOCK_64X32, aom_obmc_sad64x32, aom_obmc_variance64x32,
2941 aom_obmc_sub_pixel_variance64x32)
2942 OBFP(BLOCK_32X64, aom_obmc_sad32x64, aom_obmc_variance32x64,
2943 aom_obmc_sub_pixel_variance32x64)
2944 OBFP(BLOCK_32X32, aom_obmc_sad32x32, aom_obmc_variance32x32,
2945 aom_obmc_sub_pixel_variance32x32)
2946 OBFP(BLOCK_32X16, aom_obmc_sad32x16, aom_obmc_variance32x16,
2947 aom_obmc_sub_pixel_variance32x16)
2948 OBFP(BLOCK_16X32, aom_obmc_sad16x32, aom_obmc_variance16x32,
2949 aom_obmc_sub_pixel_variance16x32)
2950 OBFP(BLOCK_16X16, aom_obmc_sad16x16, aom_obmc_variance16x16,
2951 aom_obmc_sub_pixel_variance16x16)
2952 OBFP(BLOCK_16X8, aom_obmc_sad16x8, aom_obmc_variance16x8,
2953 aom_obmc_sub_pixel_variance16x8)
2954 OBFP(BLOCK_8X16, aom_obmc_sad8x16, aom_obmc_variance8x16,
2955 aom_obmc_sub_pixel_variance8x16)
2956 OBFP(BLOCK_8X8, aom_obmc_sad8x8, aom_obmc_variance8x8,
2957 aom_obmc_sub_pixel_variance8x8)
2958 OBFP(BLOCK_4X8, aom_obmc_sad4x8, aom_obmc_variance4x8,
2959 aom_obmc_sub_pixel_variance4x8)
2960 OBFP(BLOCK_8X4, aom_obmc_sad8x4, aom_obmc_variance8x4,
2961 aom_obmc_sub_pixel_variance8x4)
2962 OBFP(BLOCK_4X4, aom_obmc_sad4x4, aom_obmc_variance4x4,
2963 aom_obmc_sub_pixel_variance4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002964 OBFP(BLOCK_4X16, aom_obmc_sad4x16, aom_obmc_variance4x16,
2965 aom_obmc_sub_pixel_variance4x16)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002966 OBFP(BLOCK_16X4, aom_obmc_sad16x4, aom_obmc_variance16x4,
2967 aom_obmc_sub_pixel_variance16x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002968 OBFP(BLOCK_8X32, aom_obmc_sad8x32, aom_obmc_variance8x32,
2969 aom_obmc_sub_pixel_variance8x32)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002970 OBFP(BLOCK_32X8, aom_obmc_sad32x8, aom_obmc_variance32x8,
2971 aom_obmc_sub_pixel_variance32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002972 OBFP(BLOCK_16X64, aom_obmc_sad16x64, aom_obmc_variance16x64,
2973 aom_obmc_sub_pixel_variance16x64)
Rupert Swarbrick72678572017-08-02 12:05:26 +01002974 OBFP(BLOCK_64X16, aom_obmc_sad64x16, aom_obmc_variance64x16,
2975 aom_obmc_sub_pixel_variance64x16)
Yaowu Xuc27fc142016-08-22 16:08:15 -07002976
David Barkerf19f35f2017-05-22 16:33:22 +01002977#define MBFP(BT, MCSDF, MCSVF) \
2978 cpi->fn_ptr[BT].msdf = MCSDF; \
2979 cpi->fn_ptr[BT].msvf = MCSVF;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002980
David Barkerf19f35f2017-05-22 16:33:22 +01002981 MBFP(BLOCK_128X128, aom_masked_sad128x128,
2982 aom_masked_sub_pixel_variance128x128)
2983 MBFP(BLOCK_128X64, aom_masked_sad128x64, aom_masked_sub_pixel_variance128x64)
2984 MBFP(BLOCK_64X128, aom_masked_sad64x128, aom_masked_sub_pixel_variance64x128)
David Barkerf19f35f2017-05-22 16:33:22 +01002985 MBFP(BLOCK_64X64, aom_masked_sad64x64, aom_masked_sub_pixel_variance64x64)
2986 MBFP(BLOCK_64X32, aom_masked_sad64x32, aom_masked_sub_pixel_variance64x32)
2987 MBFP(BLOCK_32X64, aom_masked_sad32x64, aom_masked_sub_pixel_variance32x64)
2988 MBFP(BLOCK_32X32, aom_masked_sad32x32, aom_masked_sub_pixel_variance32x32)
2989 MBFP(BLOCK_32X16, aom_masked_sad32x16, aom_masked_sub_pixel_variance32x16)
2990 MBFP(BLOCK_16X32, aom_masked_sad16x32, aom_masked_sub_pixel_variance16x32)
2991 MBFP(BLOCK_16X16, aom_masked_sad16x16, aom_masked_sub_pixel_variance16x16)
2992 MBFP(BLOCK_16X8, aom_masked_sad16x8, aom_masked_sub_pixel_variance16x8)
2993 MBFP(BLOCK_8X16, aom_masked_sad8x16, aom_masked_sub_pixel_variance8x16)
2994 MBFP(BLOCK_8X8, aom_masked_sad8x8, aom_masked_sub_pixel_variance8x8)
2995 MBFP(BLOCK_4X8, aom_masked_sad4x8, aom_masked_sub_pixel_variance4x8)
2996 MBFP(BLOCK_8X4, aom_masked_sad8x4, aom_masked_sub_pixel_variance8x4)
2997 MBFP(BLOCK_4X4, aom_masked_sad4x4, aom_masked_sub_pixel_variance4x4)
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002998
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002999 MBFP(BLOCK_4X16, aom_masked_sad4x16, aom_masked_sub_pixel_variance4x16)
3000
3001 MBFP(BLOCK_16X4, aom_masked_sad16x4, aom_masked_sub_pixel_variance16x4)
3002
3003 MBFP(BLOCK_8X32, aom_masked_sad8x32, aom_masked_sub_pixel_variance8x32)
3004
3005 MBFP(BLOCK_32X8, aom_masked_sad32x8, aom_masked_sub_pixel_variance32x8)
Rupert Swarbrick72678572017-08-02 12:05:26 +01003006
3007 MBFP(BLOCK_16X64, aom_masked_sad16x64, aom_masked_sub_pixel_variance16x64)
3008
3009 MBFP(BLOCK_64X16, aom_masked_sad64x16, aom_masked_sub_pixel_variance64x16)
Rupert Swarbrick2fa6e1c2017-09-11 12:38:10 +01003010
Yaowu Xuc27fc142016-08-22 16:08:15 -07003011 highbd_set_var_fns(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003012
Yaowu Xuf883b422016-08-30 14:01:10 -07003013  /* av1_init_quantizer() is first called here. A check in
3014   * av1_frame_init_quantizer() ensures that av1_init_quantizer() is only
Yaowu Xuc27fc142016-08-22 16:08:15 -07003015   * called again when needed, which avoids unnecessary calls to
Yaowu Xuf883b422016-08-30 14:01:10 -07003016   * av1_init_quantizer() for every frame.
Yaowu Xuc27fc142016-08-22 16:08:15 -07003017 */
Yaowu Xuf883b422016-08-30 14:01:10 -07003018 av1_init_quantizer(cpi);
Zoe Liud902b742018-02-19 17:02:41 -08003019 av1_qm_init(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003020
Yaowu Xuf883b422016-08-30 14:01:10 -07003021 av1_loop_filter_init(cm);
Urvang Joshide71d142017-10-05 12:12:15 -07003022 cm->superres_scale_denominator = SCALE_NUMERATOR;
Debargha Mukherjee29e40a62017-06-14 09:37:12 -07003023 cm->superres_upscaled_width = oxcf->width;
3024 cm->superres_upscaled_height = oxcf->height;
Yaowu Xuf883b422016-08-30 14:01:10 -07003025 av1_loop_restoration_precal();
Yaowu Xuc27fc142016-08-22 16:08:15 -07003026
3027 cm->error.setjmp = 0;
3028
3029 return cpi;
3030}
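/* Editorial sketch: av1_create_compressor() pairs with
 * av1_remove_compressor() below; a NULL return already means the partially
 * built encoder was torn down by the setjmp() error path above. The driver
 * below is hypothetical and only illustrates the pairing. */
#if 0 /* illustrative only, not compiled */
static int example_encoder_lifetime(AV1EncoderConfig *oxcf,
                                    BufferPool *const pool) {
  AV1_COMP *cpi = av1_create_compressor(oxcf, pool);
  if (cpi == NULL) return -1;  // creation failed and was already cleaned up
  /* ... run the encode loop here ... */
  av1_remove_compressor(cpi);  // frees everything allocated above
  return 0;
}
#endif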
3031
Urvang Joshiee2c8112018-05-04 14:53:15 -07003032#if CONFIG_INTERNAL_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003033#define SNPRINT(H, T) snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T))
3034
3035#define SNPRINT2(H, T, V) \
3036 snprintf((H) + strlen(H), sizeof(H) - strlen(H), (T), (V))
Urvang Joshiee2c8112018-05-04 14:53:15 -07003037#endif // CONFIG_INTERNAL_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003038
Yaowu Xuf883b422016-08-30 14:01:10 -07003039void av1_remove_compressor(AV1_COMP *cpi) {
3040 AV1_COMMON *cm;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003041 unsigned int i;
3042 int t;
3043
3044 if (!cpi) return;
3045
3046 cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003047 const int num_planes = av1_num_planes(cm);
3048
David Turnerd2a592e2018-11-16 14:59:31 +00003049 if (cm->current_frame.frame_number > 0) {
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08003050#if CONFIG_ENTROPY_STATS
3051 if (cpi->oxcf.pass != 1) {
3052 fprintf(stderr, "Writing counts.stt\n");
3053 FILE *f = fopen("counts.stt", "wb");
3054 fwrite(&aggregate_fc, sizeof(aggregate_fc), 1, f);
3055 fclose(f);
3056 }
3057#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003058#if CONFIG_INTERNAL_STATS
Yaowu Xuf883b422016-08-30 14:01:10 -07003059 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07003060
3061 if (cpi->oxcf.pass != 1) {
3062 char headings[512] = { 0 };
3063 char results[512] = { 0 };
3064 FILE *f = fopen("opsnr.stt", "a");
3065 double time_encoded =
3066 (cpi->last_end_time_stamp_seen - cpi->first_time_stamp_ever) /
3067 10000000.000;
3068 double total_encode_time =
3069 (cpi->time_receive_data + cpi->time_compress_data) / 1000.000;
3070 const double dr =
3071 (double)cpi->bytes * (double)8 / (double)1000 / time_encoded;
3072 const double peak = (double)((1 << cpi->oxcf.input_bit_depth) - 1);
3073 const double target_rate = (double)cpi->oxcf.target_bandwidth / 1000;
3074 const double rate_err = ((100.0 * (dr - target_rate)) / target_rate);
3075
3076 if (cpi->b_calculate_psnr) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003077 const double total_psnr = aom_sse_to_psnr(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003078 (double)cpi->total_samples, peak, (double)cpi->total_sq_error);
3079 const double total_ssim =
3080 100 * pow(cpi->summed_quality / cpi->summed_weights, 8.0);
3081 snprintf(headings, sizeof(headings),
Jingning Han87651b22017-11-28 20:02:26 -08003082 "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\tGLPsnrP\t"
Yaowu Xuf883b422016-08-30 14:01:10 -07003083 "AOMSSIM\tVPSSIMP\tFASTSIM\tPSNRHVS\t"
Jingning Hanbe1ae3f2017-11-27 10:27:56 -08003084 "WstPsnr\tWstSsim\tWstFast\tWstHVS\t"
Jingning Han87651b22017-11-28 20:02:26 -08003085 "AVPsnrY\tAPsnrCb\tAPsnrCr");
Yaowu Xuc27fc142016-08-22 16:08:15 -07003086 snprintf(results, sizeof(results),
3087 "%7.2f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
3088 "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
Jingning Hanbe1ae3f2017-11-27 10:27:56 -08003089 "%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
Jingning Han87651b22017-11-28 20:02:26 -08003090 "%7.3f\t%7.3f\t%7.3f",
Wan-Teh Changc25c92a2018-04-23 15:04:14 -07003091 dr, cpi->psnr.stat[STAT_ALL] / cpi->count, total_psnr,
3092 cpi->psnr.stat[STAT_ALL] / cpi->count, total_psnr, total_ssim,
3093 total_ssim, cpi->fastssim.stat[STAT_ALL] / cpi->count,
3094 cpi->psnrhvs.stat[STAT_ALL] / cpi->count, cpi->psnr.worst,
Jingning Hanbe1ae3f2017-11-27 10:27:56 -08003095 cpi->worst_ssim, cpi->fastssim.worst, cpi->psnrhvs.worst,
Wan-Teh Changc25c92a2018-04-23 15:04:14 -07003096 cpi->psnr.stat[STAT_Y] / cpi->count,
3097 cpi->psnr.stat[STAT_U] / cpi->count,
3098 cpi->psnr.stat[STAT_V] / cpi->count);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003099
3100 if (cpi->b_calculate_blockiness) {
3101 SNPRINT(headings, "\t Block\tWstBlck");
3102 SNPRINT2(results, "\t%7.3f", cpi->total_blockiness / cpi->count);
3103 SNPRINT2(results, "\t%7.3f", cpi->worst_blockiness);
3104 }
3105
3106 if (cpi->b_calculate_consistency) {
3107 double consistency =
Yaowu Xuf883b422016-08-30 14:01:10 -07003108 aom_sse_to_psnr((double)cpi->total_samples, peak,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003109 (double)cpi->total_inconsistency);
3110
3111 SNPRINT(headings, "\tConsist\tWstCons");
3112 SNPRINT2(results, "\t%7.3f", consistency);
3113 SNPRINT2(results, "\t%7.3f", cpi->worst_consistency);
3114 }
Sarah Parkerf97b7862016-08-25 17:42:57 -07003115 fprintf(f, "%s\t Time\tRcErr\tAbsErr\n", headings);
3116 fprintf(f, "%s\t%8.0f\t%7.2f\t%7.2f\n", results, total_encode_time,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003117 rate_err, fabs(rate_err));
3118 }
3119
3120 fclose(f);
3121 }
Urvang Joshiee2c8112018-05-04 14:53:15 -07003122#endif // CONFIG_INTERNAL_STATS
Debargha Mukherjee0857e662019-01-04 16:22:09 -08003123#if CONFIG_SPEED_STATS
3124 if (cpi->oxcf.pass != 1) {
3125 fprintf(stdout, "tx_search_count = %d\n", cpi->tx_search_count);
3126 }
3127#endif // CONFIG_SPEED_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003128 }
3129
Yue Chen7cae98f2018-08-24 10:43:16 -07003130 for (int frame = 0; frame < MAX_LAG_BUFFERS; ++frame) {
3131 aom_free(cpi->tpl_stats[frame].tpl_stats_ptr);
3132 cpi->tpl_stats[frame].is_valid = 0;
3133 }
3134
Ravi Chaudhary1f58dd82018-12-07 17:24:15 +05303135 for (t = cpi->num_workers - 1; t >= 0; --t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003136 AVxWorker *const worker = &cpi->workers[t];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003137 EncWorkerData *const thread_data = &cpi->tile_thr_data[t];
3138
3139 // Deallocate allocated threads.
Yaowu Xuf883b422016-08-30 14:01:10 -07003140 aom_get_worker_interface()->end(worker);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003141
3142 // Deallocate allocated thread data.
Ravi Chaudhary1cf7d162018-10-09 17:00:43 +05303143 if (cpi->row_mt == 1) aom_free(thread_data->td->tctx);
Ravi Chaudhary1f58dd82018-12-07 17:24:15 +05303144 if (t > 0) {
hui sud9a812b2017-07-06 14:34:37 -07003145 aom_free(thread_data->td->palette_buffer);
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07003146 aom_free(thread_data->td->tmp_conv_dst);
3147 for (int j = 0; j < 2; ++j) {
3148 aom_free(thread_data->td->tmp_obmc_bufs[j]);
3149 }
Jingning Hand064cf02017-06-01 10:00:39 -07003150 aom_free(thread_data->td->above_pred_buf);
3151 aom_free(thread_data->td->left_pred_buf);
3152 aom_free(thread_data->td->wsrc_buf);
wenyao.liu22d8ab32018-10-16 09:11:29 +08003153
Ravi Chaudhary5d970f42018-09-25 11:25:32 +05303154#if CONFIG_COLLECT_INTER_MODE_RD_STATS
3155 aom_free(thread_data->td->inter_modes_info);
3156#endif
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07003157 for (int x = 0; x < 2; x++) {
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05303158 for (int y = 0; y < 2; y++) {
3159 aom_free(thread_data->td->hash_value_buffer[x][y]);
3160 thread_data->td->hash_value_buffer[x][y] = NULL;
3161 }
Urvang Joshi0a4cfad2018-09-07 11:10:39 -07003162 }
Jingning Hand064cf02017-06-01 10:00:39 -07003163 aom_free(thread_data->td->mask_buf);
Yaowu Xuf883b422016-08-30 14:01:10 -07003164 aom_free(thread_data->td->counts);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003165 av1_free_pc_tree(thread_data->td, num_planes);
Yaowu Xuf883b422016-08-30 14:01:10 -07003166 aom_free(thread_data->td);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003167 }
3168 }
Ravi Chaudhary90a15f42018-10-11 18:56:35 +05303169#if CONFIG_MULTITHREAD
3170 if (cpi->row_mt == 1) {
3171 if (cpi->row_mt_mutex_ != NULL) {
3172 pthread_mutex_destroy(cpi->row_mt_mutex_);
3173 aom_free(cpi->row_mt_mutex_);
3174 }
3175 }
3176#endif
Ravi Chaudharyc5e74692018-10-08 16:05:38 +05303177 av1_row_mt_mem_dealloc(cpi);
Yaowu Xuf883b422016-08-30 14:01:10 -07003178 aom_free(cpi->tile_thr_data);
3179 aom_free(cpi->workers);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003180
Deepa K G964e72e2018-05-16 16:56:01 +05303181 if (cpi->num_workers > 1) {
3182 av1_loop_filter_dealloc(&cpi->lf_row_sync);
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05303183 av1_loop_restoration_dealloc(&cpi->lr_row_sync, cpi->num_workers);
Deepa K G964e72e2018-05-16 16:56:01 +05303184 }
3185
Yaowu Xuc27fc142016-08-22 16:08:15 -07003186 dealloc_compressor_data(cpi);
3187
3188 for (i = 0; i < sizeof(cpi->mbgraph_stats) / sizeof(cpi->mbgraph_stats[0]);
3189 ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003190 aom_free(cpi->mbgraph_stats[i].mb_stats);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003191 }
3192
3193#if CONFIG_FP_MB_STATS
3194 if (cpi->use_fp_mb_stats) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003195 aom_free(cpi->twopass.frame_mb_stats_buf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003196 cpi->twopass.frame_mb_stats_buf = NULL;
3197 }
3198#endif
Debargha Mukherjee5d157212017-01-10 14:44:47 -08003199#if CONFIG_INTERNAL_STATS
3200 aom_free(cpi->ssim_vars);
3201 cpi->ssim_vars = NULL;
3202#endif // CONFIG_INTERNAL_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07003203
Yaowu Xuf883b422016-08-30 14:01:10 -07003204 av1_remove_common(cm);
RogerZhou80d52342017-11-20 10:56:26 -08003205 for (i = 0; i < FRAME_BUFFERS; ++i) {
3206 av1_hash_table_destroy(&cm->buffer_pool->frame_bufs[i].hash_table);
3207 }
Michelle Findlay-Olynykdea531d2017-12-13 14:10:56 -08003208 if (cpi->sf.use_hash_based_trellis) hbt_destroy();
Yaowu Xuf883b422016-08-30 14:01:10 -07003209 av1_free_ref_frame_buffers(cm->buffer_pool);
3210 aom_free(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003211
3212#ifdef OUTPUT_YUV_SKINMAP
3213 fclose(yuv_skinmap_file);
3214#endif
3215#ifdef OUTPUT_YUV_REC
3216 fclose(yuv_rec_file);
3217#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003218}
3219
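// Compute the PSNR between the source frame and the just-reconstructed frame
// and append the result to the output packet list as an AOM_CODEC_PSNR_PKT.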
Yaowu Xuf883b422016-08-30 14:01:10 -07003220static void generate_psnr_packet(AV1_COMP *cpi) {
3221 struct aom_codec_cx_pkt pkt;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003222 int i;
3223 PSNR_STATS psnr;
David Turnerc29e1a92018-12-06 14:10:14 +00003224 aom_calc_highbd_psnr(cpi->source, &cpi->common.cur_frame->buf, &psnr,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003225 cpi->td.mb.e_mbd.bd, cpi->oxcf.input_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003226
3227 for (i = 0; i < 4; ++i) {
3228 pkt.data.psnr.samples[i] = psnr.samples[i];
3229 pkt.data.psnr.sse[i] = psnr.sse[i];
3230 pkt.data.psnr.psnr[i] = psnr.psnr[i];
3231 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003232 pkt.kind = AOM_CODEC_PSNR_PKT;
3233 aom_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003234}
3235
Yaowu Xuf883b422016-08-30 14:01:10 -07003236int av1_use_as_reference(AV1_COMP *cpi, int ref_frame_flags) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003237 if (ref_frame_flags > ((1 << INTER_REFS_PER_FRAME) - 1)) return -1;
3238
Yunqing Wangf2e7a392017-11-08 00:27:21 -08003239 cpi->ext_ref_frame_flags = ref_frame_flags;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003240 return 0;
3241}
3242
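// Record externally requested reference refreshes (AOM_LAST_FLAG etc.) as
// pending ext_refresh_* flags to be applied when the next frame is coded.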
Yunqing Wang9a50fec2017-11-02 17:02:00 -07003243void av1_update_reference(AV1_COMP *cpi, int ref_frame_upd_flags) {
3244 cpi->ext_refresh_last_frame = (ref_frame_upd_flags & AOM_LAST_FLAG) != 0;
3245 cpi->ext_refresh_golden_frame = (ref_frame_upd_flags & AOM_GOLD_FLAG) != 0;
3246 cpi->ext_refresh_alt_ref_frame = (ref_frame_upd_flags & AOM_ALT_FLAG) != 0;
3247 cpi->ext_refresh_bwd_ref_frame = (ref_frame_upd_flags & AOM_BWD_FLAG) != 0;
3248 cpi->ext_refresh_alt2_ref_frame = (ref_frame_upd_flags & AOM_ALT2_FLAG) != 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003249 cpi->ext_refresh_frame_flags_pending = 1;
3250}
3251
Thomas Daede497d1952017-08-08 17:33:06 -07003252int av1_copy_reference_enc(AV1_COMP *cpi, int idx, YV12_BUFFER_CONFIG *sd) {
3253 AV1_COMMON *const cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003254 const int num_planes = av1_num_planes(cm);
Thomas Daede497d1952017-08-08 17:33:06 -07003255 YV12_BUFFER_CONFIG *cfg = get_ref_frame(cm, idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003256 if (cfg) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003257 aom_yv12_copy_frame(cfg, sd, num_planes);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003258 return 0;
3259 } else {
3260 return -1;
3261 }
3262}
3263
Thomas Daede497d1952017-08-08 17:33:06 -07003264int av1_set_reference_enc(AV1_COMP *cpi, int idx, YV12_BUFFER_CONFIG *sd) {
3265 AV1_COMMON *const cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003266 const int num_planes = av1_num_planes(cm);
Thomas Daede497d1952017-08-08 17:33:06 -07003267 YV12_BUFFER_CONFIG *cfg = get_ref_frame(cm, idx);
Yaowu Xuf883b422016-08-30 14:01:10 -07003268 if (cfg) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003269 aom_yv12_copy_frame(sd, cfg, num_planes);
Yaowu Xuf883b422016-08-30 14:01:10 -07003270 return 0;
3271 } else {
3272 return -1;
3273 }
3274}
3275
3276int av1_update_entropy(AV1_COMP *cpi, int update) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003277 cpi->ext_refresh_frame_context = update;
3278 cpi->ext_refresh_frame_context_pending = 1;
3279 return 0;
3280}
3281
3282#if defined(OUTPUT_YUV_DENOISED) || defined(OUTPUT_YUV_SKINMAP)
3283// The denoiser buffer is allocated as a YUV 440 buffer. This function writes it
3284// as YUV 420. We simply use the top-left pixels of the UV buffers, since we do
3285// not denoise the UV channels at this time. If ever we implement UV channel
3286// denoising we will have to modify this.
Yaowu Xuf883b422016-08-30 14:01:10 -07003287void aom_write_yuv_frame_420(YV12_BUFFER_CONFIG *s, FILE *f) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003288 uint8_t *src = s->y_buffer;
3289 int h = s->y_height;
3290
3291 do {
3292 fwrite(src, s->y_width, 1, f);
3293 src += s->y_stride;
3294 } while (--h);
3295
3296 src = s->u_buffer;
3297 h = s->uv_height;
3298
3299 do {
3300 fwrite(src, s->uv_width, 1, f);
3301 src += s->uv_stride;
3302 } while (--h);
3303
3304 src = s->v_buffer;
3305 h = s->uv_height;
3306
3307 do {
3308 fwrite(src, s->uv_width, 1, f);
3309 src += s->uv_stride;
3310 } while (--h);
3311}
3312#endif
3313
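// Decide whether the next output should be signaled as a show_existing_frame,
// i.e. whether an already-coded BWDREF/ALTREF(2) buffer should simply be
// displayed instead of coding a new frame, and record which buffer to show.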
Yaowu Xuf883b422016-08-30 14:01:10 -07003314static void check_show_existing_frame(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003315 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
Yaowu Xuf883b422016-08-30 14:01:10 -07003316 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003317 const FRAME_UPDATE_TYPE next_frame_update_type =
3318 gf_group->update_type[gf_group->index];
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003319#if USE_SYMM_MULTI_LAYER
3320 const int which_arf = (cpi->new_bwdref_update_rule == 1)
3321 ? gf_group->arf_update_idx[gf_group->index] > 0
3322 : gf_group->arf_update_idx[gf_group->index];
3323#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003324 const int which_arf = gf_group->arf_update_idx[gf_group->index];
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003325#endif
Zoe Liu5fca7242016-10-10 17:18:57 -07003326
3327 if (cm->show_existing_frame == 1) {
3328 cm->show_existing_frame = 0;
3329 } else if (cpi->rc.is_last_bipred_frame) {
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003330#if USE_SYMM_MULTI_LAYER
3331 // NOTE: When new structure is used, every bwdref will have one overlay
3332 // frame. Therefore, there is no need to find out which frame to
3333 // show in advance.
3334 if (cpi->new_bwdref_update_rule == 0) {
3335#endif
3336      // NOTE: If the current frame is a last bi-predictive frame, the frame to
3337      // be shown next is the BWDREF_FRAME, which is pointed to by
3338      // last_fb_idxes[0] after the reference frame buffer update.
3339 cpi->rc.is_last_bipred_frame = 0;
3340 cm->show_existing_frame = 1;
David Turnera21966b2018-12-05 14:48:49 +00003341 cpi->existing_fb_idx_to_show = cm->remapped_ref_idx[0];
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003342#if USE_SYMM_MULTI_LAYER
3343 }
3344#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003345 } else if (cpi->is_arf_filter_off[which_arf] &&
3346 (next_frame_update_type == OVERLAY_UPDATE ||
3347 next_frame_update_type == INTNL_OVERLAY_UPDATE)) {
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003348#if USE_SYMM_MULTI_LAYER
3349 const int bwdref_to_show =
3350 (cpi->new_bwdref_update_rule == 1) ? BWDREF_FRAME : ALTREF2_FRAME;
3351#else
3352 const int bwdref_to_show = ALTREF2_FRAME;
3353#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003354 // Other parameters related to OVERLAY_UPDATE will be taken care of
Yaowu Xuf883b422016-08-30 14:01:10 -07003355 // in av1_rc_get_second_pass_params(cpi)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003356 cm->show_existing_frame = 1;
3357 cpi->rc.is_src_frame_alt_ref = 1;
Urvang Joshi4d9f15f2018-11-05 15:26:22 -08003358 cpi->existing_fb_idx_to_show =
3359 (next_frame_update_type == OVERLAY_UPDATE)
David Turnera21966b2018-12-05 14:48:49 +00003360 ? get_ref_frame_map_idx(cm, ALTREF_FRAME)
3361 : get_ref_frame_map_idx(cm, bwdref_to_show);
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003362#if USE_SYMM_MULTI_LAYER
3363 if (cpi->new_bwdref_update_rule == 0)
3364#endif
3365 cpi->is_arf_filter_off[which_arf] = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003366 }
3367 cpi->rc.is_src_frame_ext_arf = 0;
3368}
Yaowu Xuc27fc142016-08-22 16:08:15 -07003369
3370#ifdef OUTPUT_YUV_REC
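// Append one reconstructed frame (8-bit or high bit depth) to yuv_rec_file
// for offline debugging.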
Yaowu Xuf883b422016-08-30 14:01:10 -07003371void aom_write_one_yuv_frame(AV1_COMMON *cm, YV12_BUFFER_CONFIG *s) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003372 uint8_t *src = s->y_buffer;
3373 int h = cm->height;
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07003374 if (yuv_rec_file == NULL) return;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003375 if (s->flags & YV12_FLAG_HIGHBITDEPTH) {
3376 uint16_t *src16 = CONVERT_TO_SHORTPTR(s->y_buffer);
3377
3378 do {
3379 fwrite(src16, s->y_width, 2, yuv_rec_file);
3380 src16 += s->y_stride;
3381 } while (--h);
3382
3383 src16 = CONVERT_TO_SHORTPTR(s->u_buffer);
3384 h = s->uv_height;
3385
3386 do {
3387 fwrite(src16, s->uv_width, 2, yuv_rec_file);
3388 src16 += s->uv_stride;
3389 } while (--h);
3390
3391 src16 = CONVERT_TO_SHORTPTR(s->v_buffer);
3392 h = s->uv_height;
3393
3394 do {
3395 fwrite(src16, s->uv_width, 2, yuv_rec_file);
3396 src16 += s->uv_stride;
3397 } while (--h);
3398
3399 fflush(yuv_rec_file);
3400 return;
3401 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003402
3403 do {
3404 fwrite(src, s->y_width, 1, yuv_rec_file);
3405 src += s->y_stride;
3406 } while (--h);
3407
3408 src = s->u_buffer;
3409 h = s->uv_height;
3410
3411 do {
3412 fwrite(src, s->uv_width, 1, yuv_rec_file);
3413 src += s->uv_stride;
3414 } while (--h);
3415
3416 src = s->v_buffer;
3417 h = s->uv_height;
3418
3419 do {
3420 fwrite(src, s->uv_width, 1, yuv_rec_file);
3421 src += s->uv_stride;
3422 } while (--h);
3423
3424 fflush(yuv_rec_file);
3425}
3426#endif // OUTPUT_YUV_REC
3427
Debargha Mukherjee11f0e402017-03-29 07:42:40 -07003428#define GM_RECODE_LOOP_NUM4X4_FACTOR 192
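// Returns 1 if a recode is needed because some signaled global motion model
// costs more bits than its measured usage justifies; such models are reset to
// IDENTITY (and their cost to zero) before the recode.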
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003429static int recode_loop_test_global_motion(AV1_COMP *cpi) {
3430 int i;
3431 int recode = 0;
Debargha Mukherjeea575d232017-04-28 17:46:47 -07003432 RD_COUNTS *const rdc = &cpi->td.rd_counts;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003433 AV1_COMMON *const cm = &cpi->common;
3434 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
3435 if (cm->global_motion[i].wmtype != IDENTITY &&
Debargha Mukherjeea575d232017-04-28 17:46:47 -07003436 rdc->global_motion_used[i] * GM_RECODE_LOOP_NUM4X4_FACTOR <
Debargha Mukherjee265db6d2017-03-28 11:15:27 -07003437 cpi->gmparams_cost[i]) {
David Barkerd7c8bd52017-09-25 14:47:29 +01003438 cm->global_motion[i] = default_warp_params;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07003439 assert(cm->global_motion[i].wmtype == IDENTITY);
Debargha Mukherjee265db6d2017-03-28 11:15:27 -07003440 cpi->gmparams_cost[i] = 0;
David Barker43479c62016-11-30 10:34:20 +00003441 recode = 1;
Urvang Joshi02aade82017-12-18 17:18:16 -08003442 // TODO(sarahparker): The earlier condition for recoding here was:
3443 // "recode |= (rdc->global_motion_used[i] > 0);". Can we bring something
3444 // similar to that back to speed up global motion?
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003445 }
3446 }
3447 return recode;
3448}
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003449
Yaowu Xuc27fc142016-08-22 16:08:15 -07003450// Function to test for conditions that indicate we should loop
3451// back and recode a frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07003452static int recode_loop_test(AV1_COMP *cpi, int high_limit, int low_limit, int q,
3453 int maxq, int minq) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003454 const RATE_CONTROL *const rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -07003455 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003456 const int frame_is_kfgfarf = frame_is_kf_gf_arf(cpi);
3457 int force_recode = 0;
3458
3459 if ((rc->projected_frame_size >= rc->max_frame_bandwidth) ||
3460 (cpi->sf.recode_loop == ALLOW_RECODE) ||
3461 (frame_is_kfgfarf && (cpi->sf.recode_loop == ALLOW_RECODE_KFARFGF))) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003462 // TODO(agrange) high_limit could be greater than the scale-down threshold.
3463 if ((rc->projected_frame_size > high_limit && q < maxq) ||
3464 (rc->projected_frame_size < low_limit && q > minq)) {
3465 force_recode = 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07003466 } else if (cpi->oxcf.rc_mode == AOM_CQ) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003467 // Deal with frame undershoot and whether or not we are
3468 // below the automatically set cq level.
3469 if (q > oxcf->cq_level &&
3470 rc->projected_frame_size < ((rc->this_frame_target * 7) >> 3)) {
3471 force_recode = 1;
3472 }
3473 }
3474 }
3475 return force_recode;
3476}
3477
Yaowu Xuc27fc142016-08-22 16:08:15 -07003478#define DUMP_REF_FRAME_IMAGES 0
3479
3480#if DUMP_REF_FRAME_IMAGES == 1
Yaowu Xuf883b422016-08-30 14:01:10 -07003481static int dump_one_image(AV1_COMMON *cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003482 const YV12_BUFFER_CONFIG *const ref_buf,
3483 char *file_name) {
3484 int h;
3485 FILE *f_ref = NULL;
3486
3487 if (ref_buf == NULL) {
3488 printf("Frame data buffer is NULL.\n");
Yaowu Xuf883b422016-08-30 14:01:10 -07003489 return AOM_CODEC_MEM_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003490 }
3491
3492 if ((f_ref = fopen(file_name, "wb")) == NULL) {
3493 printf("Unable to open file %s to write.\n", file_name);
Yaowu Xuf883b422016-08-30 14:01:10 -07003494 return AOM_CODEC_MEM_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003495 }
3496
3497 // --- Y ---
3498 for (h = 0; h < cm->height; ++h) {
3499 fwrite(&ref_buf->y_buffer[h * ref_buf->y_stride], 1, cm->width, f_ref);
3500 }
3501 // --- U ---
3502 for (h = 0; h < (cm->height >> 1); ++h) {
3503 fwrite(&ref_buf->u_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
3504 f_ref);
3505 }
3506 // --- V ---
3507 for (h = 0; h < (cm->height >> 1); ++h) {
3508 fwrite(&ref_buf->v_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
3509 f_ref);
3510 }
3511
3512 fclose(f_ref);
3513
Yaowu Xuf883b422016-08-30 14:01:10 -07003514 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003515}
3516
Yaowu Xuf883b422016-08-30 14:01:10 -07003517static void dump_ref_frame_images(AV1_COMP *cpi) {
3518 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003519 MV_REFERENCE_FRAME ref_frame;
3520
3521 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
3522 char file_name[256] = "";
3523 snprintf(file_name, sizeof(file_name), "/tmp/enc_F%d_ref_%d.yuv",
David Turnerd2a592e2018-11-16 14:59:31 +00003524 cm->current_frame.frame_number, ref_frame);
David Turnera21966b2018-12-05 14:48:49 +00003525 dump_one_image(cm, get_ref_frame_yv12_buf(cpi, ref_frame), file_name);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003526 }
3527}
3528#endif // DUMP_REF_FRAME_IMAGES == 1
3529
Yaowu Xuc27fc142016-08-22 16:08:15 -07003530// This function is used to shift the virtual indices of last reference frames
3531// as follows:
3532// LAST_FRAME -> LAST2_FRAME -> LAST3_FRAME
3533// when the LAST_FRAME is updated.
Yaowu Xuf883b422016-08-30 14:01:10 -07003534static INLINE void shift_last_ref_frames(AV1_COMP *cpi) {
Imdad Sardharwalladadaba62018-02-23 12:06:56 +00003535 // TODO(isbs): shift the scaled indices as well
Urvang Joshia130dcc2018-11-06 10:27:35 -08003536 for (int ref_frame = LAST3_FRAME; ref_frame > LAST_FRAME; --ref_frame) {
3537 const int ref_idx = ref_frame - LAST_FRAME;
David Turnera21966b2018-12-05 14:48:49 +00003538 cpi->common.remapped_ref_idx[ref_idx] =
3539 cpi->common.remapped_ref_idx[ref_idx - 1];
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003540
3541 if (!cpi->rc.is_src_frame_alt_ref) {
3542 memcpy(cpi->interp_filter_selected[ref_frame],
3543 cpi->interp_filter_selected[ref_frame - 1],
3544 sizeof(cpi->interp_filter_selected[ref_frame - 1]));
3545 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003546 }
3547}
Yaowu Xuc27fc142016-08-22 16:08:15 -07003548
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003549#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003550// This function is used to shift the virtual indices of bwd reference
3551// frames as follows:
3552// BWD_REF -> ALT2_REF -> EXT_REF
3553// to clear a space to store the closest bwdref
3554static INLINE void rshift_bwd_ref_frames(AV1_COMP *cpi) {
3555 // TODO(isbs): shift the scaled indices as well
Urvang Joshi03d8ebe2018-11-08 17:13:44 -08003556 static const int ordered_bwd[3] = { BWDREF_FRAME, ALTREF2_FRAME,
3557 EXTREF_FRAME };
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003558
3559 for (int i = 2; i > 0; --i) {
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003560 // [0] is allocated to the current coded frame, i.e. bwdref
3561 memcpy(cpi->interp_filter_selected[ordered_bwd[i]],
3562 cpi->interp_filter_selected[ordered_bwd[i - 1]],
3563 sizeof(cpi->interp_filter_selected[ordered_bwd[i - 1]]));
3564
David Turnera21966b2018-12-05 14:48:49 +00003565 cpi->common.remapped_ref_idx[ordered_bwd[i] - LAST_FRAME] =
3566 cpi->common.remapped_ref_idx[ordered_bwd[i - 1] - LAST_FRAME];
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003567 }
3568}
3569
3570// This function is used to shift the virtual indices of bwd reference
3571// frames as follows:
3572// BWD_REF <- ALT2_REF <- EXT_REF
3573// to update the bwd reference frame for coding the next frame.
3574static INLINE void lshift_bwd_ref_frames(AV1_COMP *cpi) {
3575 // TODO(isbs): shift the scaled indices as well
Urvang Joshi03d8ebe2018-11-08 17:13:44 -08003576 static const int ordered_bwd[3] = { BWDREF_FRAME, ALTREF2_FRAME,
3577 EXTREF_FRAME };
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003578
3579 for (int i = 0; i < 2; ++i) {
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003580 // [0] is allocated to the current coded frame, i.e. bwdref
3581 memcpy(cpi->interp_filter_selected[ordered_bwd[i]],
3582 cpi->interp_filter_selected[ordered_bwd[i + 1]],
3583 sizeof(cpi->interp_filter_selected[ordered_bwd[i + 1]]));
3584
David Turnera21966b2018-12-05 14:48:49 +00003585 cpi->common.remapped_ref_idx[ordered_bwd[i] - LAST_FRAME] =
3586 cpi->common.remapped_ref_idx[ordered_bwd[i + 1] - LAST_FRAME];
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003587 }
3588}
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003589#endif // USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003590
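// After the current frame has been encoded, update ref_frame_map[] and
// remapped_ref_idx[] according to the refresh flags, handling the special
// cases of key frames, preserved golden frames, show_existing frames and the
// LAST/BWDREF shifting rules described below.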
Zoe Liu8dd1c982017-09-11 10:14:35 -07003591static void update_reference_frames(AV1_COMP *cpi) {
3592 AV1_COMMON *const cm = &cpi->common;
3593
Yaowu Xuc27fc142016-08-22 16:08:15 -07003594 // NOTE: Save the new show frame buffer index for --test-code=warn, i.e.,
3595  // to verify that there is no mismatch between the encoder and decoder.
David Turnere7ebf902018-12-04 14:04:55 +00003596 if (cm->show_frame) cpi->last_show_frame_buf = cm->cur_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003597
Sarah Parker33005522018-07-27 14:46:25 -07003598  // In the case of a show_existing frame, we will not send refresh flags
3599  // to the decoder. Any change in the reference frame buffer can be done by
3600 // switching the virtual indices.
3601 if (cm->show_existing_frame) {
Sarah Parker29147cf2018-10-16 20:34:51 -07003602 // If we are not indicating to the decoder that this frame is
3603 // a show_existing_frame, which occurs in error_resilient mode,
Sarah Parkera9e19052018-10-18 17:49:26 -07003604 // we still want to refresh the LAST_FRAME when the current frame
3605 // was the source of an ext_arf.
3606 cpi->refresh_last_frame =
3607 !encode_show_existing_frame(cm) && cpi->rc.is_src_frame_ext_arf;
Sarah Parker33005522018-07-27 14:46:25 -07003608 cpi->refresh_golden_frame = 0;
3609 cpi->refresh_bwd_ref_frame = 0;
3610 cpi->refresh_alt2_ref_frame = 0;
3611 cpi->refresh_alt_ref_frame = 0;
3612
3613 cpi->rc.is_bwd_ref_frame = 0;
3614 cpi->rc.is_last_bipred_frame = 0;
3615 cpi->rc.is_bipred_frame = 0;
3616 }
3617
Yaowu Xuc27fc142016-08-22 16:08:15 -07003618 // At this point the new frame has been encoded.
3619 // If any buffer copy / swapping is signaled it should be done here.
Zoe Liubcef1e62018-04-06 20:56:11 -07003620
Sarah Parkerb9041612018-05-22 19:06:47 -07003621 // Only update all of the reference buffers if a KEY_FRAME is also a
3622 // show_frame. This ensures a fwd keyframe does not update all of the buffers
David Turnerd2a592e2018-11-16 14:59:31 +00003623 if ((cm->current_frame.frame_type == KEY_FRAME && cm->show_frame) ||
3624 frame_is_sframe(cm)) {
Zoe Liubcef1e62018-04-06 20:56:11 -07003625 for (int ref_frame = 0; ref_frame < REF_FRAMES; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00003626 assign_frame_buffer_p(&cm->ref_frame_map[cm->remapped_ref_idx[ref_frame]],
3627 cm->cur_frame);
Zoe Liubcef1e62018-04-06 20:56:11 -07003628 }
3629 return;
3630 }
3631
3632 if (av1_preserve_existing_gf(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003633 // We have decided to preserve the previously existing golden frame as our
3634 // new ARF frame. However, in the short term in function
Yaowu Xuf883b422016-08-30 14:01:10 -07003635 // av1_bitstream.c::get_refresh_mask() we left it in the GF slot and, if
Yaowu Xuc27fc142016-08-22 16:08:15 -07003636 // we're updating the GF with the current decoded frame, we save it to the
3637 // ARF slot instead.
3638    // We now have to update the ARF with the current frame and swap the GOLDEN
3639    // and ALTREF entries of remapped_ref_idx so that, overall, we've stored the
3640    // old GF in the new ARF slot and, if we're updating the GF, the current
3641    // frame becomes the new GF.
3641 int tmp;
3642
Wei-Ting Lina8c02452018-08-13 11:04:06 -07003643    // ARF is in general a better reference than the overlay. We should keep ARF
3644    // as a reference instead of replacing it with the overlay.
3645
3646 if (!cpi->preserve_arf_as_gld) {
David Turnere7ebf902018-12-04 14:04:55 +00003647 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003648 &cm->ref_frame_map[get_ref_frame_map_idx(cm, ALTREF_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003649 cm->cur_frame);
Wei-Ting Lina8c02452018-08-13 11:04:06 -07003650 }
3651
David Turnera21966b2018-12-05 14:48:49 +00003652 tmp = get_ref_frame_map_idx(cm, ALTREF_FRAME);
3653 cm->remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] =
3654 get_ref_frame_map_idx(cm, GOLDEN_FRAME);
3655 cm->remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] = tmp;
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003656
3657 // TODO(zoeliu): Do we need to copy cpi->interp_filter_selected[0] over to
3658 // cpi->interp_filter_selected[GOLDEN_FRAME]?
Sarah Parker7a9bb782018-10-11 14:52:42 -07003659 } else if (cpi->rc.is_src_frame_ext_arf && encode_show_existing_frame(cm)) {
Wei-Ting Linb72453f2018-06-26 14:05:38 -07003660#if CONFIG_DEBUG
3661 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
3662 assert(gf_group->update_type[gf_group->index] == INTNL_OVERLAY_UPDATE);
3663#endif
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003664#if USE_SYMM_MULTI_LAYER
3665 const int bwdref_to_show =
3666 (cpi->new_bwdref_update_rule == 1) ? BWDREF_FRAME : ALTREF2_FRAME;
3667#else
3668 const int bwdref_to_show = ALTREF2_FRAME;
3669#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003670 // Deal with the special case for showing existing internal ALTREF_FRAME
3671 // Refresh the LAST_FRAME with the ALTREF_FRAME and retire the LAST3_FRAME
3672 // by updating the virtual indices.
David Turnera21966b2018-12-05 14:48:49 +00003673 const int last3_remapped_idx = get_ref_frame_map_idx(cm, LAST3_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003674 shift_last_ref_frames(cpi);
Zoe Liue9b15e22017-07-19 15:53:01 -07003675
David Turnera21966b2018-12-05 14:48:49 +00003676 cm->remapped_ref_idx[LAST_FRAME - LAST_FRAME] =
3677 get_ref_frame_map_idx(cm, bwdref_to_show);
Zoe Liue9b15e22017-07-19 15:53:01 -07003678
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003679 memcpy(cpi->interp_filter_selected[LAST_FRAME],
3680 cpi->interp_filter_selected[bwdref_to_show],
3681 sizeof(cpi->interp_filter_selected[bwdref_to_show]));
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003682#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003683 if (cpi->new_bwdref_update_rule == 1) {
3684 lshift_bwd_ref_frames(cpi);
3685 // pass outdated forward reference frame (previous LAST3) to the
3686 // spared space
David Turnera21966b2018-12-05 14:48:49 +00003687 cm->remapped_ref_idx[EXTREF_FRAME - LAST_FRAME] = last3_remapped_idx;
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003688 } else {
3689#endif
David Turnera21966b2018-12-05 14:48:49 +00003690 cm->remapped_ref_idx[bwdref_to_show - LAST_FRAME] = last3_remapped_idx;
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003691#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003692 }
3693#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003694 } else { /* For non key/golden frames */
Zoe Liue9b15e22017-07-19 15:53:01 -07003695 // === ALTREF_FRAME ===
Yaowu Xuc27fc142016-08-22 16:08:15 -07003696 if (cpi->refresh_alt_ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00003697 int arf_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
David Turnere7ebf902018-12-04 14:04:55 +00003698 assign_frame_buffer_p(&cm->ref_frame_map[arf_idx], cm->cur_frame);
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003699
3700 memcpy(cpi->interp_filter_selected[ALTREF_FRAME],
3701 cpi->interp_filter_selected[0],
3702 sizeof(cpi->interp_filter_selected[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003703 }
3704
Zoe Liue9b15e22017-07-19 15:53:01 -07003705 // === GOLDEN_FRAME ===
Yaowu Xuc27fc142016-08-22 16:08:15 -07003706 if (cpi->refresh_golden_frame) {
David Turnere7ebf902018-12-04 14:04:55 +00003707 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003708 &cm->ref_frame_map[get_ref_frame_map_idx(cm, GOLDEN_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003709 cm->cur_frame);
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003710
3711 memcpy(cpi->interp_filter_selected[GOLDEN_FRAME],
3712 cpi->interp_filter_selected[0],
3713 sizeof(cpi->interp_filter_selected[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003714 }
3715
Zoe Liue9b15e22017-07-19 15:53:01 -07003716 // === BWDREF_FRAME ===
Yaowu Xuc27fc142016-08-22 16:08:15 -07003717 if (cpi->refresh_bwd_ref_frame) {
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003718#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003719 if (cpi->new_bwdref_update_rule) {
3720 // We shift the backward reference frame as follows:
3721 // BWDREF -> ALTREF2 -> EXTREF
3722 // and assign the newly coded frame to BWDREF so that it always
3723 // keeps the nearest future frame
David Turnera21966b2018-12-05 14:48:49 +00003724 const int tmp = get_ref_frame_map_idx(cm, EXTREF_FRAME);
David Turnere7ebf902018-12-04 14:04:55 +00003725 assign_frame_buffer_p(&cm->ref_frame_map[tmp], cm->cur_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003726
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003727 rshift_bwd_ref_frames(cpi);
David Turnera21966b2018-12-05 14:48:49 +00003728 cm->remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = tmp;
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003729 } else {
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003730#endif // USE_SYMM_MULTI_LAYER
David Turnere7ebf902018-12-04 14:04:55 +00003731 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003732 &cm->ref_frame_map[get_ref_frame_map_idx(cm, BWDREF_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003733 cm->cur_frame);
Wei-Ting Lin240d9b42018-07-12 11:48:02 -07003734#if USE_SYMM_MULTI_LAYER
Wei-Ting Lincc75ca72018-07-10 15:36:32 -07003735 }
3736#endif
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003737 memcpy(cpi->interp_filter_selected[BWDREF_FRAME],
3738 cpi->interp_filter_selected[0],
3739 sizeof(cpi->interp_filter_selected[0]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003740 }
Zoe Liue9b15e22017-07-19 15:53:01 -07003741
Zoe Liue9b15e22017-07-19 15:53:01 -07003742 // === ALTREF2_FRAME ===
3743 if (cpi->refresh_alt2_ref_frame) {
David Turnere7ebf902018-12-04 14:04:55 +00003744 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003745 &cm->ref_frame_map[get_ref_frame_map_idx(cm, ALTREF2_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003746 cm->cur_frame);
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003747
3748 memcpy(cpi->interp_filter_selected[ALTREF2_FRAME],
3749 cpi->interp_filter_selected[0],
3750 sizeof(cpi->interp_filter_selected[0]));
Zoe Liue9b15e22017-07-19 15:53:01 -07003751 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003752 }
3753
3754 if (cpi->refresh_last_frame) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003755 // NOTE(zoeliu): We have two layers of mapping (1) from the per-frame
3756 // reference to the reference frame buffer virtual index; and then (2) from
David Turnere7ebf902018-12-04 14:04:55 +00003757 // the virtual index to the reference frame buffer (RefCntBuffer):
Yaowu Xuc27fc142016-08-22 16:08:15 -07003758 //
Urvang Joshi4d9f15f2018-11-05 15:26:22 -08003759 // LAST_FRAME, ..., EXTREF_FRAME
3760 // | |
3761 // v v
3762 // remapped_ref_idx[LAST_FRAME - 1], ..., remapped_ref_idx[EXTREF_FRAME - 1]
3763 // | |
3764 // v v
3765 // ref_frame_map[], ..., ref_frame_map[]
Yaowu Xuc27fc142016-08-22 16:08:15 -07003766 //
3767 // When refresh_last_frame is set, it is intended to retire LAST3_FRAME,
3768 // have the other 2 LAST reference frames shifted as follows:
3769 // LAST_FRAME -> LAST2_FRAME -> LAST3_FRAME
3770 // , and then have LAST_FRAME refreshed by the newly coded frame.
3771 //
3772 // To fulfill it, the decoder will be notified to execute following 2 steps:
3773 //
3774    // (a) To change ref_frame_map[] and have the virtual index of LAST3_FRAME
3775    //     point to the newly coded frame, i.e.
David Turnere7ebf902018-12-04 14:04:55 +00003776    //     ref_frame_map[remapped_ref_idx[LAST3_FRAME - 1]] => cur_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003777 //
3778 // (b) To change the 1st layer mapping to have LAST_FRAME mapped to the
3779 // original virtual index of LAST3_FRAME and have the other mappings
3780 // shifted as follows:
Urvang Joshi4d9f15f2018-11-05 15:26:22 -08003781 // LAST_FRAME, LAST2_FRAME, LAST3_FRAME
3782 // | | |
3783 // v v v
3784 // remapped_ref_idx[2], remapped_ref_idx[0], remapped_ref_idx[1]
David Turnere7ebf902018-12-04 14:04:55 +00003785 assign_frame_buffer_p(
David Turnera21966b2018-12-05 14:48:49 +00003786 &cm->ref_frame_map[get_ref_frame_map_idx(cm, LAST3_FRAME)],
David Turnere7ebf902018-12-04 14:04:55 +00003787 cm->cur_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003788
David Turnera21966b2018-12-05 14:48:49 +00003789 int last3_remapped_idx = get_ref_frame_map_idx(cm, LAST3_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003790
Zoe Liubcef1e62018-04-06 20:56:11 -07003791 shift_last_ref_frames(cpi);
David Turnera21966b2018-12-05 14:48:49 +00003792 cm->remapped_ref_idx[LAST_FRAME - LAST_FRAME] = last3_remapped_idx;
Zoe Liubcef1e62018-04-06 20:56:11 -07003793
Sarah Parker5336b9c2018-10-18 11:34:20 -07003794 assert(!encode_show_existing_frame(cm));
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003795 memcpy(cpi->interp_filter_selected[LAST_FRAME],
3796 cpi->interp_filter_selected[0],
3797 sizeof(cpi->interp_filter_selected[0]));
Zoe Liubcef1e62018-04-06 20:56:11 -07003798
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003799 // If the new structure is used, we will always have overlay frames coupled
3800 // with bwdref frames. Therefore, we won't have to perform this update
3801 // in advance (we do this update when the overlay frame shows up).
3802#if USE_SYMM_MULTI_LAYER
3803 if (cpi->new_bwdref_update_rule == 0 && cpi->rc.is_last_bipred_frame) {
3804#else
Zoe Liubcef1e62018-04-06 20:56:11 -07003805 if (cpi->rc.is_last_bipred_frame) {
Wei-Ting Linbafa11c2018-07-10 13:20:59 -07003806#endif
Zoe Liubcef1e62018-04-06 20:56:11 -07003807 // Refresh the LAST_FRAME with the BWDREF_FRAME and retire the
3808 // LAST3_FRAME by updating the virtual indices.
3809 //
3810      // NOTE: Unlike ALTREF, whose OVERLAY frame acts as a holding position,
3811      //       the source frame for BWDREF has none. Hence, to resolve the reference
3812 // virtual index reshuffling for BWDREF, the encoder always
3813 // specifies a LAST_BIPRED right before BWDREF and completes the
3814 // reshuffling job accordingly.
David Turnera21966b2018-12-05 14:48:49 +00003815 last3_remapped_idx = get_ref_frame_map_idx(cm, LAST3_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003816
3817 shift_last_ref_frames(cpi);
David Turnera21966b2018-12-05 14:48:49 +00003818 cm->remapped_ref_idx[LAST_FRAME - LAST_FRAME] =
3819 get_ref_frame_map_idx(cm, BWDREF_FRAME);
3820 cm->remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = last3_remapped_idx;
Yunqing Wang9538e4d2019-01-07 18:28:08 +00003821
3822 memcpy(cpi->interp_filter_selected[LAST_FRAME],
3823 cpi->interp_filter_selected[BWDREF_FRAME],
3824 sizeof(cpi->interp_filter_selected[BWDREF_FRAME]));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003825 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003826 }
3827
3828#if DUMP_REF_FRAME_IMAGES == 1
3829 // Dump out all reference frame images.
3830 dump_ref_frame_images(cpi);
3831#endif // DUMP_REF_FRAME_IMAGES
3832}
3833
David Turnere7ebf902018-12-04 14:04:55 +00003834static INLINE void alloc_frame_mvs(AV1_COMMON *const cm, RefCntBuffer *buf) {
3835 assert(buf != NULL);
3836 ensure_mv_buffer(buf, cm);
3837 buf->width = cm->width;
3838 buf->height = cm->height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003839}
3840
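// For each active reference whose dimensions differ from the current coded
// frame size, produce (or reuse) a rescaled copy in cpi->scaled_ref_buf;
// references that already match simply gain an extra reference count.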
Cheng Chen46f30c72017-09-07 11:13:33 -07003841static void scale_references(AV1_COMP *cpi) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003842 AV1_COMMON *cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00003843 const int num_planes = av1_num_planes(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003844 MV_REFERENCE_FRAME ref_frame;
Yaowu Xuf883b422016-08-30 14:01:10 -07003845 const AOM_REFFRAME ref_mask[INTER_REFS_PER_FRAME] = {
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02003846 AOM_LAST_FLAG, AOM_LAST2_FLAG, AOM_LAST3_FLAG, AOM_GOLD_FLAG,
3847 AOM_BWD_FLAG, AOM_ALT2_FLAG, AOM_ALT_FLAG
Yaowu Xuc27fc142016-08-22 16:08:15 -07003848 };
3849
3850 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003851 // Need to convert from AOM_REFFRAME to index into ref_mask (subtract 1).
Yaowu Xuc27fc142016-08-22 16:08:15 -07003852 if (cpi->ref_frame_flags & ref_mask[ref_frame - 1]) {
3853 BufferPool *const pool = cm->buffer_pool;
3854 const YV12_BUFFER_CONFIG *const ref =
David Turnera21966b2018-12-05 14:48:49 +00003855 get_ref_frame_yv12_buf(cm, ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003856
3857 if (ref == NULL) {
David Turnere7ebf902018-12-04 14:04:55 +00003858 cpi->scaled_ref_buf[ref_frame - 1] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003859 continue;
3860 }
3861
Yaowu Xuc27fc142016-08-22 16:08:15 -07003862 if (ref->y_crop_width != cm->width || ref->y_crop_height != cm->height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003863 int force_scaling = 0;
David Turnere7ebf902018-12-04 14:04:55 +00003864 RefCntBuffer *new_fb = cpi->scaled_ref_buf[ref_frame - 1];
3865 if (new_fb == NULL) {
3866 const int new_fb_idx = get_free_fb(cm);
3867 if (new_fb_idx == INVALID_IDX) {
Wan-Teh Chang4a8c0042018-10-05 09:41:52 -07003868 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
3869 "Unable to find free frame buffer");
David Turnere7ebf902018-12-04 14:04:55 +00003870 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003871 force_scaling = 1;
David Turnere7ebf902018-12-04 14:04:55 +00003872 new_fb = &pool->frame_bufs[new_fb_idx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003873 }
David Turnere7ebf902018-12-04 14:04:55 +00003874
3875 if (force_scaling || new_fb->buf.y_crop_width != cm->width ||
3876 new_fb->buf.y_crop_height != cm->height) {
Yaowu Xu671f2bd2016-09-30 15:07:57 -07003877 if (aom_realloc_frame_buffer(
David Turnere7ebf902018-12-04 14:04:55 +00003878 &new_fb->buf, cm->width, cm->height,
Urvang Joshi20cf30e2018-07-19 02:33:58 -07003879 cm->seq_params.subsampling_x, cm->seq_params.subsampling_y,
Satish Kumar Suman29909962019-01-09 10:31:21 +05303880 cm->seq_params.use_highbitdepth, cpi->oxcf.border_in_pixels,
Wan-Teh Chang41d286f2018-10-03 11:43:03 -07003881 cm->byte_alignment, NULL, NULL, NULL)) {
3882 if (force_scaling) {
3883 // Release the reference acquired in the get_free_fb() call above.
David Turnere7ebf902018-12-04 14:04:55 +00003884 --new_fb->ref_count;
Wan-Teh Chang41d286f2018-10-03 11:43:03 -07003885 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003886 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003887 "Failed to allocate frame buffer");
Wan-Teh Chang41d286f2018-10-03 11:43:03 -07003888 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07003889 av1_resize_and_extend_frame(
David Turnere7ebf902018-12-04 14:04:55 +00003890 ref, &new_fb->buf, (int)cm->seq_params.bit_depth, num_planes);
3891 cpi->scaled_ref_buf[ref_frame - 1] = new_fb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003892 alloc_frame_mvs(cm, new_fb);
3893 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003894 } else {
David Turnera21966b2018-12-05 14:48:49 +00003895 RefCntBuffer *buf = get_ref_frame_buf(cm, ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003896 buf->buf.y_crop_width = ref->y_crop_width;
3897 buf->buf.y_crop_height = ref->y_crop_height;
David Turnere7ebf902018-12-04 14:04:55 +00003898 cpi->scaled_ref_buf[ref_frame - 1] = buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003899 ++buf->ref_count;
3900 }
3901 } else {
David Turnere7ebf902018-12-04 14:04:55 +00003902 if (cpi->oxcf.pass != 0) cpi->scaled_ref_buf[ref_frame - 1] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003903 }
3904 }
3905}
3906
Yaowu Xuf883b422016-08-30 14:01:10 -07003907static void release_scaled_references(AV1_COMP *cpi) {
Imdad Sardharwalladadaba62018-02-23 12:06:56 +00003908 // TODO(isbs): only refresh the necessary frames, rather than all of them
David Turnere7ebf902018-12-04 14:04:55 +00003909 for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3910 RefCntBuffer *const buf = cpi->scaled_ref_buf[i];
3911 if (buf != NULL) {
Imdad Sardharwalladadaba62018-02-23 12:06:56 +00003912 --buf->ref_count;
David Turnere7ebf902018-12-04 14:04:55 +00003913 cpi->scaled_ref_buf[i] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003914 }
3915 }
3916}
3917
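// Derive the initial motion search range from the frame dimensions and, when
// auto step size is enabled, from the largest motion vector magnitude seen in
// the previous frame.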
Yaowu Xuf883b422016-08-30 14:01:10 -07003918static void set_mv_search_params(AV1_COMP *cpi) {
3919 const AV1_COMMON *const cm = &cpi->common;
3920 const unsigned int max_mv_def = AOMMIN(cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003921
3922 // Default based on max resolution.
Yaowu Xuf883b422016-08-30 14:01:10 -07003923 cpi->mv_step_param = av1_init_search_range(max_mv_def);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003924
3925 if (cpi->sf.mv.auto_mv_step_size) {
3926 if (frame_is_intra_only(cm)) {
3927 // Initialize max_mv_magnitude for use in the first INTER frame
3928 // after a key/intra-only frame.
3929 cpi->max_mv_magnitude = max_mv_def;
3930 } else {
3931 if (cm->show_frame) {
3932 // Allow mv_steps to correspond to twice the max mv magnitude found
3933 // in the previous frame, capped by the default max_mv_magnitude based
3934 // on resolution.
Yaowu Xuf883b422016-08-30 14:01:10 -07003935 cpi->mv_step_param = av1_init_search_range(
3936 AOMMIN(max_mv_def, 2 * cpi->max_mv_magnitude));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003937 }
3938 cpi->max_mv_magnitude = 0;
3939 }
3940 }
3941}
3942
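// Per-frame setup that does not depend on the coded frame size: reset the
// global motion models, pick speed features and RD thresholds, and decide
// whether screen-content tools are allowed for intra-only frames.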
Yaowu Xuf883b422016-08-30 14:01:10 -07003943static void set_size_independent_vars(AV1_COMP *cpi) {
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003944 int i;
Debargha Mukherjeedf713102018-10-02 12:33:32 -07003945 AV1_COMMON *cm = &cpi->common;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003946 for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
Debargha Mukherjeedf713102018-10-02 12:33:32 -07003947 cm->global_motion[i] = default_warp_params;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08003948 }
3949 cpi->global_motion_search_done = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07003950 av1_set_speed_features_framesize_independent(cpi);
3951 av1_set_rd_speed_thresholds(cpi);
3952 av1_set_rd_speed_thresholds_sub8x8(cpi);
Debargha Mukherjeedf713102018-10-02 12:33:32 -07003953 cm->interp_filter = SWITCHABLE;
3954 cm->switchable_motion_mode = 1;
3955
3956 if (frame_is_intra_only(cm)) {
3957 if (cm->seq_params.force_screen_content_tools == 2) {
3958 cm->allow_screen_content_tools =
3959 cpi->oxcf.content == AOM_CONTENT_SCREEN ||
3960 is_screen_content(cpi->source->y_buffer,
3961 cpi->source->flags & YV12_FLAG_HIGHBITDEPTH,
3962 cm->seq_params.bit_depth, cpi->source->y_stride,
3963 cpi->source->y_width, cpi->source->y_height);
3964 } else {
3965 cm->allow_screen_content_tools =
3966 cm->seq_params.force_screen_content_tools;
3967 }
3968 }
Aniket Dhokf6d7ed82019-01-04 14:05:57 +05303969 cpi->is_screen_content_type = (cm->allow_screen_content_tools != 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003970}
3971
Yaowu Xuf883b422016-08-30 14:01:10 -07003972static void set_size_dependent_vars(AV1_COMP *cpi, int *q, int *bottom_index,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003973 int *top_index) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003974 AV1_COMMON *const cm = &cpi->common;
3975 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003976
3977 // Setup variables that depend on the dimensions of the frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07003978 av1_set_speed_features_framesize_dependent(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003979
Sebastien Alaiwan41cae6a2018-01-12 12:22:29 +01003980 // Decide q and q bounds.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07003981 *q = av1_rc_pick_q_and_bounds(cpi, cm->width, cm->height, bottom_index,
3982 top_index);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003983
James Zern01a9d702017-08-25 19:09:33 +00003984 if (!frame_is_intra_only(cm)) {
RogerZhou3b635242017-09-19 10:06:46 -07003985 set_high_precision_mv(cpi, (*q) < HIGH_PRECISION_MV_QTHRESH,
RogerZhou10a03802017-10-26 11:49:48 -07003986 cpi->common.cur_frame_force_integer_mv);
James Zern01a9d702017-08-25 19:09:33 +00003987 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003988
3989 // Configure experimental use of segmentation for enhanced coding of
3990 // static regions if indicated.
3991 // Only allowed in the second pass of a two pass encode, as it requires
3992 // lagged coding, and if the relevant speed feature flag is set.
3993 if (oxcf->pass == 2 && cpi->sf.static_segmentation)
3994 configure_static_seg_features(cpi);
3995}
3996
Yaowu Xuf883b422016-08-30 14:01:10 -07003997static void init_motion_estimation(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003998 int y_stride = cpi->scaled_source.y_stride;
3999
4000 if (cpi->sf.mv.search_method == NSTEP) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004001 av1_init3smotion_compensation(&cpi->ss_cfg, y_stride);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004002 } else if (cpi->sf.mv.search_method == DIAMOND) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004003 av1_init_dsmotion_compensation(&cpi->ss_cfg, y_stride);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004004 }
4005}
4006
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07004007#define COUPLED_CHROMA_FROM_LUMA_RESTORATION 0
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01004008static void set_restoration_unit_size(int width, int height, int sx, int sy,
4009 RestorationInfo *rst) {
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08004010 (void)width;
4011 (void)height;
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07004012 (void)sx;
4013 (void)sy;
4014#if COUPLED_CHROMA_FROM_LUMA_RESTORATION
4015 int s = AOMMIN(sx, sy);
4016#else
4017 int s = 0;
4018#endif // !COUPLED_CHROMA_FROM_LUMA_RESTORATION
4019
Debargha Mukherjee5f7f3672017-08-12 10:22:49 -07004020 if (width * height > 352 * 288)
Urvang Joshi813186b2018-03-08 15:38:46 -08004021 rst[0].restoration_unit_size = RESTORATION_UNITSIZE_MAX;
Debargha Mukherjee5f7f3672017-08-12 10:22:49 -07004022 else
Urvang Joshi813186b2018-03-08 15:38:46 -08004023 rst[0].restoration_unit_size = (RESTORATION_UNITSIZE_MAX >> 1);
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01004024 rst[1].restoration_unit_size = rst[0].restoration_unit_size >> s;
4025 rst[2].restoration_unit_size = rst[1].restoration_unit_size;
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08004026}
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08004027
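// Reset the reference frame map and the buffer pool reference counts, and
// (re)initialize the per-buffer hash tables used for hash-based motion search
// when screen-content tools may be used.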
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05304028static void init_ref_frame_bufs(AV1_COMP *cpi) {
4029 AV1_COMMON *const cm = &cpi->common;
Cheng Chen46f30c72017-09-07 11:13:33 -07004030 int i;
4031 BufferPool *const pool = cm->buffer_pool;
Jack Haughtonddb80602018-11-21 16:41:49 +00004032 cm->cur_frame = NULL;
Cheng Chen46f30c72017-09-07 11:13:33 -07004033 for (i = 0; i < REF_FRAMES; ++i) {
David Turnere7ebf902018-12-04 14:04:55 +00004034 cm->ref_frame_map[i] = NULL;
Wan-Teh Changd05e0332018-10-03 12:00:43 -07004035 }
4036 for (i = 0; i < FRAME_BUFFERS; ++i) {
Cheng Chen46f30c72017-09-07 11:13:33 -07004037 pool->frame_bufs[i].ref_count = 0;
4038 }
RogerZhou86902d02018-02-28 15:29:16 -08004039 if (cm->seq_params.force_screen_content_tools) {
Hui Su2d5fd742018-02-21 18:10:37 -08004040 for (i = 0; i < FRAME_BUFFERS; ++i) {
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05304041 av1_hash_table_init(&pool->frame_bufs[i].hash_table, &cpi->td.mb);
Hui Su2d5fd742018-02-21 18:10:37 -08004042 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004043 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004044}
4045
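// (Re)allocate raw source, reference and utility buffers the first time
// encoding starts or whenever the bit depth or chroma subsampling changes.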
Yaowu Xud3e7c682017-12-21 14:08:25 -08004046static void check_initial_width(AV1_COMP *cpi, int use_highbitdepth,
Cheng Chen46f30c72017-09-07 11:13:33 -07004047 int subsampling_x, int subsampling_y) {
4048 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004049 SequenceHeader *const seq_params = &cm->seq_params;
Cheng Chen46f30c72017-09-07 11:13:33 -07004050
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004051 if (!cpi->initial_width || seq_params->use_highbitdepth != use_highbitdepth ||
4052 seq_params->subsampling_x != subsampling_x ||
4053 seq_params->subsampling_y != subsampling_y) {
4054 seq_params->subsampling_x = subsampling_x;
4055 seq_params->subsampling_y = subsampling_y;
4056 seq_params->use_highbitdepth = use_highbitdepth;
Cheng Chen46f30c72017-09-07 11:13:33 -07004057
4058 alloc_raw_frame_buffers(cpi);
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05304059 init_ref_frame_bufs(cpi);
Cheng Chen46f30c72017-09-07 11:13:33 -07004060 alloc_util_frame_buffers(cpi);
4061
4062 init_motion_estimation(cpi); // TODO(agrange) This can be removed.
4063
4064 cpi->initial_width = cm->width;
4065 cpi->initial_height = cm->height;
4066 cpi->initial_mbs = cm->MBs;
4067 }
4068}
4069
4070// Returns 1 if the assigned width or height was <= 0.
4071static int set_size_literal(AV1_COMP *cpi, int width, int height) {
4072 AV1_COMMON *cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004073 const int num_planes = av1_num_planes(cm);
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004074 check_initial_width(cpi, cm->seq_params.use_highbitdepth,
4075 cm->seq_params.subsampling_x,
4076 cm->seq_params.subsampling_y);
Cheng Chen46f30c72017-09-07 11:13:33 -07004077
4078 if (width <= 0 || height <= 0) return 1;
4079
4080 cm->width = width;
Cheng Chen46f30c72017-09-07 11:13:33 -07004081 cm->height = height;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004082
4083 if (cpi->initial_width && cpi->initial_height &&
4084 (cm->width > cpi->initial_width || cm->height > cpi->initial_height)) {
4085 av1_free_context_buffers(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004086 av1_free_pc_tree(&cpi->td, num_planes);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004087 alloc_compressor_data(cpi);
4088 realloc_segmentation_maps(cpi);
4089 cpi->initial_width = cpi->initial_height = 0;
Cheng Chen46f30c72017-09-07 11:13:33 -07004090 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004091 update_frame_size(cpi);
4092
4093 return 0;
4094}
4095
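// Apply a (possibly new) coded frame size: reallocate the frame, context and
// restoration buffers as needed and recompute the scale factors for every
// active reference frame.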
Fergus Simpsonbc189932017-05-16 17:02:39 -07004096static void set_frame_size(AV1_COMP *cpi, int width, int height) {
Fergus Simpsonbc189932017-05-16 17:02:39 -07004097 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004098 const SequenceHeader *const seq_params = &cm->seq_params;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004099 const int num_planes = av1_num_planes(cm);
Fergus Simpsonbc189932017-05-16 17:02:39 -07004100 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004101 int ref_frame;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004102
Fergus Simpsonbc189932017-05-16 17:02:39 -07004103 if (width != cm->width || height != cm->height) {
Fergus Simpson3502d082017-04-10 12:25:07 -07004104 // There has been a change in the encoded frame size
Cheng Chen46f30c72017-09-07 11:13:33 -07004105 set_size_literal(cpi, width, height);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004106 set_mv_search_params(cpi);
Urvang Joshic8b52d52018-03-23 13:16:51 -07004107 // Recalculate 'all_lossless' in case super-resolution was (un)selected.
Cheng Chen09c83a52018-06-05 12:27:36 -07004108 cm->all_lossless = cm->coded_lossless && !av1_superres_scaled(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004109 }
4110
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004111 if (cpi->oxcf.pass == 2) {
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004112 av1_set_target_rate(cpi, cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004113 }
4114
David Turnere7ebf902018-12-04 14:04:55 +00004115 alloc_frame_mvs(cm, cm->cur_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004116
Cherma Rajan A71d20db2018-04-27 11:15:32 +05304117 // Allocate above context buffers
Cherma Rajan Af1479082018-05-09 14:26:34 +05304118 if (cm->num_allocated_above_context_planes < av1_num_planes(cm) ||
4119 cm->num_allocated_above_context_mi_col < cm->mi_cols ||
Cherma Rajan A71d20db2018-04-27 11:15:32 +05304120 cm->num_allocated_above_contexts < cm->tile_rows) {
4121 av1_free_above_context_buffers(cm, cm->num_allocated_above_contexts);
4122 if (av1_alloc_above_context_buffers(cm, cm->tile_rows))
4123 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
4124 "Failed to allocate context buffers");
4125 }
4126
Yaowu Xuc27fc142016-08-22 16:08:15 -07004127 // Reset the frame pointers to the current frame size.
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004128 if (aom_realloc_frame_buffer(
Jack Haughtonddb80602018-11-21 16:41:49 +00004129 &cm->cur_frame->buf, cm->width, cm->height, seq_params->subsampling_x,
4130 seq_params->subsampling_y, seq_params->use_highbitdepth,
Satish Kumar Suman29909962019-01-09 10:31:21 +05304131 cpi->oxcf.border_in_pixels, cm->byte_alignment, NULL, NULL, NULL))
Yaowu Xuf883b422016-08-30 14:01:10 -07004132 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004133 "Failed to allocate frame buffer");
4134
Rupert Swarbrickf88bc042017-10-18 10:45:51 +01004135 const int frame_width = cm->superres_upscaled_width;
4136 const int frame_height = cm->superres_upscaled_height;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004137 set_restoration_unit_size(frame_width, frame_height,
4138 seq_params->subsampling_x,
4139 seq_params->subsampling_y, cm->rst_info);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004140 for (int i = 0; i < num_planes; ++i)
Rupert Swarbrick1a96c3f2017-10-24 11:55:00 +01004141 cm->rst_info[i].frame_restoration_type = RESTORE_NONE;
Rupert Swarbrickf88bc042017-10-18 10:45:51 +01004142
4143 av1_alloc_restoration_buffers(cm);
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004144 alloc_util_frame_buffers(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004145 init_motion_estimation(cpi);
4146
4147 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00004148 RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
David Turnere7ebf902018-12-04 14:04:55 +00004149 if (buf != NULL) {
David Turnera21966b2018-12-05 14:48:49 +00004150 struct scale_factors *sf = get_ref_scale_factors(cm, ref_frame);
4151 av1_setup_scale_factors_for_frame(sf, buf->buf.y_crop_width,
David Turner1bcefb32018-11-19 17:54:00 +00004152 buf->buf.y_crop_height, cm->width,
Debargha Mukherjeee242a812018-03-07 21:43:09 -08004153 cm->height);
David Turnera21966b2018-12-05 14:48:49 +00004154 if (av1_is_scaled(sf)) aom_extend_frame_borders(&buf->buf, num_planes);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004155 }
4156 }
Zoe Liu7b1ec7a2017-05-24 22:28:24 -07004157
Hui Su5ebd8702018-01-08 18:09:20 -08004158 av1_setup_scale_factors_for_frame(&cm->sf_identity, cm->width, cm->height,
Debargha Mukherjeee242a812018-03-07 21:43:09 -08004159 cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004160
4161 set_ref_ptrs(cm, xd, LAST_FRAME, LAST_FRAME);
4162}
4163
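// Returns the resize denominator (relative to SCALE_NUMERATOR) to use for the
// next frame, based on the configured resize mode.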
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004164static uint8_t calculate_next_resize_scale(const AV1_COMP *cpi) {
4165  // Arbitrarily chosen fixed seed for the pseudo-random scale selection.
4166 static unsigned int seed = 56789;
4167 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Urvang Joshide71d142017-10-05 12:12:15 -07004168 if (oxcf->pass == 1) return SCALE_NUMERATOR;
4169 uint8_t new_denom = SCALE_NUMERATOR;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004170
Debargha Mukherjee2b7c2b32018-04-10 07:35:28 -07004171 if (cpi->common.seq_params.reduced_still_picture_hdr) return SCALE_NUMERATOR;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004172 switch (oxcf->resize_mode) {
Urvang Joshide71d142017-10-05 12:12:15 -07004173 case RESIZE_NONE: new_denom = SCALE_NUMERATOR; break;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004174 case RESIZE_FIXED:
David Turnerd2a592e2018-11-16 14:59:31 +00004175 if (cpi->common.current_frame.frame_type == KEY_FRAME)
Urvang Joshide71d142017-10-05 12:12:15 -07004176 new_denom = oxcf->resize_kf_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004177 else
Urvang Joshide71d142017-10-05 12:12:15 -07004178 new_denom = oxcf->resize_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004179 break;
Urvang Joshide71d142017-10-05 12:12:15 -07004180 case RESIZE_RANDOM: new_denom = lcg_rand16(&seed) % 9 + 8; break;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004181 default: assert(0);
4182 }
Urvang Joshide71d142017-10-05 12:12:15 -07004183 return new_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004184}
4185
Debargha Mukherjeee3cd5a52018-11-29 11:05:22 -08004186#define ENERGY_BY_Q2_THRESH 0.01
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004187#define ENERGY_BY_AC_THRESH 0.2
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004188
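// Selects a superres denominator from the horizontal frequency energy of the
// source: scanning from the highest band downwards, bands whose energy stays
// below min(threshq * q^2, threshp * energy[1]) are dropped, and each dropped
// band increases the returned denominator by one (from SCALE_NUMERATOR up to
// 2 * SCALE_NUMERATOR).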
4189static uint8_t get_superres_denom_from_qindex_energy(int qindex, double *energy,
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004190 double threshq,
4191 double threshp) {
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004192 const double q = av1_convert_qindex_to_q(qindex, AOM_BITS_8);
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004193 const double tq = threshq * q * q;
4194 const double tp = threshp * energy[1];
4195 const double thresh = AOMMIN(tq, tp);
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004196 int k;
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004197 for (k = 16; k > 8; --k) {
4198 if (energy[k - 1] > thresh) break;
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004199 }
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004200 return 3 * SCALE_NUMERATOR - k;
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004201}
4202
4203static uint8_t get_superres_denom_for_qindex(const AV1_COMP *cpi, int qindex) {
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004204 double energy[16];
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004205 analyze_hor_freq(cpi, energy);
Debargha Mukherjee21eb0402018-12-03 12:10:59 -08004206 /*
4207 printf("\nenergy = [");
4208 for (int k = 1; k < 16; ++k) printf("%f, ", energy[k]);
4209 printf("]\n");
4210 */
4211 return get_superres_denom_from_qindex_energy(
4212 qindex, energy, ENERGY_BY_Q2_THRESH, ENERGY_BY_AC_THRESH);
Debargha Mukherjeef48b0d22018-11-20 12:23:43 -08004213}
4214
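// Returns the superres denominator (relative to SCALE_NUMERATOR) to use for
// the next frame, based on the configured superres mode and, for
// SUPERRES_QTHRESH, on the chosen q and the frame's horizontal frequency
// energy.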
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004215static uint8_t calculate_next_superres_scale(AV1_COMP *cpi) {
4216  // Arbitrarily chosen fixed seed for the pseudo-random scale selection.
4217 static unsigned int seed = 34567;
4218 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Urvang Joshide71d142017-10-05 12:12:15 -07004219 if (oxcf->pass == 1) return SCALE_NUMERATOR;
4220 uint8_t new_denom = SCALE_NUMERATOR;
Urvang Joshi2c92b072018-03-19 17:23:31 -07004221
4222 // Make sure that superres mode of the frame is consistent with the
4223 // sequence-level flag.
4224 assert(IMPLIES(oxcf->superres_mode != SUPERRES_NONE,
4225 cpi->common.seq_params.enable_superres));
4226 assert(IMPLIES(!cpi->common.seq_params.enable_superres,
4227 oxcf->superres_mode == SUPERRES_NONE));
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004228
4229 switch (oxcf->superres_mode) {
Urvang Joshide71d142017-10-05 12:12:15 -07004230 case SUPERRES_NONE: new_denom = SCALE_NUMERATOR; break;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004231 case SUPERRES_FIXED:
David Turnerd2a592e2018-11-16 14:59:31 +00004232 if (cpi->common.current_frame.frame_type == KEY_FRAME)
Urvang Joshide71d142017-10-05 12:12:15 -07004233 new_denom = oxcf->superres_kf_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004234 else
Urvang Joshide71d142017-10-05 12:12:15 -07004235 new_denom = oxcf->superres_scale_denominator;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004236 break;
Urvang Joshide71d142017-10-05 12:12:15 -07004237 case SUPERRES_RANDOM: new_denom = lcg_rand16(&seed) % 9 + 8; break;
Urvang Joshif1fa6862018-01-08 16:39:33 -08004238 case SUPERRES_QTHRESH: {
Debargha Mukherjeedf713102018-10-02 12:33:32 -07004239 // Do not use superres when screen content tools are used.
4240 if (cpi->common.allow_screen_content_tools) break;
Debargha Mukherjee2b2c5fd2018-11-14 13:21:24 -08004241 if (oxcf->rc_mode == AOM_VBR || oxcf->rc_mode == AOM_CQ)
4242 av1_set_target_rate(cpi, cpi->oxcf.width, cpi->oxcf.height);
Urvang Joshi2c92b072018-03-19 17:23:31 -07004243 int bottom_index, top_index;
4244 const int q = av1_rc_pick_q_and_bounds(
4245 cpi, cpi->oxcf.width, cpi->oxcf.height, &bottom_index, &top_index);
Debargha Mukherjeef48b0d22018-11-20 12:23:43 -08004246
Debargha Mukherjeeacd9b7d2018-11-26 15:15:05 -08004247 const int qthresh = (frame_is_intra_only(&cpi->common))
4248 ? oxcf->superres_kf_qthresh
4249 : oxcf->superres_qthresh;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004250 if (q < qthresh) {
Urvang Joshide71d142017-10-05 12:12:15 -07004251 new_denom = SCALE_NUMERATOR;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004252 } else {
Debargha Mukherjeee3cd5a52018-11-29 11:05:22 -08004253 new_denom = get_superres_denom_for_qindex(cpi, q);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004254 }
4255 break;
Urvang Joshif1fa6862018-01-08 16:39:33 -08004256 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004257 default: assert(0);
4258 }
Urvang Joshide71d142017-10-05 12:12:15 -07004259 return new_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004260}
4261
Urvang Joshide71d142017-10-05 12:12:15 -07004262static int dimension_is_ok(int orig_dim, int resized_dim, int denom) {
4263 return (resized_dim * SCALE_NUMERATOR >= orig_dim * denom / 2);
4264}
4265
4266static int dimensions_are_ok(int owidth, int oheight, size_params_type *rsz) {
Urvang Joshi94ad3702017-12-06 11:38:08 -08004267 // Only need to check the width, as scaling is horizontal only.
4268 (void)oheight;
4269 return dimension_is_ok(owidth, rsz->resize_width, rsz->superres_denom);
Urvang Joshide71d142017-10-05 12:12:15 -07004270}
4271
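// Adjusts the resize and/or superres denominators (only those chosen by a
// RANDOM mode may be altered) until the combined downscaling constraint in
// dimensions_are_ok() is met; returns 0 if no conforming combination exists.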
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004272static int validate_size_scales(RESIZE_MODE resize_mode,
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004273 SUPERRES_MODE superres_mode, int owidth,
4274 int oheight, size_params_type *rsz) {
Urvang Joshide71d142017-10-05 12:12:15 -07004275 if (dimensions_are_ok(owidth, oheight, rsz)) { // Nothing to do.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004276 return 1;
Urvang Joshide71d142017-10-05 12:12:15 -07004277 }
4278
Urvang Joshi69fde2e2017-10-09 15:34:18 -07004279 // Calculate current resize scale.
Urvang Joshide71d142017-10-05 12:12:15 -07004280 int resize_denom =
4281 AOMMAX(DIVIDE_AND_ROUND(owidth * SCALE_NUMERATOR, rsz->resize_width),
4282 DIVIDE_AND_ROUND(oheight * SCALE_NUMERATOR, rsz->resize_height));
4283
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004284 if (resize_mode != RESIZE_RANDOM && superres_mode == SUPERRES_RANDOM) {
Urvang Joshide71d142017-10-05 12:12:15 -07004285 // Alter superres scale as needed to enforce conformity.
4286 rsz->superres_denom =
4287 (2 * SCALE_NUMERATOR * SCALE_NUMERATOR) / resize_denom;
4288 if (!dimensions_are_ok(owidth, oheight, rsz)) {
4289 if (rsz->superres_denom > SCALE_NUMERATOR) --rsz->superres_denom;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004290 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004291 } else if (resize_mode == RESIZE_RANDOM && superres_mode != SUPERRES_RANDOM) {
Urvang Joshide71d142017-10-05 12:12:15 -07004292 // Alter resize scale as needed to enforce conformity.
4293 resize_denom =
4294 (2 * SCALE_NUMERATOR * SCALE_NUMERATOR) / rsz->superres_denom;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004295 rsz->resize_width = owidth;
4296 rsz->resize_height = oheight;
4297 av1_calculate_scaled_size(&rsz->resize_width, &rsz->resize_height,
Urvang Joshide71d142017-10-05 12:12:15 -07004298 resize_denom);
4299 if (!dimensions_are_ok(owidth, oheight, rsz)) {
4300 if (resize_denom > SCALE_NUMERATOR) {
4301 --resize_denom;
4302 rsz->resize_width = owidth;
4303 rsz->resize_height = oheight;
4304 av1_calculate_scaled_size(&rsz->resize_width, &rsz->resize_height,
4305 resize_denom);
4306 }
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004307 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004308 } else if (resize_mode == RESIZE_RANDOM && superres_mode == SUPERRES_RANDOM) {
Urvang Joshide71d142017-10-05 12:12:15 -07004309 // Alter both resize and superres scales as needed to enforce conformity.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004310 do {
Urvang Joshide71d142017-10-05 12:12:15 -07004311 if (resize_denom > rsz->superres_denom)
4312 --resize_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004313 else
Urvang Joshide71d142017-10-05 12:12:15 -07004314 --rsz->superres_denom;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004315 rsz->resize_width = owidth;
4316 rsz->resize_height = oheight;
4317 av1_calculate_scaled_size(&rsz->resize_width, &rsz->resize_height,
Urvang Joshide71d142017-10-05 12:12:15 -07004318 resize_denom);
4319 } while (!dimensions_are_ok(owidth, oheight, rsz) &&
4320 (resize_denom > SCALE_NUMERATOR ||
4321 rsz->superres_denom > SCALE_NUMERATOR));
Urvang Joshif1fa6862018-01-08 16:39:33 -08004322 } else { // We are allowed to alter neither resize scale nor superres
4323 // scale.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004324 return 0;
4325 }
Urvang Joshide71d142017-10-05 12:12:15 -07004326 return dimensions_are_ok(owidth, oheight, rsz);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004327}
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004328
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004329// Calculates resize and superres params for next frame
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004330size_params_type av1_calculate_next_size_params(AV1_COMP *cpi) {
4331 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Debargha Mukherjee3a4959f2018-02-26 15:34:03 -08004332 size_params_type rsz = { oxcf->width, oxcf->height, SCALE_NUMERATOR };
Urvang Joshide71d142017-10-05 12:12:15 -07004333 int resize_denom;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004334 if (oxcf->pass == 1) return rsz;
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004335 if (cpi->resize_pending_width && cpi->resize_pending_height) {
4336 rsz.resize_width = cpi->resize_pending_width;
4337 rsz.resize_height = cpi->resize_pending_height;
4338 cpi->resize_pending_width = cpi->resize_pending_height = 0;
4339 } else {
Urvang Joshide71d142017-10-05 12:12:15 -07004340 resize_denom = calculate_next_resize_scale(cpi);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004341 rsz.resize_width = cpi->oxcf.width;
4342 rsz.resize_height = cpi->oxcf.height;
4343 av1_calculate_scaled_size(&rsz.resize_width, &rsz.resize_height,
Urvang Joshide71d142017-10-05 12:12:15 -07004344 resize_denom);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004345 }
Urvang Joshide71d142017-10-05 12:12:15 -07004346 rsz.superres_denom = calculate_next_superres_scale(cpi);
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004347 if (!validate_size_scales(oxcf->resize_mode, oxcf->superres_mode, oxcf->width,
4348 oxcf->height, &rsz))
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004349 assert(0 && "Invalid scale parameters");
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004350 return rsz;
4351}
4352
Urvang Joshi22b150b2019-01-10 14:32:32 -08004353static void setup_frame_size_from_params(AV1_COMP *cpi,
4354 const size_params_type *rsz) {
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004355 int encode_width = rsz->resize_width;
4356 int encode_height = rsz->resize_height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004357
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004358 AV1_COMMON *cm = &cpi->common;
4359 cm->superres_upscaled_width = encode_width;
4360 cm->superres_upscaled_height = encode_height;
Urvang Joshide71d142017-10-05 12:12:15 -07004361 cm->superres_scale_denominator = rsz->superres_denom;
Urvang Joshi69fde2e2017-10-09 15:34:18 -07004362 av1_calculate_scaled_superres_size(&encode_width, &encode_height,
4363 rsz->superres_denom);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004364 set_frame_size(cpi, encode_width, encode_height);
4365}
4366
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004367static void setup_frame_size(AV1_COMP *cpi) {
Urvang Joshi22b150b2019-01-10 14:32:32 -08004368 // Reset superres params from previous frame.
4369 cpi->common.superres_scale_denominator = SCALE_NUMERATOR;
4370 const size_params_type rsz = av1_calculate_next_size_params(cpi);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004371 setup_frame_size_from_params(cpi, &rsz);
4372}
4373
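// After encoding, upscales the reconstructed frame back to the
// superres-upscaled resolution. When regular resizing is not in effect the
// source pointer is restored to the unscaled source; otherwise the source is
// downscaled to match the upscaled resolution.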
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004374static void superres_post_encode(AV1_COMP *cpi) {
4375 AV1_COMMON *cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004376 const int num_planes = av1_num_planes(cm);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004377
Cheng Chen09c83a52018-06-05 12:27:36 -07004378 if (!av1_superres_scaled(cm)) return;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004379
Urvang Joshid6b5d512018-03-20 13:34:38 -07004380 assert(cpi->oxcf.enable_superres);
4381 assert(!is_lossless_requested(&cpi->oxcf));
Urvang Joshic8b52d52018-03-23 13:16:51 -07004382 assert(!cm->all_lossless);
Urvang Joshid6b5d512018-03-20 13:34:38 -07004383
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004384 av1_superres_upscale(cm, NULL);
4385
4386 // If regular resizing is occurring the source will need to be downscaled to
4387 // match the upscaled superres resolution. Otherwise the original source is
4388 // used.
Cheng Chen09c83a52018-06-05 12:27:36 -07004389 if (!av1_resize_scaled(cm)) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004390 cpi->source = cpi->unscaled_source;
4391 if (cpi->last_source != NULL) cpi->last_source = cpi->unscaled_last_source;
4392 } else {
Fergus Simpsonabd43432017-06-12 15:54:43 -07004393 assert(cpi->unscaled_source->y_crop_width != cm->superres_upscaled_width);
4394 assert(cpi->unscaled_source->y_crop_height != cm->superres_upscaled_height);
Urvang Joshif1fa6862018-01-08 16:39:33 -08004395 // Do downscale. cm->(width|height) has been updated by
4396 // av1_superres_upscale
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004397 if (aom_realloc_frame_buffer(
4398 &cpi->scaled_source, cm->superres_upscaled_width,
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004399 cm->superres_upscaled_height, cm->seq_params.subsampling_x,
4400 cm->seq_params.subsampling_y, cm->seq_params.use_highbitdepth,
4401 AOM_BORDER_IN_PIXELS, cm->byte_alignment, NULL, NULL, NULL))
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004402 aom_internal_error(
4403 &cm->error, AOM_CODEC_MEM_ERROR,
4404 "Failed to reallocate scaled source buffer for superres");
4405 assert(cpi->scaled_source.y_crop_width == cm->superres_upscaled_width);
4406 assert(cpi->scaled_source.y_crop_height == cm->superres_upscaled_height);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004407 av1_resize_and_extend_frame(cpi->unscaled_source, &cpi->scaled_source,
Urvang Joshi20cf30e2018-07-19 02:33:58 -07004408 (int)cm->seq_params.bit_depth, num_planes);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004409 cpi->source = &cpi->scaled_source;
4410 }
4411}
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004412
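// Runs the in-loop filtering stages on the reconstructed frame: deblocking,
// CDEF, superres upscaling and loop restoration, each skipped when disabled
// or when lossless / large-scale-tile coding rules it out.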
4413static void loopfilter_frame(AV1_COMP *cpi, AV1_COMMON *cm) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004414 const int num_planes = av1_num_planes(cm);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004415 MACROBLOCKD *xd = &cpi->td.mb.e_mbd;
Urvang Joshi14072aa2018-03-21 17:43:36 -07004416
Urvang Joshic8b52d52018-03-23 13:16:51 -07004417 assert(IMPLIES(is_lossless_requested(&cpi->oxcf),
4418 cm->coded_lossless && cm->all_lossless));
4419
4420 const int no_loopfilter = cm->coded_lossless || cm->large_scale_tile;
4421 const int no_cdef =
Debargha Mukherjee98a311c2018-03-25 16:33:11 -07004422 !cm->seq_params.enable_cdef || cm->coded_lossless || cm->large_scale_tile;
4423 const int no_restoration = !cm->seq_params.enable_restoration ||
4424 cm->all_lossless || cm->large_scale_tile;
Urvang Joshi14072aa2018-03-21 17:43:36 -07004425
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004426 struct loopfilter *lf = &cm->lf;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004427
4428 if (no_loopfilter) {
Cheng Chen179479f2017-08-04 10:56:39 -07004429 lf->filter_level[0] = 0;
4430 lf->filter_level[1] = 0;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004431 } else {
4432 struct aom_usec_timer timer;
4433
4434 aom_clear_system_state();
4435
4436 aom_usec_timer_start(&timer);
4437
4438 av1_pick_filter_level(cpi->source, cpi, cpi->sf.lpf_pick);
4439
4440 aom_usec_timer_mark(&timer);
4441 cpi->time_pick_lpf += aom_usec_timer_elapsed(&timer);
4442 }
4443
Debargha Mukherjee2382b142018-02-26 14:31:32 -08004444 if (lf->filter_level[0] || lf->filter_level[1]) {
Deepa K G964e72e2018-05-16 16:56:01 +05304445 if (cpi->num_workers > 1)
David Turnerc29e1a92018-12-06 14:10:14 +00004446 av1_loop_filter_frame_mt(&cm->cur_frame->buf, cm, xd, 0, num_planes, 0,
Cheng Chene3600cd2018-09-21 18:45:42 -07004447#if LOOP_FILTER_BITMASK
4448 0,
4449#endif
Deepa K G964e72e2018-05-16 16:56:01 +05304450 cpi->workers, cpi->num_workers,
4451 &cpi->lf_row_sync);
4452 else
David Turnerc29e1a92018-12-06 14:10:14 +00004453 av1_loop_filter_frame(&cm->cur_frame->buf, cm, xd,
Cheng Chen84b09932018-08-12 17:35:13 -07004454#if LOOP_FILTER_BITMASK
4455 0,
Cheng Chen8ab1f442018-04-27 18:01:52 -07004456#endif
Cheng Chen84b09932018-08-12 17:35:13 -07004457 0, num_planes, 0);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004458 }
Debargha Mukherjeee168a782017-08-31 12:30:10 -07004459
Yaowu Xu35ee2342017-11-08 11:50:46 -08004460 if (!no_restoration)
David Turnerc29e1a92018-12-06 14:10:14 +00004461 av1_loop_restoration_save_boundary_lines(&cm->cur_frame->buf, cm, 0);
Ola Hugosson1e7f2d02017-09-22 21:36:26 +02004462
Yaowu Xu35ee2342017-11-08 11:50:46 -08004463 if (no_cdef) {
David Turnerebf96f42018-11-14 16:57:57 +00004464 cm->cdef_info.cdef_bits = 0;
4465 cm->cdef_info.cdef_strengths[0] = 0;
4466 cm->cdef_info.nb_cdef_strengths = 1;
4467 cm->cdef_info.cdef_uv_strengths[0] = 0;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004468 } else {
Steinar Midtskogen59782122017-07-20 08:49:43 +02004469 // Find CDEF parameters
David Turnerc29e1a92018-12-06 14:10:14 +00004470 av1_cdef_search(&cm->cur_frame->buf, cpi->source, cm, xd,
Debargha Mukherjeed7338aa2017-11-04 07:34:50 -07004471 cpi->sf.fast_cdef_search);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004472
4473 // Apply the filter
David Turnerc29e1a92018-12-06 14:10:14 +00004474 av1_cdef_frame(&cm->cur_frame->buf, cm, xd);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004475 }
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004476
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004477 superres_post_encode(cpi);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004478
Yaowu Xu35ee2342017-11-08 11:50:46 -08004479 if (no_restoration) {
4480 cm->rst_info[0].frame_restoration_type = RESTORE_NONE;
4481 cm->rst_info[1].frame_restoration_type = RESTORE_NONE;
4482 cm->rst_info[2].frame_restoration_type = RESTORE_NONE;
4483 } else {
David Turnerc29e1a92018-12-06 14:10:14 +00004484 av1_loop_restoration_save_boundary_lines(&cm->cur_frame->buf, cm, 1);
Yaowu Xu35ee2342017-11-08 11:50:46 -08004485 av1_pick_filter_restoration(cpi->source, cpi);
4486 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
4487 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
4488 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05304489 if (cpi->num_workers > 1)
David Turnerc29e1a92018-12-06 14:10:14 +00004490 av1_loop_restoration_filter_frame_mt(&cm->cur_frame->buf, cm, 0,
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05304491 cpi->workers, cpi->num_workers,
4492 &cpi->lr_row_sync, &cpi->lr_ctxt);
4493 else
David Turnerc29e1a92018-12-06 14:10:14 +00004494 av1_loop_restoration_filter_frame(&cm->cur_frame->buf, cm, 0,
Ravi Chaudharye2aa4012018-06-04 14:20:00 +05304495 &cpi->lr_ctxt);
Yaowu Xu35ee2342017-11-08 11:50:46 -08004496 }
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07004497 }
Fergus Simpsonbc189932017-05-16 17:02:39 -07004498}
4499
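// Computes the bitmask of reference frame slots that the current frame will
// overwrite.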
David Turner996b2c12018-12-07 15:52:30 +00004500static int get_refresh_frame_flags(const AV1_COMP *const cpi) {
4501 const AV1_COMMON *const cm = &cpi->common;
4502
4503 // Switch frames and shown key-frames overwrite all reference slots
4504 if ((cm->current_frame.frame_type == KEY_FRAME && cm->show_frame) ||
4505 frame_is_sframe(cm))
4506 return 0xFF;
4507
4508 int refresh_mask = 0;
4509
4510 // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
4511 // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
4512 // the 3 LAST reference frames will be updated accordingly, i.e.:
4513 // (1) The original virtual index for LAST3_FRAME will become the new virtual
4514 // index for LAST_FRAME; and
4515 // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
4516 // shifted and become the new virtual indexes for LAST2_FRAME and
4517 // LAST3_FRAME.
4518 refresh_mask |=
4519 (cpi->refresh_last_frame << get_ref_frame_map_idx(cm, LAST3_FRAME));
4520
4521#if USE_SYMM_MULTI_LAYER
4522 const int bwd_ref_frame =
4523 (cpi->new_bwdref_update_rule == 1) ? EXTREF_FRAME : BWDREF_FRAME;
4524#else
4525 const int bwd_ref_frame = BWDREF_FRAME;
4526#endif
4527 refresh_mask |=
4528 (cpi->refresh_bwd_ref_frame << get_ref_frame_map_idx(cm, bwd_ref_frame));
4529
4530 refresh_mask |=
4531 (cpi->refresh_alt2_ref_frame << get_ref_frame_map_idx(cm, ALTREF2_FRAME));
4532
4533 if (av1_preserve_existing_gf(cpi)) {
4534 // We have decided to preserve the previously existing golden frame as our
4535 // new ARF frame. However, in the short term we leave it in the GF slot and,
4536 // if we're updating the GF with the current decoded frame, we save it
4537 // instead to the ARF slot.
 4538    // Later, in the function encoder.c:update_reference_frames() we
4539 // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
4540 // there so that it can be done outside of the recode loop.
4541 // Note: This is highly specific to the use of ARF as a forward reference,
4542 // and this needs to be generalized as other uses are implemented
4543 // (like RTC/temporal scalability).
4544
4545 if (!cpi->preserve_arf_as_gld) {
4546 refresh_mask |= (cpi->refresh_golden_frame
4547 << get_ref_frame_map_idx(cm, ALTREF_FRAME));
4548 }
4549 } else {
4550 refresh_mask |=
4551 (cpi->refresh_golden_frame << get_ref_frame_map_idx(cm, GOLDEN_FRAME));
4552 refresh_mask |=
4553 (cpi->refresh_alt_ref_frame << get_ref_frame_map_idx(cm, ALTREF_FRAME));
4554 }
4555 return refresh_mask;
4556}
4557
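// If the frame-level filter is SWITCHABLE but, according to the counts, only
// one interpolation filter was actually used, signal that filter at the frame
// level (only done for EIGHTTAP_REGULAR).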
David Turnerf2b334c2018-12-13 13:00:55 +00004558static void fix_interp_filter(InterpFilter *const interp_filter,
4559 const FRAME_COUNTS *const counts) {
4560 if (*interp_filter == SWITCHABLE) {
4561 // Check to see if only one of the filters is actually used
4562 int count[SWITCHABLE_FILTERS] = { 0 };
4563 int num_filters_used = 0;
4564 for (int i = 0; i < SWITCHABLE_FILTERS; ++i) {
4565 for (int j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
4566 count[i] += counts->switchable_interp[j][i];
4567 num_filters_used += (count[i] > 0);
4568 }
4569 if (num_filters_used == 1) {
4570 // Only one filter is used. So set the filter at frame level
4571 for (int i = 0; i < SWITCHABLE_FILTERS; ++i) {
4572 if (count[i]) {
4573 if (i == EIGHTTAP_REGULAR) *interp_filter = i;
4574 break;
4575 }
4576 }
4577 }
4578 }
4579}
4580
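// Final bookkeeping before the bitstream is packed: computes the refresh
// flags, updates fb_of_context_type[], handles show_existing_frame buffers,
// copies film grain parameters into the current RefCntBuffer, initialises the
// per-tile contexts from the frame context and fixes up the frame-level
// interpolation filter.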
David Turner996b2c12018-12-07 15:52:30 +00004581static void finalize_encoded_frame(AV1_COMP *const cpi) {
4582 AV1_COMMON *const cm = &cpi->common;
David Turner99e990e2018-12-10 12:54:26 +00004583 CurrentFrame *const current_frame = &cm->current_frame;
David Turner996b2c12018-12-07 15:52:30 +00004584
4585 // This bitfield indicates which reference frame slots will be overwritten by
4586 // the current frame
David Turner99e990e2018-12-10 12:54:26 +00004587 current_frame->refresh_frame_flags = get_refresh_frame_flags(cpi);
4588
4589 if (!encode_show_existing_frame(cm)) {
4590 // Refresh fb_of_context_type[]: see encoder.h for explanation
4591 if (current_frame->frame_type == KEY_FRAME) {
4592 // All ref frames are refreshed, pick one that will live long enough
4593 cpi->fb_of_context_type[REGULAR_FRAME] = 0;
4594 } else {
4595 // If more than one frame is refreshed, it doesn't matter which one we
 4596      // pick, so pick the first. LST sometimes doesn't refresh any: this is ok.
4597 const int current_frame_ref_type = get_current_frame_ref_type(cpi);
4598 for (int i = 0; i < REF_FRAMES; i++) {
4599 if (current_frame->refresh_frame_flags & (1 << i)) {
4600 cpi->fb_of_context_type[current_frame_ref_type] = i;
4601 break;
4602 }
4603 }
4604 }
4605 }
4606
4607 if (!cm->seq_params.reduced_still_picture_hdr &&
4608 encode_show_existing_frame(cm)) {
4609 RefCntBuffer *const frame_to_show =
4610 cm->ref_frame_map[cpi->existing_fb_idx_to_show];
4611
4612 if (frame_to_show == NULL || frame_to_show->ref_count < 1) {
4613 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
4614 "Buffer does not contain a reconstructed frame");
4615 }
4616 assign_frame_buffer_p(&cm->cur_frame, frame_to_show);
4617 if (cm->reset_decoder_state && frame_to_show->frame_type != KEY_FRAME) {
4618 aom_internal_error(
4619 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
4620 "show_existing_frame to reset state on KEY_FRAME only");
4621 }
4622 }
David Turner08f909c2018-12-18 13:29:14 +00004623
4624 if (!encode_show_existing_frame(cm) &&
4625 cm->seq_params.film_grain_params_present &&
4626 (cm->show_frame || cm->showable_frame)) {
 4627    // Copy the current frame's film grain params to its corresponding
4628 // RefCntBuffer slot.
4629 cm->cur_frame->film_grain_params = cm->film_grain_params;
4630
4631 // We must update the parameters if this is not an INTER_FRAME
4632 if (current_frame->frame_type != INTER_FRAME)
4633 cm->cur_frame->film_grain_params.update_parameters = 1;
4634
4635 // Iterate the random seed for the next frame.
4636 cm->film_grain_params.random_seed += 3381;
4637 if (cm->film_grain_params.random_seed == 0)
4638 cm->film_grain_params.random_seed = 7391;
4639 }
David Turnerf2b334c2018-12-13 13:00:55 +00004640
4641 // Initialise all tiles' contexts from the global frame context
4642 for (int tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
4643 for (int tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
4644 const int tile_idx = tile_row * cm->tile_cols + tile_col;
4645 cpi->tile_data[tile_idx].tctx = *cm->fc;
4646 }
4647 }
4648
4649 fix_interp_filter(&cm->interp_filter, cpi->td.counts);
David Turner996b2c12018-12-07 15:52:30 +00004650}
4651
David Turner2f3b5df2019-01-02 14:30:50 +00004652// Called after encode_with_recode_loop() has just encoded a frame and packed
4653// its bitstream. This function works out whether we under- or over-shot
4654// our bitrate target and adjusts q as appropriate. Also decides whether
4655// or not we should do another recode loop, indicated by *loop
4656static void recode_loop_update_q(AV1_COMP *const cpi, int *const loop,
4657 int *const q, int *const q_low,
4658 int *const q_high, const int top_index,
4659 const int bottom_index,
4660 int *const undershoot_seen,
4661 int *const overshoot_seen,
4662 const int loop_at_this_size) {
4663 AV1_COMMON *const cm = &cpi->common;
4664 RATE_CONTROL *const rc = &cpi->rc;
4665
4666 int frame_over_shoot_limit = 0, frame_under_shoot_limit = 0;
4667 av1_rc_compute_frame_size_bounds(cpi, rc->this_frame_target,
4668 &frame_under_shoot_limit,
4669 &frame_over_shoot_limit);
4670 if (frame_over_shoot_limit == 0) frame_over_shoot_limit = 1;
4671
4672 if ((cm->current_frame.frame_type == KEY_FRAME) &&
4673 rc->this_key_frame_forced &&
4674 (rc->projected_frame_size < rc->max_frame_bandwidth)) {
4675 int last_q = *q;
4676 int64_t kf_err;
4677
4678 int64_t high_err_target = cpi->ambient_err;
4679 int64_t low_err_target = cpi->ambient_err >> 1;
4680
4681 if (cm->seq_params.use_highbitdepth) {
4682 kf_err = aom_highbd_get_y_sse(cpi->source, &cm->cur_frame->buf);
4683 } else {
4684 kf_err = aom_get_y_sse(cpi->source, &cm->cur_frame->buf);
4685 }
4686 // Prevent possible divide by zero error below for perfect KF
4687 kf_err += !kf_err;
4688
4689 // The key frame is not good enough or we can afford
4690 // to make it better without undue risk of popping.
4691 if ((kf_err > high_err_target &&
4692 rc->projected_frame_size <= frame_over_shoot_limit) ||
4693 (kf_err > low_err_target &&
4694 rc->projected_frame_size <= frame_under_shoot_limit)) {
4695 // Lower q_high
4696 *q_high = *q > *q_low ? *q - 1 : *q_low;
4697
4698 // Adjust Q
4699 *q = (int)((*q * high_err_target) / kf_err);
4700 *q = AOMMIN(*q, (*q_high + *q_low) >> 1);
4701 } else if (kf_err < low_err_target &&
4702 rc->projected_frame_size >= frame_under_shoot_limit) {
4703 // The key frame is much better than the previous frame
4704 // Raise q_low
4705 *q_low = *q < *q_high ? *q + 1 : *q_high;
4706
4707 // Adjust Q
4708 *q = (int)((*q * low_err_target) / kf_err);
4709 *q = AOMMIN(*q, (*q_high + *q_low + 1) >> 1);
4710 }
4711
4712 // Clamp Q to upper and lower limits:
4713 *q = clamp(*q, *q_low, *q_high);
4714
4715 *loop = *q != last_q;
4716 } else if (recode_loop_test(cpi, frame_over_shoot_limit,
4717 frame_under_shoot_limit, *q,
4718 AOMMAX(*q_high, top_index), bottom_index)) {
4719 // Is the projected frame size out of range and are we allowed
4720 // to attempt to recode.
4721 int last_q = *q;
4722 int retries = 0;
4723
4724 // Frame size out of permitted range:
4725 // Update correction factor & compute new Q to try...
4726 // Frame is too large
4727 if (rc->projected_frame_size > rc->this_frame_target) {
4728 // Special case if the projected size is > the max allowed.
4729 if (rc->projected_frame_size >= rc->max_frame_bandwidth)
4730 *q_high = rc->worst_quality;
4731
4732 // Raise Qlow as to at least the current value
4733 *q_low = *q < *q_high ? *q + 1 : *q_high;
4734
4735 if (*undershoot_seen || loop_at_this_size > 1) {
 4736        // Update rate_correction_factor.
4737 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4738
4739 *q = (*q_high + *q_low + 1) / 2;
4740 } else {
 4741        // Update rate_correction_factor.
4742 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4743
4744 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4745 AOMMAX(*q_high, top_index), cm->width,
4746 cm->height);
4747
4748 while (*q < *q_low && retries < 10) {
4749 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4750 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4751 AOMMAX(*q_high, top_index), cm->width,
4752 cm->height);
4753 retries++;
4754 }
4755 }
4756
4757 *overshoot_seen = 1;
4758 } else {
4759 // Frame is too small
4760 *q_high = *q > *q_low ? *q - 1 : *q_low;
4761
4762 if (*overshoot_seen || loop_at_this_size > 1) {
4763 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4764 *q = (*q_high + *q_low) / 2;
4765 } else {
4766 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4767 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4768 top_index, cm->width, cm->height);
4769 // Special case reset for qlow for constrained quality.
4770 // This should only trigger where there is very substantial
4771 // undershoot on a frame and the auto cq level is above
 4772        // the user passed in value.
4773 if (cpi->oxcf.rc_mode == AOM_CQ && *q < *q_low) {
4774 *q_low = *q;
4775 }
4776
4777 while (*q > *q_high && retries < 10) {
4778 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
4779 *q = av1_rc_regulate_q(cpi, rc->this_frame_target, bottom_index,
4780 top_index, cm->width, cm->height);
4781 retries++;
4782 }
4783 }
4784
4785 *undershoot_seen = 1;
4786 }
4787
4788 // Clamp Q to upper and lower limits:
4789 *q = clamp(*q, *q_low, *q_high);
4790
4791 *loop = (*q != last_q);
4792 } else {
4793 *loop = 0;
4794 }
4795}
4796
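// Encodes the frame, repeatedly re-running the encode with an adjusted q
// (and re-doing global motion where needed) until the projected frame size
// falls within the rate control limits or recoding is not allowed.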
Tom Finegane4099e32018-01-23 12:01:51 -08004797static int encode_with_recode_loop(AV1_COMP *cpi, size_t *size, uint8_t *dest) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004798 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004799 RATE_CONTROL *const rc = &cpi->rc;
David Turner2f3b5df2019-01-02 14:30:50 +00004800 const int allow_recode = cpi->sf.recode_loop != DISALLOW_RECODE;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004801
4802 set_size_independent_vars(cpi);
4803
Yaowu Xu9b0f7032017-07-31 11:01:19 -07004804 cpi->source->buf_8bit_valid = 0;
Yaowu Xu9b0f7032017-07-31 11:01:19 -07004805
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004806 setup_frame_size(cpi);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07004807
David Turner2f3b5df2019-01-02 14:30:50 +00004808 int top_index = 0, bottom_index = 0;
4809 int q = 0, q_low = 0, q_high = 0;
4810 set_size_dependent_vars(cpi, &q, &bottom_index, &top_index);
4811 q_low = bottom_index;
4812 q_high = top_index;
4813
4814 // Loop variables
4815 int loop_count = 0;
4816 int loop_at_this_size = 0;
4817 int loop = 0;
4818 int overshoot_seen = 0;
4819 int undershoot_seen = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004820 do {
Yaowu Xuf883b422016-08-30 14:01:10 -07004821 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07004822
Urvang Joshif1fa6862018-01-08 16:39:33 -08004823    // If the frame was scaled, the global motion search must be redone since
 4824    // the previous results no longer apply at the new resolution.
David Turner2f3b5df2019-01-02 14:30:50 +00004825 if (loop_count > 0 && cpi->source && cpi->global_motion_search_done) {
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004826 if (cpi->source->y_crop_width != cm->width ||
David Turner2f3b5df2019-01-02 14:30:50 +00004827 cpi->source->y_crop_height != cm->height) {
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004828 cpi->global_motion_search_done = 0;
David Turner2f3b5df2019-01-02 14:30:50 +00004829 }
4830 }
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07004831 cpi->source =
4832 av1_scale_if_required(cm, cpi->unscaled_source, &cpi->scaled_source);
David Turner2f3b5df2019-01-02 14:30:50 +00004833 if (cpi->unscaled_last_source != NULL) {
Debargha Mukherjee17e7b082017-08-13 09:33:03 -07004834 cpi->last_source = av1_scale_if_required(cm, cpi->unscaled_last_source,
4835 &cpi->scaled_last_source);
David Turner2f3b5df2019-01-02 14:30:50 +00004836 }
Debargha Mukherjee17e7b082017-08-13 09:33:03 -07004837
David Turner2f3b5df2019-01-02 14:30:50 +00004838 if (!frame_is_intra_only(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004839 if (loop_count > 0) {
4840 release_scaled_references(cpi);
4841 }
Cheng Chen46f30c72017-09-07 11:13:33 -07004842 scale_references(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004843 }
Yaowu Xuf883b422016-08-30 14:01:10 -07004844 av1_set_quantizer(cm, q);
Debargha Mukherjeef48b0d22018-11-20 12:23:43 -08004845 // printf("Frame %d/%d: q = %d, frame_type = %d superres_denom = %d\n",
4846 // cm->current_frame.frame_number, cm->show_frame, q,
4847 // cm->current_frame.frame_type, cm->superres_scale_denominator);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004848
David Turner2f3b5df2019-01-02 14:30:50 +00004849 if (loop_count == 0) {
4850 setup_frame(cpi);
4851 } else if (get_primary_ref_frame_buf(cm) == NULL) {
4852 // Base q-index may have changed, so we need to assign proper default coef
4853 // probs before every iteration.
Yaowu Xuf883b422016-08-30 14:01:10 -07004854 av1_default_coef_probs(cm);
Hui Su3694c832017-11-10 14:15:58 -08004855 av1_setup_frame_contexts(cm);
David Barkerfc91b392018-03-09 15:32:03 +00004856 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004857
Yaowu Xuc27fc142016-08-22 16:08:15 -07004858 if (cpi->oxcf.aq_mode == VARIANCE_AQ) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004859 av1_vaq_frame_setup(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004860 } else if (cpi->oxcf.aq_mode == COMPLEXITY_AQ) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004861 av1_setup_in_frame_q_adj(cpi);
David Turner2f3b5df2019-01-02 14:30:50 +00004862 } else if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && !allow_recode) {
4863 suppress_active_map(cpi);
4864 av1_cyclic_refresh_setup(cpi);
4865 apply_active_map(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004866 }
David Turner2f3b5df2019-01-02 14:30:50 +00004867
Rostislav Pehlivanov3a964622018-03-14 18:00:32 +00004868 if (cm->seg.enabled) {
David Barkercab37552018-03-21 11:56:24 +00004869 if (!cm->seg.update_data && cm->prev_frame) {
Rostislav Pehlivanov3a964622018-03-14 18:00:32 +00004870 segfeatures_copy(&cm->seg, &cm->prev_frame->seg);
David Barker11c93562018-06-05 12:00:07 +01004871 } else {
4872 calculate_segdata(&cm->seg);
Yue Chend90d3432018-03-16 11:28:42 -07004873 }
David Barkercab37552018-03-21 11:56:24 +00004874 } else {
4875 memset(&cm->seg, 0, sizeof(cm->seg));
Rostislav Pehlivanov3a964622018-03-14 18:00:32 +00004876 }
David Barkercab37552018-03-21 11:56:24 +00004877 segfeatures_copy(&cm->cur_frame->seg, &cm->seg);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004878
David Turner2f3b5df2019-01-02 14:30:50 +00004879 if (allow_recode) save_coding_context(cpi);
4880
Yaowu Xuc27fc142016-08-22 16:08:15 -07004881 // transform / motion compensation build reconstruction frame
Yaowu Xuf883b422016-08-30 14:01:10 -07004882 av1_encode_frame(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004883
David Turner2f3b5df2019-01-02 14:30:50 +00004884 // Update some stats from cyclic refresh, and check if we should not update
4885 // golden reference, for 1 pass CBR.
4886 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ &&
4887 cm->current_frame.frame_type != KEY_FRAME &&
4888 (cpi->oxcf.pass == 0 && cpi->oxcf.rc_mode == AOM_CBR)) {
4889 av1_cyclic_refresh_check_golden_update(cpi);
4890 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004891
Yaowu Xuf883b422016-08-30 14:01:10 -07004892 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07004893
4894 // Dummy pack of the bitstream using up to date stats to get an
4895 // accurate estimate of output frame size to determine if we need
4896 // to recode.
4897 if (cpi->sf.recode_loop >= ALLOW_RECODE_KFARFGF) {
Jingning Han8f661602017-08-19 08:16:50 -07004898 restore_coding_context(cpi);
Tom Finegane4099e32018-01-23 12:01:51 -08004899
David Turner996b2c12018-12-07 15:52:30 +00004900 finalize_encoded_frame(cpi);
David Turner35cba132018-12-10 15:48:15 +00004901 int largest_tile_id = 0; // Output from bitstream: unused here
4902 if (av1_pack_bitstream(cpi, dest, size, &largest_tile_id) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08004903 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004904
4905 rc->projected_frame_size = (int)(*size) << 3;
4906 restore_coding_context(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004907 }
4908
David Turner2f3b5df2019-01-02 14:30:50 +00004909 if (allow_recode && cpi->oxcf.rc_mode != AOM_Q) {
4910 // Update q and decide whether to do a recode loop
4911 recode_loop_update_q(cpi, &loop, &q, &q_low, &q_high, top_index,
4912 bottom_index, &undershoot_seen, &overshoot_seen,
4913 loop_at_this_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004914 }
4915
4916 // Special case for overlay frame.
4917 if (rc->is_src_frame_alt_ref &&
4918 rc->projected_frame_size < rc->max_frame_bandwidth)
4919 loop = 0;
4920
David Turner2f3b5df2019-01-02 14:30:50 +00004921 if (allow_recode && !cpi->sf.gm_disable_recode &&
4922 recode_loop_test_global_motion(cpi)) {
4923 loop = 1;
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08004924 }
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08004925
Yaowu Xuc27fc142016-08-22 16:08:15 -07004926 if (loop) {
4927 ++loop_count;
4928 ++loop_at_this_size;
4929
4930#if CONFIG_INTERNAL_STATS
4931 ++cpi->tot_recode_hits;
4932#endif
4933 }
4934 } while (loop);
Tom Finegane4099e32018-01-23 12:01:51 -08004935
4936 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004937}
4938
Yaowu Xuc27fc142016-08-22 16:08:15 -07004939#define DUMP_RECON_FRAMES 0
4940
4941#if DUMP_RECON_FRAMES == 1
4942// NOTE(zoeliu): For debug - Output the filtered reconstructed video.
Yaowu Xuf883b422016-08-30 14:01:10 -07004943static void dump_filtered_recon_frames(AV1_COMP *cpi) {
4944 AV1_COMMON *const cm = &cpi->common;
David Turnerd2a592e2018-11-16 14:59:31 +00004945 const CurrentFrame *const current_frame = &cm->current_frame;
David Turnerc29e1a92018-12-06 14:10:14 +00004946 const YV12_BUFFER_CONFIG *recon_buf = &cm->cur_frame->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004947
Zoe Liub4f31032017-11-03 23:48:35 -07004948 if (recon_buf == NULL) {
David Turnerd2a592e2018-11-16 14:59:31 +00004949 printf("Frame %d is not ready.\n", current_frame->frame_number);
Zoe Liub4f31032017-11-03 23:48:35 -07004950 return;
4951 }
4952
Zoe Liu27deb382018-03-27 15:13:56 -07004953 static const int flag_list[REF_FRAMES] = { 0,
4954 AOM_LAST_FLAG,
4955 AOM_LAST2_FLAG,
4956 AOM_LAST3_FLAG,
4957 AOM_GOLD_FLAG,
4958 AOM_BWD_FLAG,
4959 AOM_ALT2_FLAG,
4960 AOM_ALT_FLAG };
Zoe Liub4f31032017-11-03 23:48:35 -07004961 printf(
4962 "\n***Frame=%d (frame_offset=%d, show_frame=%d, "
4963 "show_existing_frame=%d) "
4964 "[LAST LAST2 LAST3 GOLDEN BWD ALT2 ALT]=[",
David Turnerd2a592e2018-11-16 14:59:31 +00004965 current_frame->frame_number, current_frame->order_hint, cm->show_frame,
Zoe Liub4f31032017-11-03 23:48:35 -07004966 cm->show_existing_frame);
4967 for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00004968 const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
4969 const int ref_offset = buf != NULL ? (int)buf->order_hint : -1;
David Turner1bcefb32018-11-19 17:54:00 +00004970 printf(" %d(%c-%d-%4.2f)", ref_offset,
4971 (cpi->ref_frame_flags & flag_list[ref_frame]) ? 'Y' : 'N',
David Turnera21966b2018-12-05 14:48:49 +00004972 buf ? (int)buf->frame_rf_level : -1,
4973 buf ? rate_factor_deltas[buf->frame_rf_level] : -1);
Zoe Liub4f31032017-11-03 23:48:35 -07004974 }
4975 printf(" ]\n");
Zoe Liub4f31032017-11-03 23:48:35 -07004976
4977 if (!cm->show_frame) {
4978 printf("Frame %d is a no show frame, so no image dump.\n",
David Turnerd2a592e2018-11-16 14:59:31 +00004979 current_frame->frame_number);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004980 return;
4981 }
4982
Zoe Liub4f31032017-11-03 23:48:35 -07004983 int h;
4984 char file_name[256] = "/tmp/enc_filtered_recon.yuv";
4985 FILE *f_recon = NULL;
4986
David Turnerd2a592e2018-11-16 14:59:31 +00004987 if (current_frame->frame_number == 0) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004988 if ((f_recon = fopen(file_name, "wb")) == NULL) {
4989 printf("Unable to open file %s to write.\n", file_name);
4990 return;
4991 }
4992 } else {
4993 if ((f_recon = fopen(file_name, "ab")) == NULL) {
4994 printf("Unable to open file %s to append.\n", file_name);
4995 return;
4996 }
4997 }
4998 printf(
Zoe Liuf40a9572017-10-13 12:37:19 -07004999 "\nFrame=%5d, encode_update_type[%5d]=%1d, frame_offset=%d, "
5000 "show_frame=%d, show_existing_frame=%d, source_alt_ref_active=%d, "
5001 "refresh_alt_ref_frame=%d, rf_level=%d, "
5002 "y_stride=%4d, uv_stride=%4d, cm->width=%4d, cm->height=%4d\n\n",
David Turnerd2a592e2018-11-16 14:59:31 +00005003 current_frame->frame_number, cpi->twopass.gf_group.index,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005004 cpi->twopass.gf_group.update_type[cpi->twopass.gf_group.index],
David Turnerd2a592e2018-11-16 14:59:31 +00005005 current_frame->order_hint, cm->show_frame, cm->show_existing_frame,
Zoe Liuf40a9572017-10-13 12:37:19 -07005006 cpi->rc.source_alt_ref_active, cpi->refresh_alt_ref_frame,
5007 cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index],
5008 recon_buf->y_stride, recon_buf->uv_stride, cm->width, cm->height);
Zoe Liue9b15e22017-07-19 15:53:01 -07005009#if 0
5010 int ref_frame;
5011 printf("get_ref_frame_map_idx: [");
5012 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame)
David Turnera21966b2018-12-05 14:48:49 +00005013 printf(" %d", get_ref_frame_map_idx(cm, ref_frame));
Zoe Liue9b15e22017-07-19 15:53:01 -07005014 printf(" ]\n");
Zoe Liue9b15e22017-07-19 15:53:01 -07005015#endif // 0
Yaowu Xuc27fc142016-08-22 16:08:15 -07005016
5017 // --- Y ---
5018 for (h = 0; h < cm->height; ++h) {
5019 fwrite(&recon_buf->y_buffer[h * recon_buf->y_stride], 1, cm->width,
5020 f_recon);
5021 }
5022 // --- U ---
5023 for (h = 0; h < (cm->height >> 1); ++h) {
5024 fwrite(&recon_buf->u_buffer[h * recon_buf->uv_stride], 1, (cm->width >> 1),
5025 f_recon);
5026 }
5027 // --- V ---
5028 for (h = 0; h < (cm->height >> 1); ++h) {
5029 fwrite(&recon_buf->v_buffer[h * recon_buf->uv_stride], 1, (cm->width >> 1),
5030 f_recon);
5031 }
5032
5033 fclose(f_recon);
5034}
5035#endif // DUMP_RECON_FRAMES
5036
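// A frame is droppable if it does not refresh any reference slots.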
Wei-Ting Linfb7dc062018-06-28 18:26:13 -07005037static INLINE int is_frame_droppable(AV1_COMP *cpi) {
5038 return !(cpi->refresh_alt_ref_frame || cpi->refresh_alt2_ref_frame ||
5039 cpi->refresh_bwd_ref_frame || cpi->refresh_golden_frame ||
5040 cpi->refresh_last_frame);
5041}
5042
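// Builds a mask of interpolation filters to exclude from the search: a filter
// is masked when it was chosen for only a small fraction of blocks on
// LAST_FRAME and its weighted usage across the other reference frames is also
// low relative to their totals.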
Sachin Kumar Gargfd39b232019-01-03 17:41:09 +05305043static int setup_interp_filter_search_mask(AV1_COMP *cpi) {
5044 InterpFilters ifilter;
5045 int ref_total[REF_FRAMES] = { 0 };
5046 MV_REFERENCE_FRAME ref;
5047 int mask = 0;
5048 int arf_idx = ALTREF_FRAME;
5049 if (cpi->common.last_frame_type == KEY_FRAME || cpi->refresh_alt_ref_frame)
5050 return mask;
5051 for (ref = LAST_FRAME; ref <= ALTREF_FRAME; ++ref)
5052 for (ifilter = EIGHTTAP_REGULAR; ifilter <= MULTITAP_SHARP; ++ifilter)
5053 ref_total[ref] += cpi->interp_filter_selected[ref][ifilter];
5054
5055 for (ifilter = EIGHTTAP_REGULAR; ifilter <= MULTITAP_SHARP; ++ifilter) {
5056 if ((ref_total[LAST_FRAME] &&
5057 cpi->interp_filter_selected[LAST_FRAME][ifilter] * 30 <=
5058 ref_total[LAST_FRAME]) &&
5059 (((cpi->interp_filter_selected[LAST2_FRAME][ifilter] * 20) +
5060 (cpi->interp_filter_selected[LAST3_FRAME][ifilter] * 20) +
5061 (cpi->interp_filter_selected[GOLDEN_FRAME][ifilter] * 20) +
5062 (cpi->interp_filter_selected[BWDREF_FRAME][ifilter] * 10) +
5063 (cpi->interp_filter_selected[ALTREF2_FRAME][ifilter] * 10) +
5064 (cpi->interp_filter_selected[arf_idx][ifilter] * 10)) <
5065 (ref_total[LAST2_FRAME] + ref_total[LAST3_FRAME] +
5066 ref_total[GOLDEN_FRAME] + ref_total[BWDREF_FRAME] +
5067 ref_total[ALTREF2_FRAME] + ref_total[ALTREF_FRAME])))
5068 mask |= 1 << ifilter;
5069 }
5070 return mask;
5071}
5072
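// Top-level per-frame encode path: handles the show_existing_frame short cut,
// key-frame and segmentation resets, interpolation filter search masking and
// 1-pass CBR frame dropping before running the main encode.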
Tom Finegane4099e32018-01-23 12:01:51 -08005073static int encode_frame_to_data_rate(AV1_COMP *cpi, size_t *size, uint8_t *dest,
Tom Finegane4099e32018-01-23 12:01:51 -08005074 unsigned int *frame_flags) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005075 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005076 SequenceHeader *const seq_params = &cm->seq_params;
David Turnerd2a592e2018-11-16 14:59:31 +00005077 CurrentFrame *const current_frame = &cm->current_frame;
Yaowu Xuf883b422016-08-30 14:01:10 -07005078 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005079 struct segmentation *const seg = &cm->seg;
Thomas Davies4822e142017-10-10 11:30:36 +01005080
Yaowu Xuf883b422016-08-30 14:01:10 -07005081 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07005082
Fangwen Fu8d164de2016-12-14 13:40:54 -08005083 // frame type has been decided outside of this function call
David Turnerd2a592e2018-11-16 14:59:31 +00005084 cm->cur_frame->frame_type = current_frame->frame_type;
Debargha Mukherjee07a7c1f2018-03-21 17:39:13 -07005085
Yunqing Wang9612d552018-05-15 14:58:30 -07005086 cm->large_scale_tile = cpi->oxcf.large_scale_tile;
5087 cm->single_tile_decoding = cpi->oxcf.single_tile_decoding;
Yunqing Wang9612d552018-05-15 14:58:30 -07005088
sarahparker21dbca42018-03-30 17:43:44 -07005089 cm->allow_ref_frame_mvs &= frame_might_allow_ref_frame_mvs(cm);
Yunqing Wangd48fb162018-06-15 10:55:28 -07005090 // cm->allow_ref_frame_mvs needs to be written into the frame header while
5091 // cm->large_scale_tile is 1, therefore, "cm->large_scale_tile=1" case is
5092 // separated from frame_might_allow_ref_frame_mvs().
5093 cm->allow_ref_frame_mvs &= !cm->large_scale_tile;
5094
Debargha Mukherjee1d7217e2018-03-26 13:32:13 -07005095 cm->allow_warped_motion =
Debargha Mukherjeea5b810a2018-03-26 19:19:55 -07005096 cpi->oxcf.allow_warped_motion && frame_might_allow_warped_motion(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005097
Jingning Hand8a15a62017-10-30 10:53:42 -07005098 // Reset the frame packet stamp index.
David Turnerd2a592e2018-11-16 14:59:31 +00005099 if (current_frame->frame_type == KEY_FRAME && cm->show_frame)
5100 current_frame->frame_number = 0;
Jingning Hand8a15a62017-10-30 10:53:42 -07005101
Sachin Kumar Gargfd39b232019-01-03 17:41:09 +05305102 cm->last_frame_type = current_frame->frame_type;
5103 if (cpi->oxcf.pass == 2 && cpi->sf.adaptive_interp_filter_search)
5104 cpi->sf.interp_filter_search_mask = setup_interp_filter_search_mask(cpi);
5105
Sarah Parker33005522018-07-27 14:46:25 -07005106 if (encode_show_existing_frame(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005107 // NOTE(zoeliu): In BIDIR_PRED, the existing frame to show is the current
5108 // BWDREF_FRAME in the reference frame buffer.
David Turnerd2a592e2018-11-16 14:59:31 +00005109 if (current_frame->frame_type == KEY_FRAME) {
Sarah Parkerb9041612018-05-22 19:06:47 -07005110 cm->reset_decoder_state = 1;
5111 } else {
David Turnerd2a592e2018-11-16 14:59:31 +00005112 current_frame->frame_type = INTER_FRAME;
Sarah Parkerb9041612018-05-22 19:06:47 -07005113 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005114 cm->show_frame = 1;
5115 cpi->frame_flags = *frame_flags;
5116
Jingning Han8f661602017-08-19 08:16:50 -07005117 restore_coding_context(cpi);
Zoe Liub4f31032017-11-03 23:48:35 -07005118
David Turner996b2c12018-12-07 15:52:30 +00005119 finalize_encoded_frame(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005120 // Build the bitstream
David Turner35cba132018-12-10 15:48:15 +00005121 int largest_tile_id = 0; // Output from bitstream: unused here
5122 if (av1_pack_bitstream(cpi, dest, size, &largest_tile_id) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08005123 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005124
David Turner90311862018-11-29 13:34:36 +00005125 if (seq_params->frame_id_numbers_present_flag &&
5126 current_frame->frame_type == KEY_FRAME) {
5127 // Displaying a forward key-frame, so reset the ref buffer IDs
5128 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
5129 for (int i = 0; i < REF_FRAMES; i++)
5130 cm->ref_frame_id[i] = display_frame_id;
5131 }
5132
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07005133 cpi->seq_params_locked = 1;
5134
Zoe Liub4f31032017-11-03 23:48:35 -07005135 // Update current frame offset.
Jack Haughtonddb80602018-11-21 16:41:49 +00005136 current_frame->order_hint = cm->cur_frame->order_hint;
Zoe Liub4f31032017-11-03 23:48:35 -07005137
Yaowu Xuc27fc142016-08-22 16:08:15 -07005138#if DUMP_RECON_FRAMES == 1
5139 // NOTE(zoeliu): For debug - Output the filtered reconstructed video.
5140 dump_filtered_recon_frames(cpi);
5141#endif // DUMP_RECON_FRAMES
5142
5143 // Update the LAST_FRAME in the reference frame buffer.
Zoe Liue9b15e22017-07-19 15:53:01 -07005144 // NOTE:
5145 // (1) For BWDREF_FRAME as the show_existing_frame, the reference frame
5146 // update has been done previously when handling the LAST_BIPRED_FRAME
5147 // right before BWDREF_FRAME (in the display order);
5148 // (2) For INTNL_OVERLAY as the show_existing_frame, the reference frame
Urvang Joshif1fa6862018-01-08 16:39:33 -08005149 // update will be done when the following is called, which will
5150 // exchange
Zoe Liue9b15e22017-07-19 15:53:01 -07005151 // the virtual indexes between LAST_FRAME and ALTREF2_FRAME, so that
Urvang Joshif1fa6862018-01-08 16:39:33 -08005152 // LAST3 will get retired, LAST2 becomes LAST3, LAST becomes LAST2,
5153 // and
Zoe Liue9b15e22017-07-19 15:53:01 -07005154 // ALTREF2_FRAME will serve as the new LAST_FRAME.
Cheng Chen46f30c72017-09-07 11:13:33 -07005155 update_reference_frames(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005156
5157 // Update frame flags
5158 cpi->frame_flags &= ~FRAMEFLAGS_GOLDEN;
5159 cpi->frame_flags &= ~FRAMEFLAGS_BWDREF;
5160 cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
5161
5162 *frame_flags = cpi->frame_flags & ~FRAMEFLAGS_KEY;
5163
Yaowu Xuc27fc142016-08-22 16:08:15 -07005164 // Since we allocate a spot for the OVERLAY frame in the gf group, we need
5165 // to do post-encoding update accordingly.
5166 if (cpi->rc.is_src_frame_alt_ref) {
Debargha Mukherjee7166f222017-09-05 21:32:42 -07005167 av1_set_target_rate(cpi, cm->width, cm->height);
Yaowu Xuf883b422016-08-30 14:01:10 -07005168 av1_rc_postencode_update(cpi, *size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005169 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005170
David Turnerd2a592e2018-11-16 14:59:31 +00005171 ++current_frame->frame_number;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005172
Tom Finegane4099e32018-01-23 12:01:51 -08005173 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005174 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005175
5176 // Set default state for segment based loop filter update flags.
5177 cm->lf.mode_ref_delta_update = 0;
5178
Yaowu Xuc27fc142016-08-22 16:08:15 -07005179 // Set various flags etc to special state if it is a key frame.
Tarek AMARAc9813852018-03-05 18:40:18 -05005180 if (frame_is_intra_only(cm) || frame_is_sframe(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005181 // Reset the loop filter deltas and segmentation map.
Yaowu Xuf883b422016-08-30 14:01:10 -07005182 av1_reset_segment_features(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005183
5184 // If segmentation is enabled force a map update for key frames.
5185 if (seg->enabled) {
5186 seg->update_map = 1;
5187 seg->update_data = 1;
5188 }
5189
5190 // The alternate reference frame cannot be active for a key frame.
5191 cpi->rc.source_alt_ref_active = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005192 }
Thomas Daviesaf6df172016-11-09 14:04:18 +00005193 if (cpi->oxcf.mtu == 0) {
5194 cm->num_tg = cpi->oxcf.num_tile_groups;
5195 } else {
Yaowu Xu859a5272016-11-10 15:32:21 -08005196 // Use a default value for the purposes of weighting costs in probability
5197 // updates
Thomas Daviesaf6df172016-11-09 14:04:18 +00005198 cm->num_tg = DEFAULT_MAX_NUM_TG;
5199 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005200
5201 // For 1 pass CBR, check if we are dropping this frame.
5202 // Never drop on key frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07005203 if (oxcf->pass == 0 && oxcf->rc_mode == AOM_CBR &&
David Turnerd2a592e2018-11-16 14:59:31 +00005204 current_frame->frame_type != KEY_FRAME) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005205 if (av1_rc_drop_frame(cpi)) {
5206 av1_rc_postencode_update_drop_frame(cpi);
Tom Finegane4099e32018-01-23 12:01:51 -08005207 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005208 }
5209 }
5210
Yaowu Xuf883b422016-08-30 14:01:10 -07005211 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07005212
5213#if CONFIG_INTERNAL_STATS
5214 memset(cpi->mode_chosen_counts, 0,
5215 MAX_MODES * sizeof(*cpi->mode_chosen_counts));
5216#endif
5217
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005218 if (seq_params->frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005219 /* Non-normative definition of current_frame_id ("frame counter" with
Johann123e8a62017-12-28 14:40:49 -08005220 * wraparound) */
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005221 if (cm->current_frame_id == -1) {
David Barker49a76562016-12-07 14:50:21 +00005222 int lsb, msb;
Yaowu Xud3e7c682017-12-21 14:08:25 -08005223 /* quasi-random initialization of current_frame_id for a key frame */
Alex Conversef77fd0b2017-04-20 11:00:24 -07005224 if (cpi->source->flags & YV12_FLAG_HIGHBITDEPTH) {
5225 lsb = CONVERT_TO_SHORTPTR(cpi->source->y_buffer)[0] & 0xff;
5226 msb = CONVERT_TO_SHORTPTR(cpi->source->y_buffer)[1] & 0xff;
David Barker49a76562016-12-07 14:50:21 +00005227 } else {
Alex Conversef77fd0b2017-04-20 11:00:24 -07005228 lsb = cpi->source->y_buffer[0] & 0xff;
5229 msb = cpi->source->y_buffer[1] & 0xff;
David Barker49a76562016-12-07 14:50:21 +00005230 }
David Turner760a2f42018-12-07 15:25:36 +00005231 cm->current_frame_id =
5232 ((msb << 8) + lsb) % (1 << seq_params->frame_id_length);
Tarek AMARAc9813852018-03-05 18:40:18 -05005233
5234      // S_frame is meant for stitching different streams of different
5235      // resolutions together, so current_frame_id must be the same across
5236      // the streams of the same content rather than random. 0x37 is an
5237      // arbitrarily chosen starting point.
5239 if (cpi->oxcf.sframe_enabled) cm->current_frame_id = 0x37;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005240 } else {
5241 cm->current_frame_id =
David Turner760a2f42018-12-07 15:25:36 +00005242 (cm->current_frame_id + 1 + (1 << seq_params->frame_id_length)) %
5243 (1 << seq_params->frame_id_length);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005244 }
5245 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005246
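  // Worked example for the frame-id wrap-around above (added for
  // illustration; the frame_id_length value is hypothetical): with
  // frame_id_length == 15 the id lives in [0, 32768).  The key-frame seed is
  //   ((msb << 8) + lsb) % 32768
  // and the per-frame increment wraps as
  //   (32767 + 1 + 32768) % 32768 == 0,
  // the "+ (1 << frame_id_length)" term simply keeping the dividend
  // non-negative before the modulo.
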
Hui Su483a8452018-02-26 12:28:48 -08005247 switch (cpi->oxcf.cdf_update_mode) {
5248    case 0:  // No CDF update for any frames (4~6% compression loss).
5249 cm->disable_cdf_update = 1;
5250 break;
5251 case 1: // Enable CDF update for all frames.
5252 cm->disable_cdf_update = 0;
5253 break;
5254 case 2:
5255 // Strategically determine at which frames to do CDF update.
5256      // Currently only enable CDF update for all-intra and no-show frames
5257      // (1.5% compression loss).
5258 // TODO(huisu@google.com): design schemes for various trade-offs between
5259 // compression quality and decoding speed.
Hui Sub1b76b32018-02-27 15:24:48 -08005260 cm->disable_cdf_update =
5261 (frame_is_intra_only(cm) || !cm->show_frame) ? 0 : 1;
Hui Su483a8452018-02-26 12:28:48 -08005262 break;
5263 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005264 cm->timing_info_present &= !seq_params->reduced_still_picture_hdr;
Hui Su483a8452018-02-26 12:28:48 -08005265
David Turner2f3b5df2019-01-02 14:30:50 +00005266 if (encode_with_recode_loop(cpi, size, dest) != AOM_CODEC_OK)
5267 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005268
5269#ifdef OUTPUT_YUV_SKINMAP
David Turnerd2a592e2018-11-16 14:59:31 +00005270 if (cpi->common.current_frame.frame_number > 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005271 av1_compute_skin_map(cpi, yuv_skinmap_file);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005272 }
5273#endif // OUTPUT_YUV_SKINMAP
5274
5275 // Special case code to reduce pulsing when key frames are forced at a
5276  // fixed interval. Record the reconstruction error if this is the frame
5277  // before the forced key frame.
5278 if (cpi->rc.next_key_frame_forced && cpi->rc.frames_to_key == 1) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005279 if (seq_params->use_highbitdepth) {
Jack Haughtonddb80602018-11-21 16:41:49 +00005280 cpi->ambient_err = aom_highbd_get_y_sse(cpi->source, &cm->cur_frame->buf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005281 } else {
Jack Haughtonddb80602018-11-21 16:41:49 +00005282 cpi->ambient_err = aom_get_y_sse(cpi->source, &cm->cur_frame->buf);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005283 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005284 }
5285
Tarek AMARAc9813852018-03-05 18:40:18 -05005286 // If the encoder forced a KEY_FRAME decision or if frame is an S_FRAME
David Turnerd2a592e2018-11-16 14:59:31 +00005287 if ((current_frame->frame_type == KEY_FRAME && cm->show_frame) ||
5288 frame_is_sframe(cm)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005289 cpi->refresh_last_frame = 1;
5290 }
5291
David Turnerc29e1a92018-12-06 14:10:14 +00005292 cm->cur_frame->buf.color_primaries = seq_params->color_primaries;
5293 cm->cur_frame->buf.transfer_characteristics =
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005294 seq_params->transfer_characteristics;
David Turnerc29e1a92018-12-06 14:10:14 +00005295 cm->cur_frame->buf.matrix_coefficients = seq_params->matrix_coefficients;
5296 cm->cur_frame->buf.monochrome = seq_params->monochrome;
5297 cm->cur_frame->buf.chroma_sample_position =
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005298 seq_params->chroma_sample_position;
David Turnerc29e1a92018-12-06 14:10:14 +00005299 cm->cur_frame->buf.color_range = seq_params->color_range;
5300 cm->cur_frame->buf.render_width = cm->render_width;
5301 cm->cur_frame->buf.render_height = cm->render_height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005302
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02005303 // TODO(zoeliu): For non-ref frames, loop filtering may need to be turned
5304 // off.
Yaowu Xuc27fc142016-08-22 16:08:15 -07005305
5306 // Pick the loop filter level for the frame.
Cheng Chen68dc9142018-05-02 17:46:28 -07005307 if (!cm->allow_intrabc) {
David Barker218556e2018-02-14 14:23:12 +00005308 loopfilter_frame(cpi, cm);
Hui Su06463e42018-02-23 22:17:36 -08005309 } else {
Hui Su06463e42018-02-23 22:17:36 -08005310 cm->lf.filter_level[0] = 0;
5311 cm->lf.filter_level[1] = 0;
David Turnerebf96f42018-11-14 16:57:57 +00005312 cm->cdef_info.cdef_bits = 0;
5313 cm->cdef_info.cdef_strengths[0] = 0;
5314 cm->cdef_info.nb_cdef_strengths = 1;
5315 cm->cdef_info.cdef_uv_strengths[0] = 0;
Hui Su06463e42018-02-23 22:17:36 -08005316 cm->rst_info[0].frame_restoration_type = RESTORE_NONE;
5317 cm->rst_info[1].frame_restoration_type = RESTORE_NONE;
5318 cm->rst_info[2].frame_restoration_type = RESTORE_NONE;
Hui Su06463e42018-02-23 22:17:36 -08005319 }
David Barker218556e2018-02-14 14:23:12 +00005320
5321 // TODO(debargha): Fix mv search range on encoder side
David Turnerc29e1a92018-12-06 14:10:14 +00005322 // aom_extend_frame_inner_borders(&cm->cur_frame->buf, av1_num_planes(cm));
5323 aom_extend_frame_borders(&cm->cur_frame->buf, av1_num_planes(cm));
Yaowu Xuc27fc142016-08-22 16:08:15 -07005324
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07005325#ifdef OUTPUT_YUV_REC
David Turnerc29e1a92018-12-06 14:10:14 +00005326 aom_write_one_yuv_frame(cm, &cm->cur_frame->buf);
Wei-Ting Lin01d4d8f2017-08-03 17:04:12 -07005327#endif
5328
David Turner996b2c12018-12-07 15:52:30 +00005329 finalize_encoded_frame(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005330 // Build the bitstream
David Turner35cba132018-12-10 15:48:15 +00005331 int largest_tile_id = 0; // Output from pack_bitstream
5332 if (av1_pack_bitstream(cpi, dest, size, &largest_tile_id) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08005333 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005334
Debargha Mukherjeef2e5bb32018-03-26 14:35:24 -07005335 cpi->seq_params_locked = 1;
5336
David Turner996b2c12018-12-07 15:52:30 +00005337 // Update reference frame ids for reference frames this frame will overwrite
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005338 if (seq_params->frame_id_numbers_present_flag) {
David Turner996b2c12018-12-07 15:52:30 +00005339 for (int i = 0; i < REF_FRAMES; i++) {
5340 if ((current_frame->refresh_frame_flags >> i) & 1) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005341 cm->ref_frame_id[i] = cm->current_frame_id;
5342 }
5343 }
5344 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01005345
Yaowu Xuc27fc142016-08-22 16:08:15 -07005346#if DUMP_RECON_FRAMES == 1
5347 // NOTE(zoeliu): For debug - Output the filtered reconstructed video.
Zoe Liub4f31032017-11-03 23:48:35 -07005348 dump_filtered_recon_frames(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005349#endif // DUMP_RECON_FRAMES
5350
Soo-Chul Han934af352017-10-15 15:21:51 -04005351 if (cm->seg.enabled) {
5352 if (cm->seg.update_map) {
5353 update_reference_segmentation_map(cpi);
Yue Chend90d3432018-03-16 11:28:42 -07005354 } else if (cm->last_frame_seg_map) {
David Turnerb757ce02018-11-12 15:01:28 +00005355 memcpy(cm->cur_frame->seg_map, cm->last_frame_seg_map,
Soo-Chul Han934af352017-10-15 15:21:51 -04005356 cm->mi_cols * cm->mi_rows * sizeof(uint8_t));
5357 }
5358 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005359
5360 if (frame_is_intra_only(cm) == 0) {
5361 release_scaled_references(cpi);
5362 }
5363
Cheng Chen46f30c72017-09-07 11:13:33 -07005364 update_reference_frames(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005365
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08005366#if CONFIG_ENTROPY_STATS
Yue Chencc6a6ef2018-05-21 16:21:05 -07005367 av1_accumulate_frame_counts(&aggregate_fc, &cpi->counts);
Debargha Mukherjee5802ebe2016-12-21 04:17:24 -08005368#endif // CONFIG_ENTROPY_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005369
Hui Sudc54be62018-03-14 19:14:28 -07005370 if (cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
David Turner35cba132018-12-10 15:48:15 +00005371 *cm->fc = cpi->tile_data[largest_tile_id].tctx;
Hui Sudc54be62018-03-14 19:14:28 -07005372 av1_reset_cdf_symbol_counters(cm->fc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005373 }
5374
5375 if (cpi->refresh_golden_frame == 1)
5376 cpi->frame_flags |= FRAMEFLAGS_GOLDEN;
5377 else
5378 cpi->frame_flags &= ~FRAMEFLAGS_GOLDEN;
5379
5380 if (cpi->refresh_alt_ref_frame == 1)
5381 cpi->frame_flags |= FRAMEFLAGS_ALTREF;
5382 else
5383 cpi->frame_flags &= ~FRAMEFLAGS_ALTREF;
5384
Yaowu Xuc27fc142016-08-22 16:08:15 -07005385 if (cpi->refresh_bwd_ref_frame == 1)
5386 cpi->frame_flags |= FRAMEFLAGS_BWDREF;
5387 else
5388 cpi->frame_flags &= ~FRAMEFLAGS_BWDREF;
Sachin Kumar Gargfd39b232019-01-03 17:41:09 +05305389 cm->last_frame_type = current_frame->frame_type;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005390
Yaowu Xuf883b422016-08-30 14:01:10 -07005391 av1_rc_postencode_update(cpi, *size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005392
David Turnerd2a592e2018-11-16 14:59:31 +00005393 if (current_frame->frame_type == KEY_FRAME) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005394 // Tell the caller that the frame was coded as a key frame
5395 *frame_flags = cpi->frame_flags | FRAMEFLAGS_KEY;
5396 } else {
5397 *frame_flags = cpi->frame_flags & ~FRAMEFLAGS_KEY;
5398 }
5399
5400 // Clear the one shot update flags for segmentation map and mode/ref loop
5401 // filter deltas.
5402 cm->seg.update_map = 0;
5403 cm->seg.update_data = 0;
5404 cm->lf.mode_ref_delta_update = 0;
5405
Wei-Ting Linfb7dc062018-06-28 18:26:13 -07005406 // A droppable frame might not be shown but it always
5407 // takes a space in the gf group. Therefore, even when
5408  // it is not shown, we still need to update the countdown.
5409
Yaowu Xuc27fc142016-08-22 16:08:15 -07005410 if (cm->show_frame) {
Urvang Joshif1fa6862018-01-08 16:39:33 -08005411    // TODO(zoeliu): We may only swap mi and prev_mi for those frames that
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02005413    // are being used as reference.
Cheng Chen46f30c72017-09-07 11:13:33 -07005414 swap_mi_and_prev_mi(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005415 // Don't increment frame counters if this was an altref buffer
5416    // update, not a real frame.
Wei-Ting Lin96ee0eb2018-06-22 15:27:22 -07005417
David Turnerd2a592e2018-11-16 14:59:31 +00005418 ++current_frame->frame_number;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005419 }
5420
Tom Finegane4099e32018-01-23 12:01:51 -08005421 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005422}
5423
David Turner056f7cd2019-01-07 17:48:13 +00005424int av1_encode(AV1_COMP *const cpi, uint8_t *const dest,
5425 const EncodeFrameParams *const frame_params,
5426 EncodeFrameResults *const frame_results) {
David Turner07dbd8e2019-01-08 17:16:25 +00005427 AV1_COMMON *const cm = &cpi->common;
5428
David Turner056f7cd2019-01-07 17:48:13 +00005429 // TODO(david.turner@argondesign.com): Copy data from frame_params to cpi and
5430 // cm as appropriate
5431
David Turner07dbd8e2019-01-08 17:16:25 +00005432 cm->error_resilient_mode = frame_params->error_resilient_mode;
5433 cpi->ref_frame_flags = frame_params->ref_frame_flags;
5434
David Turner056f7cd2019-01-07 17:48:13 +00005435 if (encode_frame_to_data_rate(cpi, &frame_results->size, dest,
5436 frame_params->frame_flags) != AOM_CODEC_OK) {
5437 return AOM_CODEC_ERROR;
5438 }
5439
5440 return AOM_CODEC_OK;
5441}
5442
Sarah Parker3491dd22018-08-08 18:38:31 -07005443static INLINE void update_keyframe_counters(AV1_COMP *cpi) {
5444 // TODO(zoeliu): To investigate whether we should treat BWDREF_FRAME
5445 // differently here for rc->avg_frame_bandwidth.
5446 if (cpi->common.show_frame || cpi->rc.is_bwd_ref_frame) {
5447 if (!cpi->common.show_existing_frame || cpi->rc.is_src_frame_alt_ref ||
David Turnerd2a592e2018-11-16 14:59:31 +00005448 cpi->common.current_frame.frame_type == KEY_FRAME) {
Sarah Parker3491dd22018-08-08 18:38:31 -07005449 // If this is a show_existing_frame with a source other than altref,
5450 // or if it is not a displayed forward keyframe, the keyframe update
5451 // counters were incremented when it was originally encoded.
5452 cpi->rc.frames_since_key++;
5453 cpi->rc.frames_to_key--;
5454 }
5455 }
5456}
5457
5458static INLINE void update_frames_till_gf_update(AV1_COMP *cpi) {
5459 // TODO(weitinglin): Updating this counter for is_frame_droppable
5460  // is a work-around to handle the condition when a frame is dropped.
5461  // We should fix the cpi->common.show_frame flag instead of checking
5462  // the other condition, so that the counter is updated properly.
5463 if (cpi->common.show_frame || is_frame_droppable(cpi)) {
5464 // Decrement count down till next gf
5465 if (cpi->rc.frames_till_gf_update_due > 0)
5466 cpi->rc.frames_till_gf_update_due--;
5467 }
5468}
5469
5470static INLINE void update_twopass_gf_group_index(AV1_COMP *cpi) {
5471 // Increment the gf group index ready for the next frame. If this is
5472 // a show_existing_frame with a source other than altref, or if it is not
5473 // a displayed forward keyframe, the index was incremented when it was
5474 // originally encoded.
5475 if (!cpi->common.show_existing_frame || cpi->rc.is_src_frame_alt_ref ||
David Turnerd2a592e2018-11-16 14:59:31 +00005476 cpi->common.current_frame.frame_type == KEY_FRAME) {
Sarah Parker3491dd22018-08-08 18:38:31 -07005477 ++cpi->twopass.gf_group.index;
5478 }
5479}
5480
5481static void update_rc_counts(AV1_COMP *cpi) {
5482 update_keyframe_counters(cpi);
5483 update_frames_till_gf_update(cpi);
5484 if (cpi->oxcf.pass == 2) update_twopass_gf_group_index(cpi);
5485}
5486
Debargha Mukherjee57378252018-09-21 18:29:37 -07005487static void set_additional_frame_flags(AV1_COMMON *const cm,
5488 unsigned int *frame_flags) {
5489 if (frame_is_intra_only(cm)) *frame_flags |= FRAMEFLAGS_INTRAONLY;
5490 if (frame_is_sframe(cm)) *frame_flags |= FRAMEFLAGS_SWITCH;
5491 if (cm->error_resilient_mode) *frame_flags |= FRAMEFLAGS_ERROR_RESILIENT;
5492}
5493
Tom Finegane4099e32018-01-23 12:01:51 -08005494static int Pass0Encode(AV1_COMP *cpi, size_t *size, uint8_t *dest,
David Turner056f7cd2019-01-07 17:48:13 +00005495 unsigned int *frame_flags) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005496 if (cpi->oxcf.rc_mode == AOM_CBR) {
5497 av1_rc_get_one_pass_cbr_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005498 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07005499 av1_rc_get_one_pass_vbr_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005500 }
David Turner056f7cd2019-01-07 17:48:13 +00005501 if (av1_encode_strategy(cpi, size, dest, frame_flags) != AOM_CODEC_OK) {
Debargha Mukherjeeff48c092018-04-04 23:53:40 -07005502 return AOM_CODEC_ERROR;
5503 }
Debargha Mukherjee57378252018-09-21 18:29:37 -07005504 set_additional_frame_flags(&cpi->common, frame_flags);
5505
Sarah Parker3491dd22018-08-08 18:38:31 -07005506 update_rc_counts(cpi);
Debargha Mukherjeeff48c092018-04-04 23:53:40 -07005507 check_show_existing_frame(cpi);
5508 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005509}
5510
Tom Finegane4099e32018-01-23 12:01:51 -08005511static int Pass2Encode(AV1_COMP *cpi, size_t *size, uint8_t *dest,
5512 unsigned int *frame_flags) {
Angie Chiang5b5f4df2017-12-06 10:41:12 -08005513#if CONFIG_MISMATCH_DEBUG
5514 mismatch_move_frame_idx_w();
5515#endif
Angie Chiang4d55d762017-12-13 16:18:37 -08005516#if TXCOEFF_COST_TIMER
5517 AV1_COMMON *cm = &cpi->common;
5518 cm->txcoeff_cost_timer = 0;
5519 cm->txcoeff_cost_count = 0;
5520#endif
Tom Finegane4099e32018-01-23 12:01:51 -08005521
David Turner056f7cd2019-01-07 17:48:13 +00005522 if (av1_encode_strategy(cpi, size, dest, frame_flags) != AOM_CODEC_OK) {
Tom Finegane4099e32018-01-23 12:01:51 -08005523 return AOM_CODEC_ERROR;
5524 }
Debargha Mukherjee57378252018-09-21 18:29:37 -07005525 set_additional_frame_flags(&cpi->common, frame_flags);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005526
Angie Chiang4d55d762017-12-13 16:18:37 -08005527#if TXCOEFF_COST_TIMER
5528 cm->cum_txcoeff_cost_timer += cm->txcoeff_cost_timer;
5529 fprintf(stderr,
5530 "\ntxb coeff cost block number: %ld, frame time: %ld, cum time %ld "
5531 "in us\n",
5532 cm->txcoeff_cost_count, cm->txcoeff_cost_timer,
5533 cm->cum_txcoeff_cost_timer);
5534#endif
5535
Sarah Parker3491dd22018-08-08 18:38:31 -07005536 av1_twopass_postencode_update(cpi);
5537 update_rc_counts(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005538 check_show_existing_frame(cpi);
Tom Finegane4099e32018-01-23 12:01:51 -08005539 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005540}
5541
Neil Birkbecka2893ab2018-06-08 14:45:13 -07005542#if CONFIG_DENOISE
5543static int apply_denoise_2d(AV1_COMP *cpi, YV12_BUFFER_CONFIG *sd,
5544 int block_size, float noise_level,
5545 int64_t time_stamp, int64_t end_time) {
5546 AV1_COMMON *const cm = &cpi->common;
5547 if (!cpi->denoise_and_model) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005548 cpi->denoise_and_model = aom_denoise_and_model_alloc(
5549 cm->seq_params.bit_depth, block_size, noise_level);
Neil Birkbecka2893ab2018-06-08 14:45:13 -07005550 if (!cpi->denoise_and_model) {
5551 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
5552 "Error allocating denoise and model");
5553 return -1;
5554 }
5555 }
5556 if (!cpi->film_grain_table) {
5557 cpi->film_grain_table = aom_malloc(sizeof(*cpi->film_grain_table));
5558 if (!cpi->film_grain_table) {
5559 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
5560 "Error allocating grain table");
5561 return -1;
5562 }
5563 memset(cpi->film_grain_table, 0, sizeof(*cpi->film_grain_table));
5564 }
5565 if (aom_denoise_and_model_run(cpi->denoise_and_model, sd,
5566 &cm->film_grain_params)) {
5567 if (cm->film_grain_params.apply_grain) {
5568 aom_film_grain_table_append(cpi->film_grain_table, time_stamp, end_time,
5569 &cm->film_grain_params);
5570 }
5571 }
5572 return 0;
5573}
5574#endif
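
// Sketch of how apply_denoise_2d() is used (descriptive note added here; see
// av1_receive_raw_frame() below): in CONFIG_DENOISE builds with
// cpi->oxcf.noise_level > 0, each source frame is passed through
// aom_denoise_and_model_run(), which estimates a noise model for it, and the
// resulting film-grain parameters are appended to cpi->film_grain_table
// (when apply_grain is set) so that grain can be re-synthesised at the
// decoder.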
5575
James Zern3e2613b2017-03-30 23:14:40 -07005576int av1_receive_raw_frame(AV1_COMP *cpi, aom_enc_frame_flags_t frame_flags,
Yaowu Xuf883b422016-08-30 14:01:10 -07005577 YV12_BUFFER_CONFIG *sd, int64_t time_stamp,
5578 int64_t end_time) {
5579 AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005580 const SequenceHeader *const seq_params = &cm->seq_params;
Yaowu Xuf883b422016-08-30 14:01:10 -07005581 struct aom_usec_timer timer;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005582 int res = 0;
5583 const int subsampling_x = sd->subsampling_x;
5584 const int subsampling_y = sd->subsampling_y;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005585 const int use_highbitdepth = (sd->flags & YV12_FLAG_HIGHBITDEPTH) != 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005586
Yaowu Xuc27fc142016-08-22 16:08:15 -07005587 check_initial_width(cpi, use_highbitdepth, subsampling_x, subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005588
Yaowu Xuf883b422016-08-30 14:01:10 -07005589 aom_usec_timer_start(&timer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005590
Neil Birkbecka2893ab2018-06-08 14:45:13 -07005591#if CONFIG_DENOISE
5592 if (cpi->oxcf.noise_level > 0)
5593 if (apply_denoise_2d(cpi, sd, cpi->oxcf.noise_block_size,
5594 cpi->oxcf.noise_level, time_stamp, end_time) < 0)
5595 res = -1;
5596#endif // CONFIG_DENOISE
5597
Yaowu Xuf883b422016-08-30 14:01:10 -07005598 if (av1_lookahead_push(cpi->lookahead, sd, time_stamp, end_time,
Yaowu Xud3e7c682017-12-21 14:08:25 -08005599 use_highbitdepth, frame_flags))
Yaowu Xuc27fc142016-08-22 16:08:15 -07005600 res = -1;
Yaowu Xuf883b422016-08-30 14:01:10 -07005601 aom_usec_timer_mark(&timer);
5602 cpi->time_receive_data += aom_usec_timer_elapsed(&timer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005603
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005604 if ((seq_params->profile == PROFILE_0) && !seq_params->monochrome &&
Yaowu Xuc27fc142016-08-22 16:08:15 -07005605 (subsampling_x != 1 || subsampling_y != 1)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005606 aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005607 "Non-4:2:0 color format requires profile 1 or 2");
Yaowu Xuc27fc142016-08-22 16:08:15 -07005608 res = -1;
5609 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005610 if ((seq_params->profile == PROFILE_1) &&
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005611 !(subsampling_x == 0 && subsampling_y == 0)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005612 aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005613 "Profile 1 requires 4:4:4 color format");
5614 res = -1;
5615 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005616 if ((seq_params->profile == PROFILE_2) &&
5617 (seq_params->bit_depth <= AOM_BITS_10) &&
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08005618 !(subsampling_x == 1 && subsampling_y == 0)) {
5619 aom_internal_error(&cm->error, AOM_CODEC_INVALID_PARAM,
5620 "Profile 2 bit-depth < 10 requires 4:2:2 color format");
Yaowu Xuc27fc142016-08-22 16:08:15 -07005621 res = -1;
5622 }
5623
5624 return res;
5625}
5626
Yaowu Xuf883b422016-08-30 14:01:10 -07005627static void adjust_frame_rate(AV1_COMP *cpi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005628 const struct lookahead_entry *source) {
5629 int64_t this_duration;
5630 int step = 0;
5631
5632 if (source->ts_start == cpi->first_time_stamp_ever) {
5633 this_duration = source->ts_end - source->ts_start;
5634 step = 1;
5635 } else {
5636 int64_t last_duration =
5637 cpi->last_end_time_stamp_seen - cpi->last_time_stamp_seen;
5638
5639 this_duration = source->ts_end - cpi->last_end_time_stamp_seen;
5640
5641 // do a step update if the duration changes by 10%
5642 if (last_duration)
5643 step = (int)((this_duration - last_duration) * 10 / last_duration);
5644 }
5645
5646 if (this_duration) {
5647 if (step) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005648 av1_new_framerate(cpi, 10000000.0 / this_duration);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005649 } else {
5650 // Average this frame's rate into the last second's average
5651 // frame rate. If we haven't seen 1 second yet, then average
5652 // over the whole interval seen.
Yaowu Xuf883b422016-08-30 14:01:10 -07005653 const double interval = AOMMIN(
Yaowu Xuc27fc142016-08-22 16:08:15 -07005654 (double)(source->ts_end - cpi->first_time_stamp_ever), 10000000.0);
5655 double avg_duration = 10000000.0 / cpi->framerate;
5656 avg_duration *= (interval - avg_duration + this_duration);
5657 avg_duration /= interval;
5658
Yaowu Xuf883b422016-08-30 14:01:10 -07005659 av1_new_framerate(cpi, 10000000.0 / avg_duration);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005660 }
5661 }
5662 cpi->last_time_stamp_seen = source->ts_start;
5663 cpi->last_end_time_stamp_seen = source->ts_end;
5664}
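
// Worked example for adjust_frame_rate() above (added for illustration; the
// numbers are hypothetical): durations here are interpreted on a scale of
// 10,000,000 ticks per second.  A steady source with last_duration == 400,000
// (25 fps) that receives a frame with this_duration == 430,000 sees a change
// of under 10%, so step == 0 and, with the averaging interval capped at one
// second,
//   avg_duration = 400,000 * (10,000,000 - 400,000 + 430,000) / 10,000,000
//                = 401,200,
// i.e. the reported frame rate eases from 25.0 to about 24.93 fps rather than
// jumping to 10,000,000 / 430,000 ~= 23.3 fps.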
5665
5666// Returns 0 if this is not an alt ref else the offset of the source frame
5667// used as the arf midpoint.
Yaowu Xuf883b422016-08-30 14:01:10 -07005668static int get_arf_src_index(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005669 RATE_CONTROL *const rc = &cpi->rc;
5670 int arf_src_index = 0;
5671 if (is_altref_enabled(cpi)) {
5672 if (cpi->oxcf.pass == 2) {
5673 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5674 if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
5675 arf_src_index = gf_group->arf_src_offset[gf_group->index];
5676 }
5677 } else if (rc->source_alt_ref_pending) {
5678 arf_src_index = rc->frames_till_gf_update_due;
5679 }
5680 }
5681 return arf_src_index;
5682}
5683
Yaowu Xuf883b422016-08-30 14:01:10 -07005684static int get_brf_src_index(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005685 int brf_src_index = 0;
5686 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5687
5688  // TODO(zoeliu): We need to add a check for the -bwd_ref command line
5689  // setup flag.
5690 if (gf_group->bidir_pred_enabled[gf_group->index]) {
5691 if (cpi->oxcf.pass == 2) {
5692 if (gf_group->update_type[gf_group->index] == BRF_UPDATE)
5693 brf_src_index = gf_group->brf_src_offset[gf_group->index];
5694 } else {
5695 // TODO(zoeliu): To re-visit the setup for this scenario
5696 brf_src_index = cpi->rc.bipred_group_interval - 1;
5697 }
5698 }
5699
5700 return brf_src_index;
5701}
Zoe Liue9b15e22017-07-19 15:53:01 -07005702
Zoe Liue9b15e22017-07-19 15:53:01 -07005703// Returns 0 if this is not an alt ref else the offset of the source frame
5704// used as the arf midpoint.
5705static int get_arf2_src_index(AV1_COMP *cpi) {
5706 int arf2_src_index = 0;
5707 if (is_altref_enabled(cpi) && cpi->num_extra_arfs) {
5708 if (cpi->oxcf.pass == 2) {
5709 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5710 if (gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE) {
5711 arf2_src_index = gf_group->arf_src_offset[gf_group->index];
5712 }
5713 }
5714 }
5715 return arf2_src_index;
5716}
Yaowu Xuc27fc142016-08-22 16:08:15 -07005717
Yaowu Xuf883b422016-08-30 14:01:10 -07005718static void check_src_altref(AV1_COMP *cpi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005719 const struct lookahead_entry *source) {
5720 RATE_CONTROL *const rc = &cpi->rc;
5721
5722 // If pass == 2, the parameters set here will be reset in
Yaowu Xuf883b422016-08-30 14:01:10 -07005723 // av1_rc_get_second_pass_params()
Yaowu Xuc27fc142016-08-22 16:08:15 -07005724
5725 if (cpi->oxcf.pass == 2) {
5726 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
5727 rc->is_src_frame_alt_ref =
Yaowu Xuc27fc142016-08-22 16:08:15 -07005728 (gf_group->update_type[gf_group->index] == INTNL_OVERLAY_UPDATE) ||
Yaowu Xuc27fc142016-08-22 16:08:15 -07005729 (gf_group->update_type[gf_group->index] == OVERLAY_UPDATE);
Zoe Liue9b15e22017-07-19 15:53:01 -07005730 rc->is_src_frame_ext_arf =
5731 gf_group->update_type[gf_group->index] == INTNL_OVERLAY_UPDATE;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005732 } else {
5733 rc->is_src_frame_alt_ref =
5734 cpi->alt_ref_source && (source == cpi->alt_ref_source);
5735 }
5736
5737 if (rc->is_src_frame_alt_ref) {
5738 // Current frame is an ARF overlay frame.
5739 cpi->alt_ref_source = NULL;
5740
Zoe Liue9b15e22017-07-19 15:53:01 -07005741 if (rc->is_src_frame_ext_arf && !cpi->common.show_existing_frame) {
5742 // For INTNL_OVERLAY, when show_existing_frame == 0, they do need to
5743 // refresh the LAST_FRAME, i.e. LAST3 gets retired, LAST2 becomes LAST3,
5744 // LAST becomes LAST2, and INTNL_OVERLAY becomes LAST.
5745 cpi->refresh_last_frame = 1;
5746 } else {
Zoe Liue9b15e22017-07-19 15:53:01 -07005747 // Don't refresh the last buffer for an ARF overlay frame. It will
5748 // become the GF so preserve last as an alternative prediction option.
5749 cpi->refresh_last_frame = 0;
Zoe Liue9b15e22017-07-19 15:53:01 -07005750 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005751 }
5752}
5753
5754#if CONFIG_INTERNAL_STATS
Yaowu Xuf883b422016-08-30 14:01:10 -07005755extern double av1_get_blockiness(const unsigned char *img1, int img1_pitch,
5756 const unsigned char *img2, int img2_pitch,
5757 int width, int height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005758
5759static void adjust_image_stat(double y, double u, double v, double all,
5760 ImageStat *s) {
Wan-Teh Changc25c92a2018-04-23 15:04:14 -07005761 s->stat[STAT_Y] += y;
5762 s->stat[STAT_U] += u;
5763 s->stat[STAT_V] += v;
5764 s->stat[STAT_ALL] += all;
Yaowu Xuf883b422016-08-30 14:01:10 -07005765 s->worst = AOMMIN(s->worst, all);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005766}
5767
Angie Chiang08a22a62017-07-17 17:29:17 -07005768static void compute_internal_stats(AV1_COMP *cpi, int frame_bytes) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005769 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005770 double samples = 0.0;
5771 uint32_t in_bit_depth = 8;
5772 uint32_t bit_depth = 8;
5773
Angie Chiang08a22a62017-07-17 17:29:17 -07005774#if CONFIG_INTER_STATS_ONLY
David Turnerd2a592e2018-11-16 14:59:31 +00005775 if (cm->current_frame.frame_type == KEY_FRAME) return; // skip key frame
Angie Chiang08a22a62017-07-17 17:29:17 -07005776#endif
5777 cpi->bytes += frame_bytes;
5778
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005779 if (cm->seq_params.use_highbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005780 in_bit_depth = cpi->oxcf.input_bit_depth;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005781 bit_depth = cm->seq_params.bit_depth;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005782 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005783 if (cm->show_frame) {
Alex Conversef77fd0b2017-04-20 11:00:24 -07005784 const YV12_BUFFER_CONFIG *orig = cpi->source;
David Turnerc29e1a92018-12-06 14:10:14 +00005785 const YV12_BUFFER_CONFIG *recon = &cpi->common.cur_frame->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005786 double y, u, v, frame_all;
5787
5788 cpi->count++;
5789 if (cpi->b_calculate_psnr) {
5790 PSNR_STATS psnr;
5791 double frame_ssim2 = 0.0, weight = 0.0;
Yaowu Xuf883b422016-08-30 14:01:10 -07005792 aom_clear_system_state();
Yaowu Xud3e7c682017-12-21 14:08:25 -08005793 // TODO(yaowu): unify these two versions into one.
Yaowu Xuf883b422016-08-30 14:01:10 -07005794 aom_calc_highbd_psnr(orig, recon, &psnr, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005795
5796 adjust_image_stat(psnr.psnr[1], psnr.psnr[2], psnr.psnr[3], psnr.psnr[0],
5797 &cpi->psnr);
5798 cpi->total_sq_error += psnr.sse[0];
5799 cpi->total_samples += psnr.samples[0];
5800 samples = psnr.samples[0];
Yaowu Xud3e7c682017-12-21 14:08:25 -08005801 // TODO(yaowu): unify these two versions into one.
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005802 if (cm->seq_params.use_highbitdepth)
Yaowu Xuc27fc142016-08-22 16:08:15 -07005803 frame_ssim2 =
Yaowu Xuf883b422016-08-30 14:01:10 -07005804 aom_highbd_calc_ssim(orig, recon, &weight, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005805 else
Yaowu Xuf883b422016-08-30 14:01:10 -07005806 frame_ssim2 = aom_calc_ssim(orig, recon, &weight);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005807
Yaowu Xuf883b422016-08-30 14:01:10 -07005808 cpi->worst_ssim = AOMMIN(cpi->worst_ssim, frame_ssim2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005809 cpi->summed_quality += frame_ssim2 * weight;
5810 cpi->summed_weights += weight;
5811
5812#if 0
5813 {
5814 FILE *f = fopen("q_used.stt", "a");
Zoe Liuee202be2017-11-17 12:14:33 -08005815 double y2 = psnr.psnr[1];
5816 double u2 = psnr.psnr[2];
5817 double v2 = psnr.psnr[3];
5818 double frame_psnr2 = psnr.psnr[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07005819      fprintf(f, "%5d : Y%7.3f:U%7.3f:V%7.3f:F%7.3f:S%7.3f\n",
David Turnerd2a592e2018-11-16 14:59:31 +00005820 cm->current_frame.frame_number, y2, u2, v2,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005821 frame_psnr2, frame_ssim2);
5822 fclose(f);
5823 }
5824#endif
5825 }
5826 if (cpi->b_calculate_blockiness) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005827 if (!cm->seq_params.use_highbitdepth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005828 const double frame_blockiness =
Yaowu Xuf883b422016-08-30 14:01:10 -07005829 av1_get_blockiness(orig->y_buffer, orig->y_stride, recon->y_buffer,
5830 recon->y_stride, orig->y_width, orig->y_height);
5831 cpi->worst_blockiness = AOMMAX(cpi->worst_blockiness, frame_blockiness);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005832 cpi->total_blockiness += frame_blockiness;
5833 }
5834
5835 if (cpi->b_calculate_consistency) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07005836 if (!cm->seq_params.use_highbitdepth) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005837 const double this_inconsistency = aom_get_ssim_metrics(
Yaowu Xuc27fc142016-08-22 16:08:15 -07005838 orig->y_buffer, orig->y_stride, recon->y_buffer, recon->y_stride,
5839 orig->y_width, orig->y_height, cpi->ssim_vars, &cpi->metrics, 1);
5840
5841 const double peak = (double)((1 << in_bit_depth) - 1);
5842 const double consistency =
Yaowu Xuf883b422016-08-30 14:01:10 -07005843 aom_sse_to_psnr(samples, peak, cpi->total_inconsistency);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005844 if (consistency > 0.0)
5845 cpi->worst_consistency =
Yaowu Xuf883b422016-08-30 14:01:10 -07005846 AOMMIN(cpi->worst_consistency, consistency);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005847 cpi->total_inconsistency += this_inconsistency;
5848 }
5849 }
5850 }
5851
5852 frame_all =
Yaowu Xuf883b422016-08-30 14:01:10 -07005853 aom_calc_fastssim(orig, recon, &y, &u, &v, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005854 adjust_image_stat(y, u, v, frame_all, &cpi->fastssim);
Yaowu Xuf883b422016-08-30 14:01:10 -07005855 frame_all = aom_psnrhvs(orig, recon, &y, &u, &v, bit_depth, in_bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005856 adjust_image_stat(y, u, v, frame_all, &cpi->psnrhvs);
5857 }
5858}
5859#endif // CONFIG_INTERNAL_STATS
5860
RogerZhou3b635242017-09-19 10:06:46 -07005861static int is_integer_mv(AV1_COMP *cpi, const YV12_BUFFER_CONFIG *cur_picture,
5862 const YV12_BUFFER_CONFIG *last_picture,
5863 hash_table *last_hash_table) {
5864 aom_clear_system_state();
5865  // Check whether to use hash-based motion estimation.
5866 int k;
5867 uint32_t hash_value_1;
5868 uint32_t hash_value_2;
5869
5870 const int block_size = 8;
5871 const double threshold_current = 0.8;
5872 const double threshold_average = 0.95;
5873 const int max_history_size = 32;
5874  int T = 0;  // total number of blocks
5875  int C = 0;  // blocks that match the collocated block
5876  int S = 0;  // smooth blocks that do not match the collocated block
5877  int M = 0;  // blocks that match some other (hashed) block
5878
5879 const int pic_width = cur_picture->y_width;
5880 const int pic_height = cur_picture->y_height;
5881 for (int i = 0; i + block_size <= pic_height; i += block_size) {
5882 for (int j = 0; j + block_size <= pic_width; j += block_size) {
5883 const int x_pos = j;
5884 const int y_pos = i;
5885 int match = 1;
5886 T++;
5887
5888      // Check whether the collocated block matches the current block.
5889 uint8_t *p_cur = cur_picture->y_buffer;
5890 uint8_t *p_ref = last_picture->y_buffer;
5891 int stride_cur = cur_picture->y_stride;
5892 int stride_ref = last_picture->y_stride;
5893 p_cur += (y_pos * stride_cur + x_pos);
5894 p_ref += (y_pos * stride_ref + x_pos);
5895
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005896 if (cur_picture->flags & YV12_FLAG_HIGHBITDEPTH) {
5897 uint16_t *p16_cur = CONVERT_TO_SHORTPTR(p_cur);
5898 uint16_t *p16_ref = CONVERT_TO_SHORTPTR(p_ref);
5899 for (int tmpY = 0; tmpY < block_size && match; tmpY++) {
5900 for (int tmpX = 0; tmpX < block_size && match; tmpX++) {
5901 if (p16_cur[tmpX] != p16_ref[tmpX]) {
5902 match = 0;
5903 }
RogerZhou3b635242017-09-19 10:06:46 -07005904 }
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005905 p16_cur += stride_cur;
5906 p16_ref += stride_ref;
RogerZhou3b635242017-09-19 10:06:46 -07005907 }
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005908 } else {
5909 for (int tmpY = 0; tmpY < block_size && match; tmpY++) {
5910 for (int tmpX = 0; tmpX < block_size && match; tmpX++) {
5911 if (p_cur[tmpX] != p_ref[tmpX]) {
5912 match = 0;
5913 }
5914 }
5915 p_cur += stride_cur;
5916 p_ref += stride_ref;
5917 }
RogerZhou3b635242017-09-19 10:06:46 -07005918 }
5919
5920 if (match) {
5921 C++;
5922 continue;
5923 }
5924
5925 if (av1_hash_is_horizontal_perfect(cur_picture, block_size, x_pos,
5926 y_pos) ||
5927 av1_hash_is_vertical_perfect(cur_picture, block_size, x_pos, y_pos)) {
5928 S++;
5929 continue;
5930 }
5931
5932 av1_get_block_hash_value(
5933 cur_picture->y_buffer + y_pos * stride_cur + x_pos, stride_cur,
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005934 block_size, &hash_value_1, &hash_value_2,
Ravi Chaudhary783d6a32018-08-28 18:21:02 +05305935 (cur_picture->flags & YV12_FLAG_HIGHBITDEPTH), &cpi->td.mb);
Debargha Mukherjee1c583012018-02-28 22:16:16 -08005936 // Hashing does not work for highbitdepth currently.
5937 // TODO(Roger): Make it work for highbitdepth.
5938 if (av1_use_hash_me(&cpi->common)) {
5939 if (av1_has_exact_match(last_hash_table, hash_value_1, hash_value_2)) {
5940 M++;
5941 }
RogerZhou3b635242017-09-19 10:06:46 -07005942 }
5943 }
5944 }
5945
5946 assert(T > 0);
5947 double csm_rate = ((double)(C + S + M)) / ((double)(T));
5948 double m_rate = ((double)(M)) / ((double)(T));
5949
5950 cpi->csm_rate_array[cpi->rate_index] = csm_rate;
5951 cpi->m_rate_array[cpi->rate_index] = m_rate;
5952
5953 cpi->rate_index = (cpi->rate_index + 1) % max_history_size;
5954 cpi->rate_size++;
5955 cpi->rate_size = AOMMIN(cpi->rate_size, max_history_size);
5956
5957 if (csm_rate < threshold_current) {
5958 return 0;
5959 }
5960
5961 if (C == T) {
5962 return 1;
5963 }
5964
5965 double csm_average = 0.0;
5966 double m_average = 0.0;
5967
5968 for (k = 0; k < cpi->rate_size; k++) {
5969 csm_average += cpi->csm_rate_array[k];
5970 m_average += cpi->m_rate_array[k];
5971 }
5972 csm_average /= cpi->rate_size;
5973 m_average /= cpi->rate_size;
5974
5975 if (csm_average < threshold_average) {
5976 return 0;
5977 }
5978
5979 if (M > (T - C - S) / 3) {
5980 return 1;
5981 }
5982
5983 if (csm_rate > 0.99 && m_rate > 0.01) {
5984 return 1;
5985 }
5986
5987 if (csm_average + m_average > 1.01) {
5988 return 1;
5989 }
5990
5991 return 0;
5992}
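
// Worked example for is_integer_mv() above (added for illustration; the
// numbers are hypothetical): a 640x480 screen-content frame has
// T = 80 * 60 = 4800 8x8 blocks.  If C = 4000 of them are bit-exact with
// their collocated blocks, S = 500 are horizontally or vertically "perfect"
// (smooth), and M = 200 hash-match some other block, then
// csm_rate = 4700 / 4800 ~= 0.98 >= 0.8, and with only this frame in the
// history csm_average ~= 0.98 >= 0.95; since M = 200 > (T - C - S) / 3 = 100,
// the function returns 1 and the frame is treated as integer-MV screen
// content.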
RogerZhou3b635242017-09-19 10:06:46 -07005993
Yue Chen7cae98f2018-08-24 10:43:16 -07005994// Code for temporal dependency model
5995typedef struct GF_PICTURE {
5996 YV12_BUFFER_CONFIG *frame;
5997 int ref_frame[7];
5998} GF_PICTURE;
5999
Sarah Parkercf644442018-10-11 15:23:44 -07006000static void init_gop_frames(AV1_COMP *cpi, GF_PICTURE *gf_picture,
6001 const GF_GROUP *gf_group, int *tpl_group_frames) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006002 AV1_COMMON *cm = &cpi->common;
6003 const SequenceHeader *const seq_params = &cm->seq_params;
6004 int frame_idx = 0;
6005 int i;
6006 int gld_index = -1;
6007 int alt_index = -1;
6008 int lst_index = -1;
6009 int extend_frame_count = 0;
6010 int pframe_qindex = cpi->tpl_stats[2].base_qindex;
6011
6012 RefCntBuffer *frame_bufs = cm->buffer_pool->frame_bufs;
6013 int recon_frame_index[INTER_REFS_PER_FRAME + 1] = { -1, -1, -1, -1,
6014 -1, -1, -1, -1 };
6015
6016 // TODO(jingning): To be used later for gf frame type parsing.
6017 (void)gf_group;
6018
6019 for (i = 0; i < FRAME_BUFFERS && frame_idx < INTER_REFS_PER_FRAME + 1; ++i) {
6020 if (frame_bufs[i].ref_count == 0) {
David Turnere7ebf902018-12-04 14:04:55 +00006021 alloc_frame_mvs(cm, &frame_bufs[i]);
Yue Chen7cae98f2018-08-24 10:43:16 -07006022 if (aom_realloc_frame_buffer(
6023 &frame_bufs[i].buf, cm->width, cm->height,
6024 seq_params->subsampling_x, seq_params->subsampling_y,
Satish Kumar Suman29909962019-01-09 10:31:21 +05306025 seq_params->use_highbitdepth, cpi->oxcf.border_in_pixels,
Yue Chen7cae98f2018-08-24 10:43:16 -07006026 cm->byte_alignment, NULL, NULL, NULL))
6027 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
6028 "Failed to allocate frame buffer");
6029
6030 recon_frame_index[frame_idx] = i;
6031 ++frame_idx;
6032 }
6033 }
6034
6035 for (i = 0; i < INTER_REFS_PER_FRAME + 1; ++i) {
6036 assert(recon_frame_index[i] >= 0);
6037 cpi->tpl_recon_frames[i] = &frame_bufs[recon_frame_index[i]].buf;
6038 }
6039
6040 *tpl_group_frames = 0;
6041
6042 // Initialize Golden reference frame.
David Turnera21966b2018-12-05 14:48:49 +00006043 gf_picture[0].frame = NULL;
6044 RefCntBuffer *ref_buf = get_ref_frame_buf(cm, GOLDEN_FRAME);
6045 if (ref_buf) gf_picture[0].frame = &ref_buf->buf;
Yue Chen7cae98f2018-08-24 10:43:16 -07006046 for (i = 0; i < 7; ++i) gf_picture[0].ref_frame[i] = -1;
6047 gld_index = 0;
6048 ++*tpl_group_frames;
6049
6050 // Initialize ARF frame
6051 gf_picture[1].frame = cpi->source;
6052 gf_picture[1].ref_frame[0] = gld_index;
6053 gf_picture[1].ref_frame[1] = lst_index;
6054 gf_picture[1].ref_frame[2] = alt_index;
6055 // TODO(yuec) Need o figure out full AV1 reference model
6056 for (i = 3; i < 7; ++i) gf_picture[1].ref_frame[i] = -1;
6057 alt_index = 1;
6058 ++*tpl_group_frames;
6059
6060 // Initialize P frames
6061 for (frame_idx = 2; frame_idx < MAX_LAG_BUFFERS; ++frame_idx) {
6062 struct lookahead_entry *buf =
6063 av1_lookahead_peek(cpi->lookahead, frame_idx - 2);
6064
6065 if (buf == NULL) break;
6066
6067 gf_picture[frame_idx].frame = &buf->img;
6068 gf_picture[frame_idx].ref_frame[0] = gld_index;
6069 gf_picture[frame_idx].ref_frame[1] = lst_index;
6070 gf_picture[frame_idx].ref_frame[2] = alt_index;
6071 for (i = 3; i < 7; ++i) gf_picture[frame_idx].ref_frame[i] = -1;
6072
6073 ++*tpl_group_frames;
6074 lst_index = frame_idx;
6075
6076 if (frame_idx == cpi->rc.baseline_gf_interval + 1) break;
6077 }
6078
6079 gld_index = frame_idx;
6080 lst_index = AOMMAX(0, frame_idx - 1);
6081 alt_index = -1;
6082 ++frame_idx;
6083
6084 // Extend two frames outside the current gf group.
6085 for (; frame_idx < MAX_LAG_BUFFERS && extend_frame_count < 2; ++frame_idx) {
6086 struct lookahead_entry *buf =
6087 av1_lookahead_peek(cpi->lookahead, frame_idx - 2);
6088
6089 if (buf == NULL) break;
6090
6091 cpi->tpl_stats[frame_idx].base_qindex = pframe_qindex;
6092
6093 gf_picture[frame_idx].frame = &buf->img;
6094 gf_picture[frame_idx].ref_frame[0] = gld_index;
6095 gf_picture[frame_idx].ref_frame[1] = lst_index;
6096 gf_picture[frame_idx].ref_frame[2] = alt_index;
6097 for (i = 3; i < 7; ++i) gf_picture[frame_idx].ref_frame[i] = -1;
6098 lst_index = frame_idx;
6099 ++*tpl_group_frames;
6100 ++extend_frame_count;
6101 }
6102}
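
// Layout of gf_picture[] as filled in by init_gop_frames() above (descriptive
// sketch; the exact indices depend on the GF group being encoded):
//   gf_picture[0]         - the GOLDEN_FRAME reconstruction (may be NULL)
//   gf_picture[1]         - the ARF, i.e. cpi->source
//   gf_picture[2..N]      - P frames of the GF group, taken from the
//                           lookahead in display order
//   gf_picture[N+1..N+2]  - up to two frames extending past the GF group
// Each entry's ref_frame[0..2] holds the gf_picture indices used as the
// golden / last / altref references, with -1 meaning "not available".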
6103
Sarah Parkercf644442018-10-11 15:23:44 -07006104static void init_tpl_stats(AV1_COMP *cpi) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006105 int frame_idx;
6106 for (frame_idx = 0; frame_idx < MAX_LAG_BUFFERS; ++frame_idx) {
6107 TplDepFrame *tpl_frame = &cpi->tpl_stats[frame_idx];
6108 memset(tpl_frame->tpl_stats_ptr, 0,
6109 tpl_frame->height * tpl_frame->width *
6110 sizeof(*tpl_frame->tpl_stats_ptr));
6111 tpl_frame->is_valid = 0;
6112 }
6113}
6114
Sarah Parkercf644442018-10-11 15:23:44 -07006115static uint32_t motion_compensated_prediction(AV1_COMP *cpi, ThreadData *td,
6116 uint8_t *cur_frame_buf,
6117 uint8_t *ref_frame_buf,
6118 int stride, BLOCK_SIZE bsize,
6119 int mi_row, int mi_col) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006120 AV1_COMMON *cm = &cpi->common;
6121 MACROBLOCK *const x = &td->mb;
6122 MACROBLOCKD *const xd = &x->e_mbd;
6123 MV_SPEED_FEATURES *const mv_sf = &cpi->sf.mv;
6124 const SEARCH_METHODS search_method = NSTEP;
6125 int step_param;
6126 int sadpb = x->sadperbit16;
6127 uint32_t bestsme = UINT_MAX;
6128 int distortion;
6129 uint32_t sse;
6130 int cost_list[5];
6131 const MvLimits tmp_mv_limits = x->mv_limits;
6132
6133 MV best_ref_mv1 = { 0, 0 };
6134 MV best_ref_mv1_full; /* full-pixel value of best_ref_mv1 */
6135
6136 best_ref_mv1_full.col = best_ref_mv1.col >> 3;
6137 best_ref_mv1_full.row = best_ref_mv1.row >> 3;
6138
6139 // Setup frame pointers
6140 x->plane[0].src.buf = cur_frame_buf;
6141 x->plane[0].src.stride = stride;
6142 xd->plane[0].pre[0].buf = ref_frame_buf;
6143 xd->plane[0].pre[0].stride = stride;
6144
6145 step_param = mv_sf->reduce_first_step_size;
6146 step_param = AOMMIN(step_param, MAX_MVSEARCH_STEPS - 2);
6147
6148 av1_set_mv_search_range(&x->mv_limits, &best_ref_mv1);
6149
6150 av1_full_pixel_search(cpi, x, bsize, &best_ref_mv1_full, step_param,
6151 search_method, 0, sadpb, cond_cost_list(cpi, cost_list),
6152 &best_ref_mv1, INT_MAX, 0, (MI_SIZE * mi_col),
6153 (MI_SIZE * mi_row), 0);
6154
6155 /* restore UMV window */
6156 x->mv_limits = tmp_mv_limits;
6157
6158 const int pw = block_size_wide[bsize];
6159 const int ph = block_size_high[bsize];
6160 bestsme = cpi->find_fractional_mv_step(
6161 x, cm, mi_row, mi_col, &best_ref_mv1, cpi->common.allow_high_precision_mv,
6162 x->errorperbit, &cpi->fn_ptr[bsize], 0, mv_sf->subpel_iters_per_step,
6163 cond_cost_list(cpi, cost_list), NULL, NULL, &distortion, &sse, NULL, NULL,
6164 0, 0, pw, ph, 1, 1);
6165
6166 return bestsme;
6167}
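
// Descriptive note for motion_compensated_prediction() above: it runs an
// NSTEP full-pel search around a zero MV predictor within the clamped
// x->mv_limits, restores the original limits, and then refines the result to
// sub-pel precision with cpi->find_fractional_mv_step().  The returned value
// is the error of the refined vector; the vector itself is left in x->best_mv
// for the caller (mode_estimation() below reads x->best_mv.as_int).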
6168
Sarah Parkercf644442018-10-11 15:23:44 -07006169static int get_overlap_area(int grid_pos_row, int grid_pos_col, int ref_pos_row,
6170 int ref_pos_col, int block, BLOCK_SIZE bsize) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006171 int width = 0, height = 0;
6172 int bw = 4 << mi_size_wide_log2[bsize];
6173 int bh = 4 << mi_size_high_log2[bsize];
6174
6175 switch (block) {
6176 case 0:
6177 width = grid_pos_col + bw - ref_pos_col;
6178 height = grid_pos_row + bh - ref_pos_row;
6179 break;
6180 case 1:
6181 width = ref_pos_col + bw - grid_pos_col;
6182 height = grid_pos_row + bh - ref_pos_row;
6183 break;
6184 case 2:
6185 width = grid_pos_col + bw - ref_pos_col;
6186 height = ref_pos_row + bh - grid_pos_row;
6187 break;
6188 case 3:
6189 width = ref_pos_col + bw - grid_pos_col;
6190 height = ref_pos_row + bh - grid_pos_row;
6191 break;
6192 default: assert(0);
6193 }
6194
6195 return width * height;
6196}
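
// Worked example for get_overlap_area() above (added for illustration; the
// positions are hypothetical): a 16x16 block whose motion-compensated
// position lands at ref_pos = (row 35, col 21) straddles the four
// grid-aligned 16x16 blocks starting at rows {32, 48} and cols {16, 32}
// (see round_floor() and tpl_model_update_b() below), giving
//   block 0: (16 + 16 - 21) * (32 + 16 - 35) = 11 * 13 = 143 pixels
//   block 1: (21 + 16 - 32) * (32 + 16 - 35) =  5 * 13 =  65 pixels
//   block 2: (16 + 16 - 21) * (35 + 16 - 48) = 11 *  3 =  33 pixels
//   block 3: (21 + 16 - 32) * (35 + 16 - 48) =  5 *  3 =  15 pixels
// which together tile the full 16 * 16 = 256 pixel block.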
6197
Sarah Parkercf644442018-10-11 15:23:44 -07006198static int round_floor(int ref_pos, int bsize_pix) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006199 int round;
6200 if (ref_pos < 0)
6201 round = -(1 + (-ref_pos - 1) / bsize_pix);
6202 else
6203 round = ref_pos / bsize_pix;
6204
6205 return round;
6206}
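
// round_floor() above is floor division for (possibly negative) pixel
// positions, e.g. round_floor(21, 16) == 1, round_floor(-5, 16) == -1 and
// round_floor(-17, 16) == -2, so negative reference positions still snap to
// the correct grid block.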
6207
Sarah Parkercf644442018-10-11 15:23:44 -07006208static void tpl_model_store(TplDepStats *tpl_stats, int mi_row, int mi_col,
6209 BLOCK_SIZE bsize, int stride,
6210 const TplDepStats *src_stats) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006211 const int mi_height = mi_size_high[bsize];
6212 const int mi_width = mi_size_wide[bsize];
6213 int idx, idy;
6214
6215 int64_t intra_cost = src_stats->intra_cost / (mi_height * mi_width);
6216 int64_t inter_cost = src_stats->inter_cost / (mi_height * mi_width);
6217
6218 TplDepStats *tpl_ptr;
6219
6220 intra_cost = AOMMAX(1, intra_cost);
6221 inter_cost = AOMMAX(1, inter_cost);
6222
6223 for (idy = 0; idy < mi_height; ++idy) {
6224 tpl_ptr = &tpl_stats[(mi_row + idy) * stride + mi_col];
6225 for (idx = 0; idx < mi_width; ++idx) {
6226 tpl_ptr->intra_cost = intra_cost;
6227 tpl_ptr->inter_cost = inter_cost;
6228 tpl_ptr->mc_dep_cost = tpl_ptr->intra_cost + tpl_ptr->mc_flow;
6229 tpl_ptr->ref_frame_index = src_stats->ref_frame_index;
6230 tpl_ptr->mv.as_int = src_stats->mv.as_int;
6231 ++tpl_ptr;
6232 }
6233 }
6234}
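
// Descriptive note for tpl_model_store() above: one prediction block's
// statistics are spread over the 4x4 mi grid, i.e. intra_cost and inter_cost
// are divided by the number of mi units in the block (floored at 1) and every
// covered mi cell receives the same per-unit costs, reference index and
// motion vector.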
6235
Sarah Parkercf644442018-10-11 15:23:44 -07006236static void tpl_model_update_b(TplDepFrame *tpl_frame, TplDepStats *tpl_stats,
6237 int mi_row, int mi_col, const BLOCK_SIZE bsize) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006238 TplDepFrame *ref_tpl_frame = &tpl_frame[tpl_stats->ref_frame_index];
6239 TplDepStats *ref_stats = ref_tpl_frame->tpl_stats_ptr;
6240 MV mv = tpl_stats->mv.as_mv;
6241 int mv_row = mv.row >> 3;
6242 int mv_col = mv.col >> 3;
6243
6244 int ref_pos_row = mi_row * MI_SIZE + mv_row;
6245 int ref_pos_col = mi_col * MI_SIZE + mv_col;
6246
6247 const int bw = 4 << mi_size_wide_log2[bsize];
6248 const int bh = 4 << mi_size_high_log2[bsize];
6249 const int mi_height = mi_size_high[bsize];
6250 const int mi_width = mi_size_wide[bsize];
6251 const int pix_num = bw * bh;
6252
6253  // top-left grid block location, in pixels
6254 int grid_pos_row_base = round_floor(ref_pos_row, bh) * bh;
6255 int grid_pos_col_base = round_floor(ref_pos_col, bw) * bw;
6256 int block;
6257
6258 for (block = 0; block < 4; ++block) {
6259 int grid_pos_row = grid_pos_row_base + bh * (block >> 1);
6260 int grid_pos_col = grid_pos_col_base + bw * (block & 0x01);
6261
6262 if (grid_pos_row >= 0 && grid_pos_row < ref_tpl_frame->mi_rows * MI_SIZE &&
6263 grid_pos_col >= 0 && grid_pos_col < ref_tpl_frame->mi_cols * MI_SIZE) {
6264 int overlap_area = get_overlap_area(
6265 grid_pos_row, grid_pos_col, ref_pos_row, ref_pos_col, block, bsize);
6266 int ref_mi_row = round_floor(grid_pos_row, bh) * mi_height;
6267 int ref_mi_col = round_floor(grid_pos_col, bw) * mi_width;
6268
6269 int64_t mc_flow = tpl_stats->mc_dep_cost -
6270 (tpl_stats->mc_dep_cost * tpl_stats->inter_cost) /
6271 tpl_stats->intra_cost;
6272
6273 int idx, idy;
6274
6275 for (idy = 0; idy < mi_height; ++idy) {
6276 for (idx = 0; idx < mi_width; ++idx) {
6277 TplDepStats *des_stats =
6278 &ref_stats[(ref_mi_row + idy) * ref_tpl_frame->stride +
6279 (ref_mi_col + idx)];
6280
6281 des_stats->mc_flow += (mc_flow * overlap_area) / pix_num;
6282 des_stats->mc_ref_cost +=
6283 ((tpl_stats->intra_cost - tpl_stats->inter_cost) * overlap_area) /
6284 pix_num;
6285 assert(overlap_area >= 0);
6286 }
6287 }
6288 }
6289 }
6290}
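
// The propagation rule implemented above, written out as a stand-alone sketch
// (disabled code added for illustration only; the helper below is
// hypothetical and not used by the encoder): the fraction of a block's
// dependency cost attributable to inter prediction from its reference,
// mc_dep_cost * (1 - inter_cost / intra_cost), flows back to each overlapped
// reference block in proportion to the overlapped area.
#if 0
static int64_t propagated_flow_sketch(const TplDepStats *stats,
                                      int overlap_area, int pix_num) {
  // Portion of the dependency cost explained by inter prediction.
  const int64_t mc_flow =
      stats->mc_dep_cost -
      (stats->mc_dep_cost * stats->inter_cost) / stats->intra_cost;
  // Example: intra_cost = 1000, inter_cost = 400, mc_dep_cost = 1600 gives
  // mc_flow = 960; a reference block covering a quarter of the area
  // (overlap_area / pix_num == 1 / 4) receives 240 of it.
  return (mc_flow * overlap_area) / pix_num;
}
#endif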
6291
Sarah Parkercf644442018-10-11 15:23:44 -07006292static void tpl_model_update(TplDepFrame *tpl_frame, TplDepStats *tpl_stats,
6293 int mi_row, int mi_col, const BLOCK_SIZE bsize) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006294 int idx, idy;
6295 const int mi_height = mi_size_high[bsize];
6296 const int mi_width = mi_size_wide[bsize];
6297
6298 for (idy = 0; idy < mi_height; ++idy) {
6299 for (idx = 0; idx < mi_width; ++idx) {
6300 TplDepStats *tpl_ptr =
6301 &tpl_stats[(mi_row + idy) * tpl_frame->stride + (mi_col + idx)];
6302 tpl_model_update_b(tpl_frame, tpl_ptr, mi_row + idy, mi_col + idx,
6303 BLOCK_4X4);
6304 }
6305 }
6306}
6307
Sarah Parkercf644442018-10-11 15:23:44 -07006308static void get_quantize_error(MACROBLOCK *x, int plane, tran_low_t *coeff,
6309 tran_low_t *qcoeff, tran_low_t *dqcoeff,
6310 TX_SIZE tx_size, int64_t *recon_error,
6311 int64_t *sse) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006312 const struct macroblock_plane *const p = &x->plane[plane];
6313 const SCAN_ORDER *const scan_order = &av1_default_scan_orders[tx_size];
6314 uint16_t eob;
6315 int pix_num = 1 << num_pels_log2_lookup[txsize_to_bsize[tx_size]];
6316 const int shift = tx_size == TX_32X32 ? 0 : 2;
6317
6318 av1_quantize_fp_32x32(coeff, pix_num, p->zbin_QTX, p->round_fp_QTX,
6319 p->quant_fp_QTX, p->quant_shift_QTX, qcoeff, dqcoeff,
6320 p->dequant_QTX, &eob, scan_order->scan,
6321 scan_order->iscan);
6322
6323 *recon_error = av1_block_error(coeff, dqcoeff, pix_num, sse) >> shift;
6324 *recon_error = AOMMAX(*recon_error, 1);
6325
6326 *sse = (*sse) >> shift;
6327 *sse = AOMMAX(*sse, 1);
6328}
6329
Sarah Parkercf644442018-10-11 15:23:44 -07006330static void wht_fwd_txfm(int16_t *src_diff, int bw, tran_low_t *coeff,
6331 TX_SIZE tx_size) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006332 switch (tx_size) {
6333 case TX_8X8: aom_hadamard_8x8(src_diff, bw, coeff); break;
6334 case TX_16X16: aom_hadamard_16x16(src_diff, bw, coeff); break;
6335 case TX_32X32: aom_hadamard_32x32(src_diff, bw, coeff); break;
6336 default: assert(0);
6337 }
6338}
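
// Descriptive note: wht_fwd_txfm() above applies a Hadamard transform to the
// residual, and mode_estimation() below takes the SATD of those coefficients
// (aom_satd()) as a fast transform-domain estimate of the intra and inter
// prediction costs.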
6339
Sarah Parkercf644442018-10-11 15:23:44 -07006340static void mode_estimation(AV1_COMP *cpi, MACROBLOCK *x, MACROBLOCKD *xd,
6341 struct scale_factors *sf, GF_PICTURE *gf_picture,
6342 int frame_idx, int16_t *src_diff, tran_low_t *coeff,
6343 tran_low_t *qcoeff, tran_low_t *dqcoeff, int mi_row,
6344 int mi_col, BLOCK_SIZE bsize, TX_SIZE tx_size,
6345 YV12_BUFFER_CONFIG *ref_frame[], uint8_t *predictor,
6346 int64_t *recon_error, int64_t *sse,
6347 TplDepStats *tpl_stats) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006348 AV1_COMMON *cm = &cpi->common;
6349 ThreadData *td = &cpi->td;
6350
6351 const int bw = 4 << mi_size_wide_log2[bsize];
6352 const int bh = 4 << mi_size_high_log2[bsize];
6353 const int pix_num = bw * bh;
6354 int best_rf_idx = -1;
6355 int_mv best_mv;
6356 int64_t best_inter_cost = INT64_MAX;
6357 int64_t inter_cost;
6358 int rf_idx;
6359 const InterpFilters kernel =
6360 av1_make_interp_filters(EIGHTTAP_REGULAR, EIGHTTAP_REGULAR);
6361
6362 int64_t best_intra_cost = INT64_MAX;
6363 int64_t intra_cost;
6364 PREDICTION_MODE mode;
6365 int mb_y_offset = mi_row * MI_SIZE * xd->cur_buf->y_stride + mi_col * MI_SIZE;
6366 MB_MODE_INFO mi_above, mi_left;
6367
6368 memset(tpl_stats, 0, sizeof(*tpl_stats));
6369
6370 xd->mb_to_top_edge = -((mi_row * MI_SIZE) * 8);
6371 xd->mb_to_bottom_edge = ((cm->mi_rows - 1 - mi_row) * MI_SIZE) * 8;
6372 xd->mb_to_left_edge = -((mi_col * MI_SIZE) * 8);
6373 xd->mb_to_right_edge = ((cm->mi_cols - 1 - mi_col) * MI_SIZE) * 8;
6374 xd->above_mbmi = (mi_row > 0) ? &mi_above : NULL;
6375 xd->left_mbmi = (mi_col > 0) ? &mi_left : NULL;
6376
6377 // Intra prediction search
6378 for (mode = DC_PRED; mode <= PAETH_PRED; ++mode) {
6379 uint8_t *src, *dst;
6380 int src_stride, dst_stride;
6381
6382 src = xd->cur_buf->y_buffer + mb_y_offset;
6383 src_stride = xd->cur_buf->y_stride;
6384
6385 dst = &predictor[0];
6386 dst_stride = bw;
6387
6388 xd->mi[0]->sb_type = bsize;
6389 xd->mi[0]->ref_frame[0] = INTRA_FRAME;
6390
6391 av1_predict_intra_block(
6392 cm, xd, block_size_wide[bsize], block_size_high[bsize], tx_size, mode,
6393 0, 0, FILTER_INTRA_MODES, src, src_stride, dst, dst_stride, 0, 0, 0);
6394
6395 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
6396 aom_highbd_subtract_block(bh, bw, src_diff, bw, src, src_stride, dst,
6397 dst_stride, xd->bd);
6398 } else {
6399 aom_subtract_block(bh, bw, src_diff, bw, src, src_stride, dst,
6400 dst_stride);
6401 }
6402
6403 wht_fwd_txfm(src_diff, bw, coeff, tx_size);
6404
6405 intra_cost = aom_satd(coeff, pix_num);
6406
6407 if (intra_cost < best_intra_cost) best_intra_cost = intra_cost;
6408 }
6409
6410 // Motion compensated prediction
6411 best_mv.as_int = 0;
6412
6413 (void)mb_y_offset;
6414 // Motion estimation column boundary
6415 x->mv_limits.col_min = -((mi_col * MI_SIZE) + (17 - 2 * AOM_INTERP_EXTEND));
6416 x->mv_limits.col_max =
6417 ((cm->mi_cols - 1 - mi_col) * MI_SIZE) + (17 - 2 * AOM_INTERP_EXTEND);
6418
6419 for (rf_idx = 0; rf_idx < 7; ++rf_idx) {
6420 if (ref_frame[rf_idx] == NULL) continue;
6421
6422 motion_compensated_prediction(cpi, td, xd->cur_buf->y_buffer + mb_y_offset,
6423 ref_frame[rf_idx]->y_buffer + mb_y_offset,
6424 xd->cur_buf->y_stride, bsize, mi_row, mi_col);
6425
6426    // TODO(jingning): High bit-depth is not yet supported in the next
6427    // three steps.
6428 ConvolveParams conv_params = get_conv_params(0, 0, xd->bd);
6429 WarpTypesAllowed warp_types;
6430 memset(&warp_types, 0, sizeof(WarpTypesAllowed));
6431
6432 av1_build_inter_predictor(
6433 ref_frame[rf_idx]->y_buffer + mb_y_offset, ref_frame[rf_idx]->y_stride,
6434 &predictor[0], bw, &x->best_mv.as_mv, sf, bw, bh, &conv_params, kernel,
6435 &warp_types, mi_col * MI_SIZE, mi_row * MI_SIZE, 0, 0, MV_PRECISION_Q3,
6436 mi_col * MI_SIZE, mi_row * MI_SIZE, xd, 0);
6437 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
6438 aom_highbd_subtract_block(
6439 bh, bw, src_diff, bw, xd->cur_buf->y_buffer + mb_y_offset,
6440 xd->cur_buf->y_stride, &predictor[0], bw, xd->bd);
6441 } else {
6442 aom_subtract_block(bh, bw, src_diff, bw,
6443 xd->cur_buf->y_buffer + mb_y_offset,
6444 xd->cur_buf->y_stride, &predictor[0], bw);
6445 }
6446 wht_fwd_txfm(src_diff, bw, coeff, tx_size);
6447
6448 inter_cost = aom_satd(coeff, pix_num);
6449 if (inter_cost < best_inter_cost) {
6450 best_rf_idx = rf_idx;
6451 best_inter_cost = inter_cost;
6452 best_mv.as_int = x->best_mv.as_int;
6453 get_quantize_error(x, 0, coeff, qcoeff, dqcoeff, tx_size, recon_error,
6454 sse);
6455 }
6456 }
6457 best_intra_cost = AOMMAX(best_intra_cost, 1);
6458 best_inter_cost = AOMMIN(best_intra_cost, best_inter_cost);
6459 tpl_stats->inter_cost = best_inter_cost << TPL_DEP_COST_SCALE_LOG2;
6460 tpl_stats->intra_cost = best_intra_cost << TPL_DEP_COST_SCALE_LOG2;
6461 tpl_stats->mc_dep_cost = tpl_stats->intra_cost + tpl_stats->mc_flow;
6462
6463 tpl_stats->ref_frame_index = gf_picture[frame_idx].ref_frame[best_rf_idx];
6464 tpl_stats->mv.as_int = best_mv.as_int;
6465}
6466
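// Walk one frame of the GF group in 32x32 blocks: run mode_estimation() on
// each block, store the resulting stats in the frame's TPL buffer and
// propagate the dependency information back into the TPL model.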
Sarah Parkercf644442018-10-11 15:23:44 -07006467static void mc_flow_dispenser(AV1_COMP *cpi, GF_PICTURE *gf_picture,
6468 int frame_idx) {
Yue Chen7cae98f2018-08-24 10:43:16 -07006469 TplDepFrame *tpl_frame = &cpi->tpl_stats[frame_idx];
6470 YV12_BUFFER_CONFIG *this_frame = gf_picture[frame_idx].frame;
6471 YV12_BUFFER_CONFIG *ref_frame[7] = {
6472 NULL, NULL, NULL, NULL, NULL, NULL, NULL
6473 };
6474
6475 AV1_COMMON *cm = &cpi->common;
6476 struct scale_factors sf;
6477 int rdmult, idx;
6478 ThreadData *td = &cpi->td;
6479 MACROBLOCK *x = &td->mb;
6480 MACROBLOCKD *xd = &x->e_mbd;
6481 int mi_row, mi_col;
6482
6483 DECLARE_ALIGNED(16, uint16_t, predictor16[32 * 32 * 3]);
6484 DECLARE_ALIGNED(16, uint8_t, predictor8[32 * 32 * 3]);
6485 uint8_t *predictor;
6486 DECLARE_ALIGNED(16, int16_t, src_diff[32 * 32]);
6487 DECLARE_ALIGNED(16, tran_low_t, coeff[32 * 32]);
6488 DECLARE_ALIGNED(16, tran_low_t, qcoeff[32 * 32]);
6489 DECLARE_ALIGNED(16, tran_low_t, dqcoeff[32 * 32]);
6490
6491 const BLOCK_SIZE bsize = BLOCK_32X32;
6492 const TX_SIZE tx_size = max_txsize_lookup[bsize];
6493 const int mi_height = mi_size_high[bsize];
6494 const int mi_width = mi_size_wide[bsize];
6495 int64_t recon_error, sse;
6496
6497  // Set up the scaling factor
6498 av1_setup_scale_factors_for_frame(
6499 &sf, this_frame->y_crop_width, this_frame->y_crop_height,
6500 this_frame->y_crop_width, this_frame->y_crop_height);
6501
6502 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH)
6503 predictor = CONVERT_TO_BYTEPTR(predictor16);
6504 else
6505 predictor = predictor8;
6506
6507 // Prepare reference frame pointers. If any reference frame slot is
6508  // unavailable, the pointer will be set to NULL.
6509 for (idx = 0; idx < 7; ++idx) {
6510 int rf_idx = gf_picture[frame_idx].ref_frame[idx];
6511 if (rf_idx != -1) ref_frame[idx] = gf_picture[rf_idx].frame;
6512 }
6513
6514 xd->mi = cm->mi_grid_visible;
6515 xd->mi[0] = cm->mi;
6516 xd->cur_buf = this_frame;
6517
6518 // Get rd multiplier set up.
6519 rdmult = (int)av1_compute_rd_mult(cpi, tpl_frame->base_qindex);
6520 if (rdmult < 1) rdmult = 1;
6521 set_error_per_bit(&cpi->td.mb, rdmult);
6522 av1_initialize_me_consts(cpi, &cpi->td.mb, tpl_frame->base_qindex);
6523
6524 tpl_frame->is_valid = 1;
6525
6526 cm->base_qindex = tpl_frame->base_qindex;
6527 av1_frame_init_quantizer(cpi);
6528
6529 for (mi_row = 0; mi_row < cm->mi_rows; mi_row += mi_height) {
6530 // Motion estimation row boundary
6531 x->mv_limits.row_min = -((mi_row * MI_SIZE) + (17 - 2 * AOM_INTERP_EXTEND));
6532 x->mv_limits.row_max =
6533 (cm->mi_rows - 1 - mi_row) * MI_SIZE + (17 - 2 * AOM_INTERP_EXTEND);
6534 for (mi_col = 0; mi_col < cm->mi_cols; mi_col += mi_width) {
6535 TplDepStats tpl_stats;
6536 mode_estimation(cpi, x, xd, &sf, gf_picture, frame_idx, src_diff, coeff,
6537 qcoeff, dqcoeff, mi_row, mi_col, bsize, tx_size,
6538 ref_frame, predictor, &recon_error, &sse, &tpl_stats);
6539
6540 // Motion flow dependency dispenser.
6541 tpl_model_store(tpl_frame->tpl_stats_ptr, mi_row, mi_col, bsize,
6542 tpl_frame->stride, &tpl_stats);
6543
6544 tpl_model_update(cpi->tpl_stats, tpl_frame->tpl_stats_ptr, mi_row, mi_col,
6545 bsize);
6546 }
6547 }
6548}
6549
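// Build the TPL model for the current GF group: set up the GOP frame list and
// reset the stats, then run the flow dispenser backwards over the group so
// that each frame's contribution is propagated to the frames it references.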
6550static void setup_tpl_stats(AV1_COMP *cpi) {
6551 GF_PICTURE gf_picture[MAX_LAG_BUFFERS];
6552 const GF_GROUP *gf_group = &cpi->twopass.gf_group;
6553 int tpl_group_frames = 0;
6554 int frame_idx;
6555
6556 init_gop_frames(cpi, gf_picture, gf_group, &tpl_group_frames);
6557
6558 init_tpl_stats(cpi);
6559
6560  // Backward propagation from frame tpl_group_frames - 1 down to frame 1.
6561 for (frame_idx = tpl_group_frames - 1; frame_idx > 0; --frame_idx)
6562 mc_flow_dispenser(cpi, gf_picture, frame_idx);
6563}
6564
David Turner0308a5a2019-01-07 10:36:16 +00006565// Determine whether there is a forced keyframe pending in the lookahead buffer
6566static int is_forced_keyframe_pending(struct lookahead_ctx *lookahead,
6567 const int up_to_index) {
6568 for (int i = 0; i <= up_to_index; i++) {
6569 const struct lookahead_entry *e = av1_lookahead_peek(lookahead, i);
6570 if (e == NULL) {
6571 // We have reached the end of the lookahead buffer and not early-returned
6572 // so there isn't a forced key-frame pending.
6573 return 0;
6574 } else if (e->flags == AOM_EFLAG_FORCE_KF) {
6575 return 1;
6576 } else {
6577 continue;
6578 }
6579 }
6580 return 0; // Never reached
6581}
6582
6583// Don't allow a show_existing_frame to coincide with an error resilient frame
6584// or an S-Frame. An exception can be made for a keyframe, since it does
6585// not depend on any previous frames.
6586static int allow_show_existing(const AV1_COMP *const cpi) {
6587 if (cpi->common.current_frame.frame_number == 0) return 0;
6588
6589 const struct lookahead_entry *lookahead_src =
6590 av1_lookahead_peek(cpi->lookahead, 0);
6591 if (lookahead_src == NULL) return 1;
6592
6593 const int is_error_resilient =
6594 cpi->oxcf.error_resilient_mode ||
6595 (lookahead_src->flags & AOM_EFLAG_ERROR_RESILIENT);
6596 const int is_s_frame =
6597 cpi->oxcf.s_frame_mode || (lookahead_src->flags & AOM_EFLAG_SET_S_FRAME);
6598 const int is_key_frame =
6599 (cpi->rc.frames_to_key == 0) || (cpi->frame_flags & FRAMEFLAGS_KEY);
6600 return !(is_error_resilient || is_s_frame) || is_key_frame;
6601}
6602
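// Top-level per-frame entry point of the encoder: fetches the next source
// frame (or an ARF / BRF / show_existing frame) from the lookahead buffer,
// sets up frame-level state, and runs the pass-0, pass-1 or pass-2 encode
// path to produce the coded data in dest/size.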
Andrey Norkin795ba872018-03-06 13:24:14 -08006603int av1_get_compressed_data(AV1_COMP *cpi, unsigned int *frame_flags,
6604 size_t *size, uint8_t *dest, int64_t *time_stamp,
6605 int64_t *time_end, int flush,
6606 const aom_rational_t *timebase) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006607 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
6608 AV1_COMMON *const cm = &cpi->common;
David Turnerd2a592e2018-11-16 14:59:31 +00006609 CurrentFrame *const current_frame = &cm->current_frame;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00006610 const int num_planes = av1_num_planes(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006611 RATE_CONTROL *const rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -07006612 struct aom_usec_timer cmptimer;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006613 YV12_BUFFER_CONFIG *force_src_buffer = NULL;
6614 struct lookahead_entry *last_source = NULL;
6615 struct lookahead_entry *source = NULL;
6616 int arf_src_index;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006617 int brf_src_index;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006618 int i;
6619
6620#if CONFIG_BITSTREAM_DEBUG
6621 assert(cpi->oxcf.max_threads == 0 &&
6622 "bitstream debug tool does not support multithreading");
6623 bitstream_queue_record_write();
David Turnerd2a592e2018-11-16 14:59:31 +00006624 bitstream_queue_set_frame_write(current_frame->frame_number * 2 +
6625 cm->show_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006626#endif
6627
Dominic Symesd4929012018-01-31 17:32:01 +01006628 cm->showable_frame = 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07006629 aom_usec_timer_start(&cmptimer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006630
RogerZhou3b635242017-09-19 10:06:46 -07006631 set_high_precision_mv(cpi, ALTREF_HIGH_PRECISION_MV, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006632
Debargha Mukherjeeba7b8fe2018-03-15 23:10:07 -07006633 // Normal defaults
sarahparker27d686a2018-03-30 17:43:44 -07006634 cm->refresh_frame_context = oxcf->frame_parallel_decoding_mode
6635 ? REFRESH_FRAME_CONTEXT_DISABLED
6636 : REFRESH_FRAME_CONTEXT_BACKWARD;
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01006637 if (oxcf->large_scale_tile)
James Zernf34dfc82018-02-23 16:53:33 -08006638 cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_DISABLED;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006639
Wei-Ting Lin2e8d0452018-06-27 09:32:39 -07006640  // Default reference buffer update configuration.
6641 av1_configure_buffer_updates_firstpass(cpi, LF_UPDATE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006642
Sarah Parkerb9041612018-05-22 19:06:47 -07006643 // Initialize fields related to forward keyframes
Sarah Parkeraf32a7b2018-06-29 14:59:05 -07006644 cpi->no_show_kf = 0;
Zoe Liub4991202017-12-21 15:31:06 -08006645 cm->reset_decoder_state = 0;
Zoe Liub4991202017-12-21 15:31:06 -08006646
David Turner0308a5a2019-01-07 10:36:16 +00006647 if (oxcf->pass == 2 && cm->show_existing_frame && allow_show_existing(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07006648 // Manage the source buffer and flush out the source frame that has been
6649    // coded already; also prepare for PSNR calculation if needed.
Yaowu Xuf883b422016-08-30 14:01:10 -07006650 if ((source = av1_lookahead_pop(cpi->lookahead, flush)) == NULL) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07006651 *size = 0;
6652 return -1;
6653 }
sarahparker21dbca42018-03-30 17:43:44 -07006654 av1_apply_encoding_flags(cpi, source->flags);
Alex Conversef77fd0b2017-04-20 11:00:24 -07006655 cpi->source = &source->img;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006656    // TODO(zoeliu): Track down whether the frame rate needs to be adjusted
6657    // here.
6658 *time_stamp = source->ts_start;
6659 *time_end = source->ts_end;
6660
6661 // We need to adjust frame rate for an overlay frame
Zoe Liue04abf72017-04-19 15:37:11 -07006662 if (cpi->rc.is_src_frame_alt_ref) adjust_frame_rate(cpi, source);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006663
David Turner0308a5a2019-01-07 10:36:16 +00006664 if (assign_cur_frame_new_fb(cm) == NULL) return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006665
6666 // Clear down mmx registers
Yaowu Xuf883b422016-08-30 14:01:10 -07006667 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006668
6669 // Start with a 0 size frame.
6670 *size = 0;
6671
6672    // We need to update the gf_group for a show_existing overlay frame
Zoe Liue04abf72017-04-19 15:37:11 -07006673 if (cpi->rc.is_src_frame_alt_ref) av1_rc_get_second_pass_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006674
Tom Finegane4099e32018-01-23 12:01:51 -08006675 if (Pass2Encode(cpi, size, dest, frame_flags) != AOM_CODEC_OK)
6676 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006677
6678 if (cpi->b_calculate_psnr) generate_psnr_packet(cpi);
6679
6680#if CONFIG_INTERNAL_STATS
Angie Chiang08a22a62017-07-17 17:29:17 -07006681 compute_internal_stats(cpi, (int)(*size));
Yaowu Xuc27fc142016-08-22 16:08:15 -07006682#endif // CONFIG_INTERNAL_STATS
6683
6684 // Clear down mmx registers
Yaowu Xuf883b422016-08-30 14:01:10 -07006685 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006686
6687 cm->show_existing_frame = 0;
6688 return 0;
6689 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006690
6691 // Should we encode an arf frame.
6692 arf_src_index = get_arf_src_index(cpi);
David Turner0308a5a2019-01-07 10:36:16 +00006693 if (arf_src_index &&
6694 is_forced_keyframe_pending(cpi->lookahead, arf_src_index)) {
6695 arf_src_index = 0;
6696 flush = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006697 }
6698
6699 if (arf_src_index) {
6700 assert(arf_src_index <= rc->frames_to_key);
6701
Yaowu Xuf883b422016-08-30 14:01:10 -07006702 if ((source = av1_lookahead_peek(cpi->lookahead, arf_src_index)) != NULL) {
Dominic Symesd4929012018-01-31 17:32:01 +01006703 cm->showable_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006704 cpi->alt_ref_source = source;
Sarah Parkeraf32a7b2018-06-29 14:59:05 -07006705 // When arf_src_index == rc->frames_to_key, it indicates a fwd_kf
Sarah Parkerb9041612018-05-22 19:06:47 -07006706 if (arf_src_index == rc->frames_to_key) {
6707 // Skip temporal filtering and mark as intra_only if we have a fwd_kf
6708 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
6709 int which_arf = gf_group->arf_update_idx[gf_group->index];
6710 cpi->is_arf_filter_off[which_arf] = 1;
Sarah Parkeraf32a7b2018-06-29 14:59:05 -07006711 cpi->no_show_kf = 1;
Sarah Parkerb9041612018-05-22 19:06:47 -07006712 } else {
6713 if (oxcf->arnr_max_frames > 0) {
6714 // Produce the filtered ARF frame.
6715 av1_temporal_filter(cpi, arf_src_index);
6716 aom_extend_frame_borders(&cpi->alt_ref_buffer, num_planes);
6717 force_src_buffer = &cpi->alt_ref_buffer;
6718 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006719 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006720 cm->show_frame = 0;
Wei-Ting Lin2e8d0452018-06-27 09:32:39 -07006721
6722 if (oxcf->pass < 2) {
6723        // In the second pass, the buffer update configuration will be set
6724        // in the function av1_rc_get_second_pass_params
6725 av1_configure_buffer_updates_firstpass(cpi, ARF_UPDATE);
6726 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006727 }
6728 rc->source_alt_ref_pending = 0;
6729 }
6730
Zoe Liue9b15e22017-07-19 15:53:01 -07006731 // Should we encode an arf2 frame.
6732 arf_src_index = get_arf2_src_index(cpi);
David Turner0308a5a2019-01-07 10:36:16 +00006733 if (arf_src_index &&
6734 is_forced_keyframe_pending(cpi->lookahead, arf_src_index)) {
6735 arf_src_index = 0;
6736 flush = 1;
Zoe Liue9b15e22017-07-19 15:53:01 -07006737 }
6738
6739 if (arf_src_index) {
6740 assert(arf_src_index <= rc->frames_to_key);
6741
6742 if ((source = av1_lookahead_peek(cpi->lookahead, arf_src_index)) != NULL) {
Dominic Symesd4929012018-01-31 17:32:01 +01006743 cm->showable_frame = 1;
Zoe Liue9b15e22017-07-19 15:53:01 -07006744 cpi->alt_ref_source = source;
6745
6746 if (oxcf->arnr_max_frames > 0) {
6747 // Produce the filtered ARF frame.
Sebastien Alaiwan6697acf2018-02-21 16:59:17 +01006748 av1_temporal_filter(cpi, arf_src_index);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00006749 aom_extend_frame_borders(&cpi->alt_ref_buffer, num_planes);
Zoe Liue9b15e22017-07-19 15:53:01 -07006750 force_src_buffer = &cpi->alt_ref_buffer;
6751 }
6752
6753 cm->show_frame = 0;
Wei-Ting Lin2e8d0452018-06-27 09:32:39 -07006754
6755 if (oxcf->pass < 2) {
6756        // In the second pass, the buffer update configuration will be set
6757        // in the function av1_rc_get_second_pass_params
6758 av1_configure_buffer_updates_firstpass(cpi, INTNL_ARF_UPDATE);
6759 }
Zoe Liue9b15e22017-07-19 15:53:01 -07006760 }
6761 rc->source_alt_ref_pending = 0;
6762 }
Zoe Liue9b15e22017-07-19 15:53:01 -07006763
Yaowu Xuc27fc142016-08-22 16:08:15 -07006764 rc->is_bwd_ref_frame = 0;
6765 brf_src_index = get_brf_src_index(cpi);
6766 if (brf_src_index) {
6767 assert(brf_src_index <= rc->frames_to_key);
Yaowu Xuf883b422016-08-30 14:01:10 -07006768 if ((source = av1_lookahead_peek(cpi->lookahead, brf_src_index)) != NULL) {
Dominic Symesd4929012018-01-31 17:32:01 +01006769 cm->showable_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006770 cm->show_frame = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006771
Wei-Ting Lin2e8d0452018-06-27 09:32:39 -07006772 if (oxcf->pass < 2) {
6773        // In the second pass, the buffer update configuration will be set
6774        // in the function av1_rc_get_second_pass_params
6775 av1_configure_buffer_updates_firstpass(cpi, BIPRED_UPDATE);
6776 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006777 }
6778 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006779
6780 if (!source) {
6781 // Get last frame source.
David Turnerd2a592e2018-11-16 14:59:31 +00006782 if (current_frame->frame_number > 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006783 if ((last_source = av1_lookahead_peek(cpi->lookahead, -1)) == NULL)
Yaowu Xuc27fc142016-08-22 16:08:15 -07006784 return -1;
6785 }
David Turnerd2a592e2018-11-16 14:59:31 +00006786 if (current_frame->frame_number > 0) assert(last_source != NULL);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006787 // Read in the source frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07006788 source = av1_lookahead_pop(cpi->lookahead, flush);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006789
6790 if (source != NULL) {
6791 cm->show_frame = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006792
6793 // Check to see if the frame should be encoded as an arf overlay.
6794 check_src_altref(cpi, source);
6795 }
6796 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006797 if (source) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07006798 cpi->unscaled_source = cpi->source =
Yaowu Xuc27fc142016-08-22 16:08:15 -07006799 force_src_buffer ? force_src_buffer : &source->img;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006800 cpi->unscaled_last_source = last_source != NULL ? &last_source->img : NULL;
6801
6802 *time_stamp = source->ts_start;
6803 *time_end = source->ts_end;
Sarah Parker73556772018-03-28 18:28:05 -07006804 av1_apply_encoding_flags(cpi, source->flags);
Yaowu Xuf883b422016-08-30 14:01:10 -07006805 *frame_flags = (source->flags & AOM_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006806
6807 } else {
6808 *size = 0;
6809 if (flush && oxcf->pass == 1 && !cpi->twopass.first_pass_done) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006810 av1_end_first_pass(cpi); /* get last stats packet */
Yaowu Xuc27fc142016-08-22 16:08:15 -07006811 cpi->twopass.first_pass_done = 1;
6812 }
6813 return -1;
6814 }
6815
6816 if (source->ts_start < cpi->first_time_stamp_ever) {
6817 cpi->first_time_stamp_ever = source->ts_start;
6818 cpi->last_end_time_stamp_seen = source->ts_start;
6819 }
6820
6821 // Clear down mmx registers
Yaowu Xuf883b422016-08-30 14:01:10 -07006822 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006823
6824 // adjust frame rates based on timestamps given
6825 if (cm->show_frame) adjust_frame_rate(cpi, source);
6826
David Turner0308a5a2019-01-07 10:36:16 +00006827 if (assign_cur_frame_new_fb(cm) == NULL) return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006828
Zoe Liuf452fdf2017-11-02 23:08:12 -07006829 // Retain the RF_LEVEL for the current newly coded frame.
David Turner1bcefb32018-11-19 17:54:00 +00006830 cm->cur_frame->frame_rf_level =
Zoe Liuf452fdf2017-11-02 23:08:12 -07006831 cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
Zoe Liuf452fdf2017-11-02 23:08:12 -07006832
Yaowu Xu9b0f7032017-07-31 11:01:19 -07006833 cm->cur_frame->buf.buf_8bit_valid = 0;
Neil Birkbeckeb895ef2018-03-14 17:51:03 -07006834
Neil Birkbecka2893ab2018-06-08 14:45:13 -07006835 if (cpi->film_grain_table) {
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07006836 cm->seq_params.film_grain_params_present = aom_film_grain_table_lookup(
Neil Birkbecka2893ab2018-06-08 14:45:13 -07006837 cpi->film_grain_table, *time_stamp, *time_end, 0 /* =erase */,
Neil Birkbeckeb895ef2018-03-14 17:51:03 -07006838 &cm->film_grain_params);
6839 }
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07006840 cm->cur_frame->film_grain_params_present =
6841 cm->seq_params.film_grain_params_present;
Zoe Liu6cfaff92016-10-18 17:12:11 -07006842
Andrey Norkin795ba872018-03-06 13:24:14 -08006843  // Only one operating point is supported for now.
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07006844 const int64_t pts64 = ticks_to_timebase_units(timebase, *time_stamp);
6845 if (pts64 < 0 || pts64 > UINT32_MAX) return AOM_CODEC_ERROR;
6846 cpi->common.frame_presentation_time = (uint32_t)pts64;
Andrey Norkin795ba872018-03-06 13:24:14 -08006847
Yaowu Xuc27fc142016-08-22 16:08:15 -07006848 // Start with a 0 size frame.
6849 *size = 0;
6850
6851 cpi->frame_flags = *frame_flags;
6852
6853 if (oxcf->pass == 2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07006854 av1_rc_get_second_pass_params(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006855 } else if (oxcf->pass == 1) {
Fergus Simpsonbc189932017-05-16 17:02:39 -07006856 setup_frame_size(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006857 }
6858
6859 if (cpi->oxcf.pass != 0 || frame_is_intra_only(cm) == 1) {
David Turnere7ebf902018-12-04 14:04:55 +00006860 for (i = 0; i < INTER_REFS_PER_FRAME; ++i) cpi->scaled_ref_buf[i] = NULL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006861 }
6862
Yaowu Xuc27fc142016-08-22 16:08:15 -07006863 cm->using_qmatrix = cpi->oxcf.using_qm;
6864 cm->min_qmlevel = cpi->oxcf.qm_minlevel;
6865 cm->max_qmlevel = cpi->oxcf.qm_maxlevel;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006866
David Turner936235c2018-11-28 13:42:01 +00006867 if (cm->seq_params.frame_id_numbers_present_flag && *time_stamp == 0) {
6868 cpi->common.current_frame_id = -1;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01006869 }
Zoe Liuca0cd3f2018-02-26 15:07:50 -08006870
Debargha Mukherjeee41a6672018-02-27 11:56:31 -08006871 if (oxcf->pass != 1 && cpi->common.allow_screen_content_tools &&
6872 !frame_is_intra_only(cm)) {
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00006873 if (cpi->common.seq_params.force_integer_mv == 2) {
RogerZhou3b635242017-09-19 10:06:46 -07006874 struct lookahead_entry *previous_entry =
Debargha Mukherjeea71e3db2018-02-28 07:47:17 -08006875 av1_lookahead_peek(cpi->lookahead, cpi->previous_index);
6876 if (!previous_entry)
6877 cpi->common.cur_frame_force_integer_mv = 0;
6878 else
6879 cpi->common.cur_frame_force_integer_mv = is_integer_mv(
6880 cpi, cpi->source, &previous_entry->img, cpi->previous_hash_table);
RogerZhou3b635242017-09-19 10:06:46 -07006881 } else {
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00006882 cpi->common.cur_frame_force_integer_mv =
6883 cpi->common.seq_params.force_integer_mv;
RogerZhou3b635242017-09-19 10:06:46 -07006884 }
6885 } else {
RogerZhou10a03802017-10-26 11:49:48 -07006886 cpi->common.cur_frame_force_integer_mv = 0;
RogerZhou3b635242017-09-19 10:06:46 -07006887 }
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01006888
Yue Chen7cae98f2018-08-24 10:43:16 -07006889 if (cpi->twopass.gf_group.index == 1 && cpi->oxcf.enable_tpl_model) {
6890 set_frame_size(cpi, cm->width, cm->height);
6891 setup_tpl_stats(cpi);
6892 }
6893
Yaowu Xuc27fc142016-08-22 16:08:15 -07006894 if (oxcf->pass == 1) {
6895 cpi->td.mb.e_mbd.lossless[0] = is_lossless_requested(oxcf);
Yaowu Xuf883b422016-08-30 14:01:10 -07006896 av1_first_pass(cpi, source);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006897 } else if (oxcf->pass == 2) {
Tom Finegane4099e32018-01-23 12:01:51 -08006898 if (Pass2Encode(cpi, size, dest, frame_flags) != AOM_CODEC_OK)
6899 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006900 } else {
6901 // One pass encode
David Turner056f7cd2019-01-07 17:48:13 +00006902 if (Pass0Encode(cpi, size, dest, frame_flags) != AOM_CODEC_OK)
Tom Finegane4099e32018-01-23 12:01:51 -08006903 return AOM_CODEC_ERROR;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006904 }
RogerZhoucc5d35d2017-08-07 22:20:15 -07006905 if (oxcf->pass != 1 && cpi->common.allow_screen_content_tools) {
Debargha Mukherjeee41a6672018-02-27 11:56:31 -08006906 cpi->previous_hash_table = &cm->cur_frame->hash_table;
RogerZhou3b635242017-09-19 10:06:46 -07006907 {
6908 int l;
6909 for (l = -MAX_PRE_FRAMES; l < cpi->lookahead->max_sz; l++) {
6910 if ((cpi->lookahead->buf + l) == source) {
Debargha Mukherjeee41a6672018-02-27 11:56:31 -08006911 cpi->previous_index = l;
RogerZhou3b635242017-09-19 10:06:46 -07006912 break;
6913 }
6914 }
6915
6916 if (l == cpi->lookahead->max_sz) {
6917 aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
6918 "Failed to find last frame original buffer");
6919 }
6920 }
RogerZhoucc5d35d2017-08-07 22:20:15 -07006921 }
6922
Yunqing Wang267e3272017-11-09 14:23:22 -08006923 if (!cm->large_scale_tile) {
David Turner1bcefb32018-11-19 17:54:00 +00006924 cm->cur_frame->frame_context = *cm->fc;
Yunqing Wang267e3272017-11-09 14:23:22 -08006925 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07006926
Yunqing Wangb041d8a2017-11-15 12:31:18 -08006927#define EXT_TILE_DEBUG 0
6928#if EXT_TILE_DEBUG
6929 if (cm->large_scale_tile && oxcf->pass == 2) {
6930 char fn[20] = "./fc";
David Turnerd2a592e2018-11-16 14:59:31 +00006931 fn[4] = current_frame->frame_number / 100 + '0';
6932 fn[5] = (current_frame->frame_number % 100) / 10 + '0';
6933 fn[6] = (current_frame->frame_number % 10) + '0';
Yunqing Wangb041d8a2017-11-15 12:31:18 -08006934 fn[7] = '\0';
6935 av1_print_frame_contexts(cm->fc, fn);
6936 }
6937#endif // EXT_TILE_DEBUG
6938#undef EXT_TILE_DEBUG
Yaowu Xuc7119a72018-03-29 09:59:37 -07006939
Dominic Symesd4929012018-01-31 17:32:01 +01006940 cm->showable_frame = !cm->show_frame && cm->showable_frame;
Yunqing Wangb041d8a2017-11-15 12:31:18 -08006941
Yaowu Xuc27fc142016-08-22 16:08:15 -07006942 // No frame encoded, or frame was dropped, release scaled references.
6943 if ((*size == 0) && (frame_is_intra_only(cm) == 0)) {
6944 release_scaled_references(cpi);
6945 }
6946
6947 if (*size > 0) {
Debargha Mukherjee8adee102018-09-25 11:01:00 -07006948 cpi->droppable = is_frame_droppable(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006949 }
6950
Yaowu Xuf883b422016-08-30 14:01:10 -07006951 aom_usec_timer_mark(&cmptimer);
6952 cpi->time_compress_data += aom_usec_timer_elapsed(&cmptimer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07006953
6954 if (cpi->b_calculate_psnr && oxcf->pass != 1 && cm->show_frame)
6955 generate_psnr_packet(cpi);
6956
6957#if CONFIG_INTERNAL_STATS
6958 if (oxcf->pass != 1) {
Angie Chiang08a22a62017-07-17 17:29:17 -07006959 compute_internal_stats(cpi, (int)(*size));
Yaowu Xuc27fc142016-08-22 16:08:15 -07006960 }
6961#endif // CONFIG_INTERNAL_STATS
Debargha Mukherjee0857e662019-01-04 16:22:09 -08006962#if CONFIG_SPEED_STATS
6963 if (cpi->oxcf.pass != 1) {
6964 cpi->tx_search_count += cpi->td.mb.tx_search_count;
6965 cpi->td.mb.tx_search_count = 0;
6966 }
6967#endif // CONFIG_SPEED_STATS
Yaowu Xuc27fc142016-08-22 16:08:15 -07006968
Yaowu Xuf883b422016-08-30 14:01:10 -07006969 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006970
6971 return 0;
6972}
6973
Yaowu Xuf883b422016-08-30 14:01:10 -07006974int av1_get_preview_raw_frame(AV1_COMP *cpi, YV12_BUFFER_CONFIG *dest) {
6975 AV1_COMMON *cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006976 if (!cm->show_frame) {
6977 return -1;
6978 } else {
6979 int ret;
David Turnerc29e1a92018-12-06 14:10:14 +00006980 if (cm->cur_frame != NULL) {
6981 *dest = cm->cur_frame->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006982 dest->y_width = cm->width;
6983 dest->y_height = cm->height;
Urvang Joshi20cf30e2018-07-19 02:33:58 -07006984 dest->uv_width = cm->width >> cm->seq_params.subsampling_x;
6985 dest->uv_height = cm->height >> cm->seq_params.subsampling_y;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006986 ret = 0;
6987 } else {
6988 ret = -1;
6989 }
Yaowu Xuf883b422016-08-30 14:01:10 -07006990 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07006991 return ret;
6992 }
6993}
6994
Yaowu Xuf883b422016-08-30 14:01:10 -07006995int av1_get_last_show_frame(AV1_COMP *cpi, YV12_BUFFER_CONFIG *frame) {
David Turnere7ebf902018-12-04 14:04:55 +00006996 if (cpi->last_show_frame_buf == NULL) return -1;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006997
David Turnere7ebf902018-12-04 14:04:55 +00006998 *frame = cpi->last_show_frame_buf->buf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07006999 return 0;
7000}
7001
Yunqing Wangff9bfca2018-06-06 11:46:08 -07007002static int equal_dimensions_and_border(const YV12_BUFFER_CONFIG *a,
7003 const YV12_BUFFER_CONFIG *b) {
7004 return a->y_height == b->y_height && a->y_width == b->y_width &&
7005 a->uv_height == b->uv_height && a->uv_width == b->uv_width &&
7006 a->y_stride == b->y_stride && a->uv_stride == b->uv_stride &&
7007 a->border == b->border &&
7008 (a->flags & YV12_FLAG_HIGHBITDEPTH) ==
7009 (b->flags & YV12_FLAG_HIGHBITDEPTH);
7010}
7011
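// Copy the contents of new_frame into sd, provided both buffers have matching
// dimensions, strides, border and bit depth; otherwise flag a codec error.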
Yunqing Wang93b18f32018-06-08 21:08:29 -07007012aom_codec_err_t av1_copy_new_frame_enc(AV1_COMMON *cm,
7013 YV12_BUFFER_CONFIG *new_frame,
7014 YV12_BUFFER_CONFIG *sd) {
Yunqing Wangff9bfca2018-06-06 11:46:08 -07007015 const int num_planes = av1_num_planes(cm);
7016 if (!equal_dimensions_and_border(new_frame, sd))
7017 aom_internal_error(&cm->error, AOM_CODEC_ERROR,
7018 "Incorrect buffer dimensions");
7019 else
7020 aom_yv12_copy_frame(new_frame, sd, num_planes);
7021
7022 return cm->error.error_code;
7023}
7024
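// Record an externally requested downscaling: translate the horizontal and
// vertical scaling modes into a pending resize width/height, rounding up to
// the next whole pixel.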
Yaowu Xuf883b422016-08-30 14:01:10 -07007025int av1_set_internal_size(AV1_COMP *cpi, AOM_SCALING horiz_mode,
7026 AOM_SCALING vert_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07007027 int hr = 0, hs = 0, vr = 0, vs = 0;
7028
7029 if (horiz_mode > ONETWO || vert_mode > ONETWO) return -1;
7030
7031 Scale2Ratio(horiz_mode, &hr, &hs);
7032 Scale2Ratio(vert_mode, &vr, &vs);
7033
7034 // always go to the next whole number
Debargha Mukherjeeccb27262017-09-25 14:19:46 -07007035 cpi->resize_pending_width = (hs - 1 + cpi->oxcf.width * hr) / hs;
7036 cpi->resize_pending_height = (vs - 1 + cpi->oxcf.height * vr) / vs;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007037
7038 return 0;
7039}
7040
Yaowu Xuf883b422016-08-30 14:01:10 -07007041int av1_get_quantizer(AV1_COMP *cpi) { return cpi->common.base_qindex; }
Yaowu Xuc27fc142016-08-22 16:08:15 -07007042
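// Rewrite, in place, a frame of Section 5 OBUs (each carrying its own size
// field) into length-delimited form: each OBU is prefixed with a LEB128-coded
// total size and its obu_has_size_field bit is cleared. *frame_size is
// updated to the new length of the buffer.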
Soo-Chul Han29c46fb2018-03-23 16:02:00 -04007043int av1_convert_sect5obus_to_annexb(uint8_t *buffer, size_t *frame_size) {
7044 size_t output_size = 0;
7045 size_t total_bytes_read = 0;
7046 size_t remaining_size = *frame_size;
7047 uint8_t *buff_ptr = buffer;
7048
7049  // go through each OBU
7050 while (total_bytes_read < *frame_size) {
7051 uint8_t saved_obu_header[2];
7052 uint64_t obu_payload_size;
7053 size_t length_of_payload_size;
7054 size_t length_of_obu_size;
7055 uint32_t obu_header_size = (buff_ptr[0] >> 2) & 0x1 ? 2 : 1;
7056 size_t obu_bytes_read = obu_header_size; // bytes read for current obu
7057
7058 // save the obu header (1 or 2 bytes)
7059 memmove(saved_obu_header, buff_ptr, obu_header_size);
7060 // clear the obu_has_size_field
7061 saved_obu_header[0] = saved_obu_header[0] & (~0x2);
7062
7063 // get the payload_size and length of payload_size
7064 if (aom_uleb_decode(buff_ptr + obu_header_size, remaining_size,
7065 &obu_payload_size, &length_of_payload_size) != 0) {
7066 return AOM_CODEC_ERROR;
7067 }
7068 obu_bytes_read += length_of_payload_size;
7069
7070    // calculate the length of the coded size of the obu header plus payload
7071 length_of_obu_size =
7072 aom_uleb_size_in_bytes((uint64_t)(obu_header_size + obu_payload_size));
7073
7074    // move the rest of the data to its new location
7075 memmove(buff_ptr + length_of_obu_size + obu_header_size,
7076 buff_ptr + obu_bytes_read, remaining_size - obu_bytes_read);
Yaowu Xu9e494202018-04-03 11:19:49 -07007077 obu_bytes_read += (size_t)obu_payload_size;
Soo-Chul Han29c46fb2018-03-23 16:02:00 -04007078
7079 // write the new obu size
7080 const uint64_t obu_size = obu_header_size + obu_payload_size;
7081 size_t coded_obu_size;
7082 if (aom_uleb_encode(obu_size, sizeof(obu_size), buff_ptr,
7083 &coded_obu_size) != 0) {
7084 return AOM_CODEC_ERROR;
7085 }
7086
7087 // write the saved (modified) obu_header following obu size
7088 memmove(buff_ptr + length_of_obu_size, saved_obu_header, obu_header_size);
7089
7090 total_bytes_read += obu_bytes_read;
7091 remaining_size -= obu_bytes_read;
7092 buff_ptr += length_of_obu_size + obu_size;
Yaowu Xu9e494202018-04-03 11:19:49 -07007093 output_size += length_of_obu_size + (size_t)obu_size;
Soo-Chul Han29c46fb2018-03-23 16:02:00 -04007094 }
7095
7096 *frame_size = output_size;
7097 return AOM_CODEC_OK;
7098}
7099
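// Translate the external per-frame aom_enc_frame_flags_t flags into internal
// encoder state: which reference frames may be used or refreshed, plus the
// ref-frame-MVs, error-resilient, S-frame, primary-ref-none and
// entropy-update overrides.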
Yaowu Xuf883b422016-08-30 14:01:10 -07007100void av1_apply_encoding_flags(AV1_COMP *cpi, aom_enc_frame_flags_t flags) {
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007101  // TODO(yunqingwang): The external encoding flags that select which
7102  // references to use should be consistent with the internal reference frame
7103  // selection. Ensure that there is no conflict between the two. In the AV1
7104  // encoder, the priority ranking of the 7 reference frames is: LAST, ALTREF,
7105  // LAST2, LAST3, GOLDEN, BWDREF, ALTREF2. If only one reference frame is
7106  // used, it must be LAST.
Yunqing Wangf2e7a392017-11-08 00:27:21 -08007107 cpi->ext_ref_frame_flags = AOM_REFFRAME_ALL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007108 if (flags &
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007109 (AOM_EFLAG_NO_REF_LAST | AOM_EFLAG_NO_REF_LAST2 | AOM_EFLAG_NO_REF_LAST3 |
7110 AOM_EFLAG_NO_REF_GF | AOM_EFLAG_NO_REF_ARF | AOM_EFLAG_NO_REF_BWD |
7111 AOM_EFLAG_NO_REF_ARF2)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07007112 if (flags & AOM_EFLAG_NO_REF_LAST) {
Yunqing Wangf2e7a392017-11-08 00:27:21 -08007113 cpi->ext_ref_frame_flags = 0;
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007114 } else {
7115 int ref = AOM_REFFRAME_ALL;
7116
7117 if (flags & AOM_EFLAG_NO_REF_LAST2) ref ^= AOM_LAST2_FLAG;
7118 if (flags & AOM_EFLAG_NO_REF_LAST3) ref ^= AOM_LAST3_FLAG;
7119
7120 if (flags & AOM_EFLAG_NO_REF_GF) ref ^= AOM_GOLD_FLAG;
7121
7122 if (flags & AOM_EFLAG_NO_REF_ARF) {
7123 ref ^= AOM_ALT_FLAG;
7124 ref ^= AOM_BWD_FLAG;
7125 ref ^= AOM_ALT2_FLAG;
7126 } else {
7127 if (flags & AOM_EFLAG_NO_REF_BWD) ref ^= AOM_BWD_FLAG;
7128 if (flags & AOM_EFLAG_NO_REF_ARF2) ref ^= AOM_ALT2_FLAG;
7129 }
7130
7131 av1_use_as_reference(cpi, ref);
Yaowu Xuc27fc142016-08-22 16:08:15 -07007132 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07007133 }
7134
7135 if (flags &
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007136 (AOM_EFLAG_NO_UPD_LAST | AOM_EFLAG_NO_UPD_GF | AOM_EFLAG_NO_UPD_ARF)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07007137 int upd = AOM_REFFRAME_ALL;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007138
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007139 // Refreshing LAST/LAST2/LAST3 is handled by 1 common flag.
7140 if (flags & AOM_EFLAG_NO_UPD_LAST) upd ^= AOM_LAST_FLAG;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007141
Yaowu Xuf883b422016-08-30 14:01:10 -07007142 if (flags & AOM_EFLAG_NO_UPD_GF) upd ^= AOM_GOLD_FLAG;
Yaowu Xuc27fc142016-08-22 16:08:15 -07007143
Yunqing Wang9a50fec2017-11-02 17:02:00 -07007144 if (flags & AOM_EFLAG_NO_UPD_ARF) {
7145 upd ^= AOM_ALT_FLAG;
7146 upd ^= AOM_BWD_FLAG;
7147 upd ^= AOM_ALT2_FLAG;
7148 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07007149
Yaowu Xuf883b422016-08-30 14:01:10 -07007150 av1_update_reference(cpi, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07007151 }
7152
sarahparker21dbca42018-03-30 17:43:44 -07007153 cpi->ext_use_ref_frame_mvs = cpi->oxcf.allow_ref_frame_mvs &
7154 ((flags & AOM_EFLAG_NO_REF_FRAME_MVS) == 0);
sarahparker27d686a2018-03-30 17:43:44 -07007155 cpi->ext_use_error_resilient = cpi->oxcf.error_resilient_mode |
7156 ((flags & AOM_EFLAG_ERROR_RESILIENT) != 0);
sarahparker9806fed2018-03-30 17:43:44 -07007157 cpi->ext_use_s_frame =
7158 cpi->oxcf.s_frame_mode | ((flags & AOM_EFLAG_SET_S_FRAME) != 0);
Sarah Parker50b6d6e2018-04-11 19:21:54 -07007159 cpi->ext_use_primary_ref_none = (flags & AOM_EFLAG_SET_PRIMARY_REF_NONE) != 0;
sarahparker21dbca42018-03-30 17:43:44 -07007160
Yaowu Xuf883b422016-08-30 14:01:10 -07007161 if (flags & AOM_EFLAG_NO_UPD_ENTROPY) {
7162 av1_update_entropy(cpi, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07007163 }
7164}
Andrey Norkin795ba872018-03-06 13:24:14 -08007165
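// Conversions between units of the caller-supplied timebase and the encoder's
// fixed TICKS_PER_SEC tick clock, used e.g. when deriving the frame
// presentation time above.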
Andrey Norkin795ba872018-03-06 13:24:14 -08007166int64_t timebase_units_to_ticks(const aom_rational_t *timebase, int64_t n) {
7167 return n * TICKS_PER_SEC * timebase->num / timebase->den;
7168}
7169
7170int64_t ticks_to_timebase_units(const aom_rational_t *timebase, int64_t n) {
7171 const int64_t round = TICKS_PER_SEC * timebase->num / 2 - 1;
7172 return (n * timebase->den + round) / timebase->num / TICKS_PER_SEC;
7173}
Tom Fineganf8d6a162018-08-21 10:47:55 -07007174
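// Build the global header blob (a complete sequence header OBU, preceded by
// its OBU header and LEB128 size field) in a freshly allocated
// aom_fixed_buf_t. The caller owns both the buffer and the struct.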
7175aom_fixed_buf_t *av1_get_global_headers(AV1_COMP *cpi) {
7176 if (!cpi) return NULL;
7177
7178 uint8_t header_buf[512] = { 0 };
7179 const uint32_t sequence_header_size =
7180 write_sequence_header_obu(cpi, &header_buf[0]);
7181 assert(sequence_header_size <= sizeof(header_buf));
7182 if (sequence_header_size == 0) return NULL;
7183
7184 const size_t obu_header_size = 1;
7185 const size_t size_field_size = aom_uleb_size_in_bytes(sequence_header_size);
7186 const size_t payload_offset = obu_header_size + size_field_size;
7187
7188 if (payload_offset + sequence_header_size > sizeof(header_buf)) return NULL;
7189 memmove(&header_buf[payload_offset], &header_buf[0], sequence_header_size);
7190
7191 if (write_obu_header(OBU_SEQUENCE_HEADER, 0, &header_buf[0]) !=
7192 obu_header_size) {
7193 return NULL;
7194 }
7195
7196 size_t coded_size_field_size = 0;
7197 if (aom_uleb_encode(sequence_header_size, size_field_size,
7198 &header_buf[obu_header_size],
7199 &coded_size_field_size) != 0) {
7200 return NULL;
7201 }
7202 assert(coded_size_field_size == size_field_size);
7203
7204 aom_fixed_buf_t *global_headers =
7205 (aom_fixed_buf_t *)malloc(sizeof(*global_headers));
7206 if (!global_headers) return NULL;
7207
7208 const size_t global_header_buf_size =
7209 obu_header_size + size_field_size + sequence_header_size;
7210
7211 global_headers->buf = malloc(global_header_buf_size);
7212 if (!global_headers->buf) {
7213 free(global_headers);
7214 return NULL;
7215 }
7216
7217 memcpy(global_headers->buf, &header_buf[0], global_header_buf_size);
7218 global_headers->sz = global_header_buf_size;
7219 return global_headers;
7220}