blob: 842d611761db4668aeef6d8d2fb0e8a8f5552dc4 [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <assert.h>
13#include <limits.h>
14#include <math.h>
15#include <stdio.h>
16#include <stdlib.h>
17#include <string.h>
18
Yaowu Xuf883b422016-08-30 14:01:10 -070019#include "aom_dsp/aom_dsp_common.h"
20#include "aom_mem/aom_mem.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070021#include "aom_ports/mem.h"
22#include "aom_ports/system_state.h"
23
24#include "av1/common/alloccommon.h"
25#include "av1/encoder/aq_cyclicrefresh.h"
26#include "av1/common/common.h"
27#include "av1/common/entropymode.h"
28#include "av1/common/quant_common.h"
29#include "av1/common/seg_common.h"
30
31#include "av1/encoder/encodemv.h"
Alex Converse9d068c12017-08-03 11:48:19 -070032#include "av1/encoder/random.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070033#include "av1/encoder/ratectrl.h"
34
35// Max rate target for 1080P and below encodes under normal circumstances
36// (1920 * 1080 / (16 * 16)) * MAX_MB_RATE bits per MB
37#define MAX_MB_RATE 250
38#define MAXRATE_1080P 2025000
39
40#define DEFAULT_KF_BOOST 2000
41#define DEFAULT_GF_BOOST 2000
42
43#define MIN_BPB_FACTOR 0.005
44#define MAX_BPB_FACTOR 50
45
46#define FRAME_OVERHEAD_BITS 200
// Binds `name` to the bit-depth-specific variant of a minq lookup table by
// token-pasting the suffix (name##_8 / name##_10 / name##_12). On an
// unsupported bit depth the table is set to NULL after a debug assert, so a
// later dereference fails loudly rather than reading the wrong table.
#define ASSIGN_MINQ_TABLE(bit_depth, name)                   \
  do {                                                       \
    switch (bit_depth) {                                     \
      case AOM_BITS_8: name = name##_8; break;               \
      case AOM_BITS_10: name = name##_10; break;             \
      case AOM_BITS_12: name = name##_12; break;             \
      default:                                               \
        assert(0 &&                                          \
               "bit_depth should be AOM_BITS_8, AOM_BITS_10" \
               " or AOM_BITS_12");                           \
        name = NULL;                                         \
    }                                                        \
  } while (0)
Yaowu Xuc27fc142016-08-22 16:08:15 -070060
// Tables relating active max Q to active min Q.
// One table set per supported bit depth (8/10/12); selected at runtime via
// ASSIGN_MINQ_TABLE. Filled in by av1_rc_init_minq_luts() below — they are
// zero until that initializer has run.
static int kf_low_motion_minq_8[QINDEX_RANGE];
static int kf_high_motion_minq_8[QINDEX_RANGE];
static int arfgf_low_motion_minq_8[QINDEX_RANGE];
static int arfgf_high_motion_minq_8[QINDEX_RANGE];
static int inter_minq_8[QINDEX_RANGE];
static int rtc_minq_8[QINDEX_RANGE];

static int kf_low_motion_minq_10[QINDEX_RANGE];
static int kf_high_motion_minq_10[QINDEX_RANGE];
static int arfgf_low_motion_minq_10[QINDEX_RANGE];
static int arfgf_high_motion_minq_10[QINDEX_RANGE];
static int inter_minq_10[QINDEX_RANGE];
static int rtc_minq_10[QINDEX_RANGE];
static int kf_low_motion_minq_12[QINDEX_RANGE];
static int kf_high_motion_minq_12[QINDEX_RANGE];
static int arfgf_low_motion_minq_12[QINDEX_RANGE];
static int arfgf_high_motion_minq_12[QINDEX_RANGE];
static int inter_minq_12[QINDEX_RANGE];
static int rtc_minq_12[QINDEX_RANGE];

// Boost thresholds used by get_active_quality(): above the *_high value the
// low-motion table is used outright, below *_low the high-motion table, and
// in between the two tables are interpolated.
static int gf_high = 2000;
static int gf_low = 400;
static int kf_high = 5000;
static int kf_low = 400;
86
Debargha Mukherjee7166f222017-09-05 21:32:42 -070087// How many times less pixels there are to encode given the current scaling.
88// Temporary replacement for rcf_mult and rate_thresh_mult.
89static double resize_rate_factor(const AV1_COMP *cpi, int width, int height) {
Debargha Mukherjee7166f222017-09-05 21:32:42 -070090 return (double)(cpi->oxcf.width * cpi->oxcf.height) / (width * height);
Fergus Simpsonddc846e2017-04-24 18:09:13 -070091}
92
Yaowu Xuc27fc142016-08-22 16:08:15 -070093// Functions to compute the active minq lookup table entries based on a
94// formulaic approach to facilitate easier adjustment of the Q tables.
95// The formulae were derived from computing a 3rd order polynomial best
96// fit to the original data (after plotting real maxq vs minq (not q index))
97static int get_minq_index(double maxq, double x3, double x2, double x1,
Yaowu Xuf883b422016-08-30 14:01:10 -070098 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070099 int i;
Yaowu Xuf883b422016-08-30 14:01:10 -0700100 const double minqtarget = AOMMIN(((x3 * maxq + x2) * maxq + x1) * maxq, maxq);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700101
102 // Special case handling to deal with the step from q2.0
103 // down to lossless mode represented by q 1.0.
104 if (minqtarget <= 2.0) return 0;
105
106 for (i = 0; i < QINDEX_RANGE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700107 if (minqtarget <= av1_convert_qindex_to_q(i, bit_depth)) return i;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700108 }
109
110 return QINDEX_RANGE - 1;
111}
112
// Fills one complete set of minq lookup tables for a given bit depth.
// For each qindex i, the real Q at i becomes "maxq" and each table entry is
// the qindex whose Q best matches a per-table cubic fit of that maxq
// (see get_minq_index above). The cubic coefficients are empirically tuned
// per frame class (key frame, arf/gf, inter, rtc).
static void init_minq_luts(int *kf_low_m, int *kf_high_m, int *arfgf_low,
                           int *arfgf_high, int *inter, int *rtc,
                           aom_bit_depth_t bit_depth) {
  int i;
  for (i = 0; i < QINDEX_RANGE; i++) {
    const double maxq = av1_convert_qindex_to_q(i, bit_depth);
    kf_low_m[i] = get_minq_index(maxq, 0.000001, -0.0004, 0.150, bit_depth);
    kf_high_m[i] = get_minq_index(maxq, 0.0000021, -0.00125, 0.45, bit_depth);
    arfgf_low[i] = get_minq_index(maxq, 0.0000015, -0.0009, 0.30, bit_depth);
    arfgf_high[i] = get_minq_index(maxq, 0.0000021, -0.00125, 0.55, bit_depth);
    inter[i] = get_minq_index(maxq, 0.00000271, -0.00113, 0.90, bit_depth);
    rtc[i] = get_minq_index(maxq, 0.00000271, -0.00113, 0.70, bit_depth);
  }
}
127
// One-time initialization of all bit-depth variants of the minq tables.
// Must run before any of the get_*_active_quality helpers are used, since
// those read the tables this populates.
void av1_rc_init_minq_luts(void) {
  init_minq_luts(kf_low_motion_minq_8, kf_high_motion_minq_8,
                 arfgf_low_motion_minq_8, arfgf_high_motion_minq_8,
                 inter_minq_8, rtc_minq_8, AOM_BITS_8);
  init_minq_luts(kf_low_motion_minq_10, kf_high_motion_minq_10,
                 arfgf_low_motion_minq_10, arfgf_high_motion_minq_10,
                 inter_minq_10, rtc_minq_10, AOM_BITS_10);
  init_minq_luts(kf_low_motion_minq_12, kf_high_motion_minq_12,
                 arfgf_low_motion_minq_12, arfgf_high_motion_minq_12,
                 inter_minq_12, rtc_minq_12, AOM_BITS_12);
}
139
140// These functions use formulaic calculations to make playing with the
141// quantizer tables easier. If necessary they can be replaced by lookup
142// tables if and when things settle down in the experimental bitstream
Yaowu Xuf883b422016-08-30 14:01:10 -0700143double av1_convert_qindex_to_q(int qindex, aom_bit_depth_t bit_depth) {
Yaowu Xud3e7c682017-12-21 14:08:25 -0800144 // Convert the index to a real Q value (scaled down to match old Q values)
Yaowu Xuc27fc142016-08-22 16:08:15 -0700145 switch (bit_depth) {
Monty Montgomery60f2a222017-11-01 19:48:38 -0400146 case AOM_BITS_8: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 4.0;
147 case AOM_BITS_10: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 16.0;
148 case AOM_BITS_12: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 64.0;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700149 default:
Yaowu Xuf883b422016-08-30 14:01:10 -0700150 assert(0 && "bit_depth should be AOM_BITS_8, AOM_BITS_10 or AOM_BITS_12");
Yaowu Xuc27fc142016-08-22 16:08:15 -0700151 return -1.0;
152 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700153}
154
// Estimates the bits produced per macroblock at a given qindex, scaled by
// the running rate correction factor. Key frames get a larger baseline
// enumerator than inter frames.
int av1_rc_bits_per_mb(FRAME_TYPE frame_type, int qindex,
                       double correction_factor, aom_bit_depth_t bit_depth) {
  const double q = av1_convert_qindex_to_q(qindex, bit_depth);
  int enumerator = frame_type == KEY_FRAME ? 2700000 : 1800000;

  // Caller must keep the correction factor inside the clamped range.
  assert(correction_factor <= MAX_BPB_FACTOR &&
         correction_factor >= MIN_BPB_FACTOR);

  // q based adjustment to baseline enumerator.
  enumerator += (int)(enumerator * q) >> 12;
  return (int)(enumerator * correction_factor / q);
}
167
Yaowu Xuf883b422016-08-30 14:01:10 -0700168int av1_estimate_bits_at_q(FRAME_TYPE frame_type, int q, int mbs,
169 double correction_factor,
170 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700171 const int bpm =
Yaowu Xuf883b422016-08-30 14:01:10 -0700172 (int)(av1_rc_bits_per_mb(frame_type, q, correction_factor, bit_depth));
173 return AOMMAX(FRAME_OVERHEAD_BITS,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700174 (int)((uint64_t)bpm * mbs) >> BPER_MB_NORMBITS);
175}
176
Yaowu Xuf883b422016-08-30 14:01:10 -0700177int av1_rc_clamp_pframe_target_size(const AV1_COMP *const cpi, int target) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700178 const RATE_CONTROL *rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -0700179 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000180 const int min_frame_target =
181 AOMMAX(rc->min_frame_bandwidth, rc->avg_frame_bandwidth >> 5);
182 // Clip the frame target to the minimum setup value.
183 if (cpi->rc.is_src_frame_alt_ref) {
184 // If there is an active ARF at this location use the minimum
185 // bits on this frame even if it is a constructed arf.
186 // The active maximum quantizer insures that an appropriate
187 // number of bits will be spent if needed for constructed ARFs.
188 target = min_frame_target;
189 } else if (target < min_frame_target) {
190 target = min_frame_target;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700191 }
192
193 // Clip the frame target to the maximum allowed value.
194 if (target > rc->max_frame_bandwidth) target = rc->max_frame_bandwidth;
195 if (oxcf->rc_max_inter_bitrate_pct) {
196 const int max_rate =
197 rc->avg_frame_bandwidth * oxcf->rc_max_inter_bitrate_pct / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -0700198 target = AOMMIN(target, max_rate);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700199 }
200
201 return target;
202}
203
Yaowu Xuf883b422016-08-30 14:01:10 -0700204int av1_rc_clamp_iframe_target_size(const AV1_COMP *const cpi, int target) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700205 const RATE_CONTROL *rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -0700206 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700207 if (oxcf->rc_max_intra_bitrate_pct) {
208 const int max_rate =
209 rc->avg_frame_bandwidth * oxcf->rc_max_intra_bitrate_pct / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -0700210 target = AOMMIN(target, max_rate);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700211 }
212 if (target > rc->max_frame_bandwidth) target = rc->max_frame_bandwidth;
213 return target;
214}
215
// Update the buffer level: leaky bucket model.
// Each shown frame drains its encoded size but also refills one average
// frame's worth of bits; no-show frames are pure drain (overhead).
static void update_buffer_level(AV1_COMP *cpi, int encoded_frame_size) {
  const AV1_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;

  // Non-viewable frames are a special case and are treated as pure overhead.
  // TODO(zoeliu): To further explore whether we should treat BWDREF_FRAME
  // differently, since it is a no-show frame.
  if (!cm->show_frame && !rc->is_bwd_ref_frame)
    rc->bits_off_target -= encoded_frame_size;
  else
    rc->bits_off_target += rc->avg_frame_bandwidth - encoded_frame_size;

  // Clip the buffer level to the maximum specified buffer size.
  // (Underflow below zero is intentionally allowed; callers treat negative
  // levels as "drop frames" territory.)
  rc->bits_off_target = AOMMIN(rc->bits_off_target, rc->maximum_buffer_size);
  rc->buffer_level = rc->bits_off_target;
}
233
Yaowu Xuf883b422016-08-30 14:01:10 -0700234int av1_rc_get_default_min_gf_interval(int width, int height,
235 double framerate) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700236 // Assume we do not need any constraint lower than 4K 20 fps
237 static const double factor_safe = 3840 * 2160 * 20.0;
238 const double factor = width * height * framerate;
239 const int default_interval =
240 clamp((int)(framerate * 0.125), MIN_GF_INTERVAL, MAX_GF_INTERVAL);
241
242 if (factor <= factor_safe)
243 return default_interval;
244 else
Yaowu Xuf883b422016-08-30 14:01:10 -0700245 return AOMMAX(default_interval,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700246 (int)(MIN_GF_INTERVAL * factor / factor_safe + 0.5));
247 // Note this logic makes:
248 // 4K24: 5
249 // 4K30: 6
250 // 4K60: 12
251}
252
Yaowu Xuf883b422016-08-30 14:01:10 -0700253int av1_rc_get_default_max_gf_interval(double framerate, int min_gf_interval) {
254 int interval = AOMMIN(MAX_GF_INTERVAL, (int)(framerate * 0.75));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700255 interval += (interval & 0x01); // Round to even value
Wei-Ting Linaebd92f2018-07-26 15:00:41 -0700256#if CONFIG_FIX_GF_LENGTH
257 interval = AOMMAX(FIXED_GF_LENGTH, interval);
258#endif
Yaowu Xuf883b422016-08-30 14:01:10 -0700259 return AOMMAX(interval, min_gf_interval);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700260}
261
// Initializes the RATE_CONTROL state from the encoder configuration.
// Note: assumes rc->starting_buffer_level and rc->avg_frame_bandwidth have
// already been set by the caller (they are read, not derived, here).
void av1_rc_init(const AV1EncoderConfig *oxcf, int pass, RATE_CONTROL *rc) {
  int i;

  // 1-pass CBR starts pessimistic (worst Q); other modes start mid-range.
  if (pass == 0 && oxcf->rc_mode == AOM_CBR) {
    rc->avg_frame_qindex[KEY_FRAME] = oxcf->worst_allowed_q;
    rc->avg_frame_qindex[INTER_FRAME] = oxcf->worst_allowed_q;
  } else {
    rc->avg_frame_qindex[KEY_FRAME] =
        (oxcf->worst_allowed_q + oxcf->best_allowed_q) / 2;
    rc->avg_frame_qindex[INTER_FRAME] =
        (oxcf->worst_allowed_q + oxcf->best_allowed_q) / 2;
  }

  rc->last_q[KEY_FRAME] = oxcf->best_allowed_q;
  rc->last_q[INTER_FRAME] = oxcf->worst_allowed_q;

  rc->buffer_level = rc->starting_buffer_level;
  rc->bits_off_target = rc->starting_buffer_level;

  // Seed the rolling averages with the nominal per-frame bandwidth.
  rc->rolling_target_bits = rc->avg_frame_bandwidth;
  rc->rolling_actual_bits = rc->avg_frame_bandwidth;
  rc->long_rolling_target_bits = rc->avg_frame_bandwidth;
  rc->long_rolling_actual_bits = rc->avg_frame_bandwidth;

  rc->total_actual_bits = 0;
  rc->total_target_bits = 0;
  rc->total_target_vs_actual = 0;

  rc->frames_since_key = 8;  // Sensible default for first frame.
  rc->this_key_frame_forced = 0;
  rc->next_key_frame_forced = 0;
  rc->source_alt_ref_pending = 0;
  rc->source_alt_ref_active = 0;

  rc->frames_till_gf_update_due = 0;
  rc->ni_av_qi = oxcf->worst_allowed_q;
  rc->ni_tot_qi = 0;
  rc->ni_frames = 0;

  rc->tot_q = 0.0;
  rc->avg_q = av1_convert_qindex_to_q(oxcf->worst_allowed_q, oxcf->bit_depth);

  // Rate correction factors start below 1.0 except key frames.
  for (i = 0; i < RATE_FACTOR_LEVELS; ++i) {
    rc->rate_correction_factors[i] = 0.7;
  }
  rc->rate_correction_factors[KF_STD] = 1.0;
  rc->min_gf_interval = oxcf->min_gf_interval;
  rc->max_gf_interval = oxcf->max_gf_interval;
  // 0 means "auto": derive from resolution/framerate. min must be computed
  // first since the max computation depends on it.
  if (rc->min_gf_interval == 0)
    rc->min_gf_interval = av1_rc_get_default_min_gf_interval(
        oxcf->width, oxcf->height, oxcf->init_framerate);
  if (rc->max_gf_interval == 0)
    rc->max_gf_interval = av1_rc_get_default_max_gf_interval(
        oxcf->init_framerate, rc->min_gf_interval);
  rc->baseline_gf_interval = (rc->min_gf_interval + rc->max_gf_interval) / 2;
}
318
Yaowu Xuf883b422016-08-30 14:01:10 -0700319int av1_rc_drop_frame(AV1_COMP *cpi) {
320 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700321 RATE_CONTROL *const rc = &cpi->rc;
322
323 if (!oxcf->drop_frames_water_mark) {
324 return 0;
325 } else {
326 if (rc->buffer_level < 0) {
327 // Always drop if buffer is below 0.
328 return 1;
329 } else {
330 // If buffer is below drop_mark, for now just drop every other frame
331 // (starting with the next frame) until it increases back over drop_mark.
332 int drop_mark =
333 (int)(oxcf->drop_frames_water_mark * rc->optimal_buffer_level / 100);
334 if ((rc->buffer_level > drop_mark) && (rc->decimation_factor > 0)) {
335 --rc->decimation_factor;
336 } else if (rc->buffer_level <= drop_mark && rc->decimation_factor == 0) {
337 rc->decimation_factor = 1;
338 }
339 if (rc->decimation_factor > 0) {
340 if (rc->decimation_count > 0) {
341 --rc->decimation_count;
342 return 1;
343 } else {
344 rc->decimation_count = rc->decimation_factor;
345 return 0;
346 }
347 } else {
348 rc->decimation_count = 0;
349 return 0;
350 }
351 }
352 }
353}
354
// Returns the rate correction factor for the current frame class, scaled by
// the resize factor and clamped to [MIN_BPB_FACTOR, MAX_BPB_FACTOR].
// Frame class selection: key frame -> KF_STD; two-pass -> the gf_group's
// rate factor level; otherwise GF_ARF_STD for boosted golden/altref frames
// (excluding arf overlays) and INTER_NORMAL for everything else.
static double get_rate_correction_factor(const AV1_COMP *cpi, int width,
                                         int height) {
  const RATE_CONTROL *const rc = &cpi->rc;
  double rcf;

  if (cpi->common.frame_type == KEY_FRAME) {
    rcf = rc->rate_correction_factors[KF_STD];
  } else if (cpi->oxcf.pass == 2) {
    RATE_FACTOR_LEVEL rf_lvl =
        cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
    rcf = rc->rate_correction_factors[rf_lvl];
  } else {
    if ((cpi->refresh_alt_ref_frame || cpi->refresh_golden_frame) &&
        !rc->is_src_frame_alt_ref &&
        (cpi->oxcf.rc_mode != AOM_CBR || cpi->oxcf.gf_cbr_boost_pct > 20))
      rcf = rc->rate_correction_factors[GF_ARF_STD];
    else
      rcf = rc->rate_correction_factors[INTER_NORMAL];
  }
  // Account for the size-dependent scaling factor (see resize_rate_factor).
  rcf *= resize_rate_factor(cpi, width, height);
  return fclamp(rcf, MIN_BPB_FACTOR, MAX_BPB_FACTOR);
}
377
// Stores a new rate correction factor for the current frame class.
// Mirror of get_rate_correction_factor: the resize scaling is divided back
// out before storing so the stored value is resolution-independent, and the
// frame-class selection logic must stay in sync with the getter.
static void set_rate_correction_factor(AV1_COMP *cpi, double factor, int width,
                                       int height) {
  RATE_CONTROL *const rc = &cpi->rc;

  // Normalize RCF to account for the size-dependent scaling factor.
  factor /= resize_rate_factor(cpi, width, height);

  factor = fclamp(factor, MIN_BPB_FACTOR, MAX_BPB_FACTOR);

  if (cpi->common.frame_type == KEY_FRAME) {
    rc->rate_correction_factors[KF_STD] = factor;
  } else if (cpi->oxcf.pass == 2) {
    RATE_FACTOR_LEVEL rf_lvl =
        cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
    rc->rate_correction_factors[rf_lvl] = factor;
  } else {
    if ((cpi->refresh_alt_ref_frame || cpi->refresh_golden_frame) &&
        !rc->is_src_frame_alt_ref &&
        (cpi->oxcf.rc_mode != AOM_CBR || cpi->oxcf.gf_cbr_boost_pct > 20))
      rc->rate_correction_factors[GF_ARF_STD] = factor;
    else
      rc->rate_correction_factors[INTER_NORMAL] = factor;
  }
}
402
// Post-encode update of the rate correction factor: compares the actual
// coded size of the frame against the size predicted at its Q, and nudges
// the correction factor toward closing the gap (with damping to avoid
// oscillation). Also records per-frame over/under-shoot direction in
// rc_1_frame/rc_2_frame for the CBR anti-resonance logic in
// av1_rc_regulate_q.
void av1_rc_update_rate_correction_factors(AV1_COMP *cpi, int width,
                                           int height) {
  const AV1_COMMON *const cm = &cpi->common;
  int correction_factor = 100;  // Percent: 100 == prediction was exact.
  double rate_correction_factor =
      get_rate_correction_factor(cpi, width, height);
  double adjustment_limit;
  const int MBs = av1_get_MBs(width, height);

  int projected_size_based_on_q = 0;

  // Do not update the rate factors for arf overlay frames.
  if (cpi->rc.is_src_frame_alt_ref) return;

  // Clear down mmx registers to allow floating point in what follows.
  aom_clear_system_state();

  // Work out how big we would have expected the frame to be at this Q given
  // the current correction factor.
  // Stay in double to avoid int overflow when values are large.
  if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cpi->common.seg.enabled) {
    projected_size_based_on_q =
        av1_cyclic_refresh_estimate_bits_at_q(cpi, rate_correction_factor);
  } else {
    projected_size_based_on_q = av1_estimate_bits_at_q(
        cpi->common.frame_type, cm->base_qindex, MBs, rate_correction_factor,
        cm->seq_params.bit_depth);
  }
  // Work out a size correction factor (actual/predicted, in percent).
  if (projected_size_based_on_q > FRAME_OVERHEAD_BITS)
    correction_factor = (int)((100 * (int64_t)cpi->rc.projected_frame_size) /
                              projected_size_based_on_q);

  // More heavily damped adjustment used if we have been oscillating either
  // side of target.
  if (correction_factor > 0) {
    adjustment_limit =
        0.25 + 0.5 * AOMMIN(1, fabs(log10(0.01 * correction_factor)));
  } else {
    adjustment_limit = 0.75;
  }

  // Record Q and over/under-shoot direction history (-1 = overshoot,
  // 1 = undershoot, 0 = on target) for the last two frames.
  cpi->rc.q_2_frame = cpi->rc.q_1_frame;
  cpi->rc.q_1_frame = cm->base_qindex;
  cpi->rc.rc_2_frame = cpi->rc.rc_1_frame;
  if (correction_factor > 110)
    cpi->rc.rc_1_frame = -1;
  else if (correction_factor < 90)
    cpi->rc.rc_1_frame = 1;
  else
    cpi->rc.rc_1_frame = 0;

  if (correction_factor > 102) {
    // We are not already at the worst allowable quality.
    correction_factor =
        (int)(100 + ((correction_factor - 100) * adjustment_limit));
    rate_correction_factor = (rate_correction_factor * correction_factor) / 100;
    // Keep rate_correction_factor within limits.
    if (rate_correction_factor > MAX_BPB_FACTOR)
      rate_correction_factor = MAX_BPB_FACTOR;
  } else if (correction_factor < 99) {
    // We are not already at the best allowable quality.
    correction_factor =
        (int)(100 - ((100 - correction_factor) * adjustment_limit));
    rate_correction_factor = (rate_correction_factor * correction_factor) / 100;

    // Keep rate_correction_factor within limits.
    if (rate_correction_factor < MIN_BPB_FACTOR)
      rate_correction_factor = MIN_BPB_FACTOR;
  }

  set_rate_correction_factor(cpi, rate_correction_factor, width, height);
}
476
// Selects a Q in [active_best_quality, active_worst_quality] whose predicted
// bits/MB best matches the target frame size. Linear search from best
// (lowest) Q upward, stopping at the first Q whose estimate drops to or
// below the target; picks whichever neighbor of that crossing is closer.
int av1_rc_regulate_q(const AV1_COMP *cpi, int target_bits_per_frame,
                      int active_best_quality, int active_worst_quality,
                      int width, int height) {
  const AV1_COMMON *const cm = &cpi->common;
  int q = active_worst_quality;
  int last_error = INT_MAX;
  int i, target_bits_per_mb, bits_per_mb_at_this_q;
  const int MBs = av1_get_MBs(width, height);
  const double correction_factor =
      get_rate_correction_factor(cpi, width, height);

  // Calculate required scaling factor based on target frame size and size of
  // frame produced using previous Q.
  target_bits_per_mb =
      (int)((uint64_t)(target_bits_per_frame) << BPER_MB_NORMBITS) / MBs;

  i = active_best_quality;

  do {
    if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cm->seg.enabled) {
      bits_per_mb_at_this_q =
          (int)av1_cyclic_refresh_rc_bits_per_mb(cpi, i, correction_factor);
    } else {
      bits_per_mb_at_this_q = (int)av1_rc_bits_per_mb(
          cm->frame_type, i, correction_factor, cm->seq_params.bit_depth);
    }

    if (bits_per_mb_at_this_q <= target_bits_per_mb) {
      // Crossed the target: keep i if it is at least as close as i-1 was.
      if ((target_bits_per_mb - bits_per_mb_at_this_q) <= last_error)
        q = i;
      else
        q = i - 1;

      break;
    } else {
      last_error = bits_per_mb_at_this_q - target_bits_per_mb;
    }
  } while (++i <= active_worst_quality);

  // In CBR mode, this makes sure q is between oscillating Qs to prevent
  // resonance.
  if (cpi->oxcf.rc_mode == AOM_CBR &&
      (cpi->rc.rc_1_frame * cpi->rc.rc_2_frame == -1) &&
      cpi->rc.q_1_frame != cpi->rc.q_2_frame) {
    q = clamp(q, AOMMIN(cpi->rc.q_1_frame, cpi->rc.q_2_frame),
              AOMMAX(cpi->rc.q_1_frame, cpi->rc.q_2_frame));
  }
  return q;
}
526
// Maps a boost value onto a min-Q: boost above `high` selects the low-motion
// table entry, boost below `low` the high-motion entry, and anything in
// between is a rounded linear interpolation between the two.
static int get_active_quality(int q, int gfu_boost, int low, int high,
                              int *low_motion_minq, int *high_motion_minq) {
  if (gfu_boost > high) return low_motion_minq[q];
  if (gfu_boost < low) return high_motion_minq[q];

  const int gap = high - low;
  const int offset = high - gfu_boost;
  const int qdiff = high_motion_minq[q] - low_motion_minq[q];
  // Interpolate with rounding (gap >> 1 adds half the divisor).
  const int adjustment = ((offset * qdiff) + (gap >> 1)) / gap;
  return low_motion_minq[q] + adjustment;
}
541
// Key-frame min-Q from kf_boost, interpolating between the bit-depth-specific
// low/high-motion key-frame tables. Note: the local names must match the
// global table base names because ASSIGN_MINQ_TABLE token-pastes the suffix.
static int get_kf_active_quality(const RATE_CONTROL *const rc, int q,
                                 aom_bit_depth_t bit_depth) {
  int *kf_low_motion_minq;
  int *kf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, kf_low_motion_minq);
  ASSIGN_MINQ_TABLE(bit_depth, kf_high_motion_minq);
  return get_active_quality(q, rc->kf_boost, kf_low, kf_high,
                            kf_low_motion_minq, kf_high_motion_minq);
}
551
// Golden/altref min-Q from gfu_boost, interpolating between the bit-depth-
// specific low/high-motion arf/gf tables. Local names must match the global
// table base names (ASSIGN_MINQ_TABLE token-pastes the suffix).
static int get_gf_active_quality(const RATE_CONTROL *const rc, int q,
                                 aom_bit_depth_t bit_depth) {
  int *arfgf_low_motion_minq;
  int *arfgf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_low_motion_minq);
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_high_motion_minq);
  return get_active_quality(q, rc->gfu_boost, gf_low, gf_high,
                            arfgf_low_motion_minq, arfgf_high_motion_minq);
}
561
#if REDUCE_LAST_ALT_BOOST
// High-motion arf/gf min-Q at `q` from the bit-depth-specific table, with no
// boost interpolation (unlike get_gf_active_quality above).
static int get_gf_high_motion_quality(int q, aom_bit_depth_t bit_depth) {
  int *arfgf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_high_motion_minq);
  return arfgf_high_motion_minq[q];
}
#endif
569
// One-pass VBR active worst Q: derived from the last coded Q of the same
// frame class (doubled for non-boosted frames, 5/4 for boosted golden/arf),
// with special cases for the very first frames, and capped at worst_quality.
static int calc_active_worst_quality_one_pass_vbr(const AV1_COMP *cpi) {
  const RATE_CONTROL *const rc = &cpi->rc;
  const unsigned int curr_frame = cpi->common.current_video_frame;
  int active_worst_quality;

  if (cpi->common.frame_type == KEY_FRAME) {
    active_worst_quality =
        curr_frame == 0 ? rc->worst_quality : rc->last_q[KEY_FRAME] * 2;
  } else {
    if (!rc->is_src_frame_alt_ref &&
        (cpi->refresh_golden_frame || cpi->refresh_alt2_ref_frame ||
         cpi->refresh_alt_ref_frame)) {
      // Boosted reference frame (not an arf overlay): allow only a small
      // rise above the previous Q.
      active_worst_quality = curr_frame == 1 ? rc->last_q[KEY_FRAME] * 5 / 4
                                             : rc->last_q[INTER_FRAME];
    } else {
      active_worst_quality = curr_frame == 1 ? rc->last_q[KEY_FRAME] * 2
                                             : rc->last_q[INTER_FRAME] * 2;
    }
  }
  return AOMMIN(active_worst_quality, rc->worst_quality);
}
591
// Adjust active_worst_quality level based on buffer level.
static int calc_active_worst_quality_one_pass_cbr(const AV1_COMP *cpi) {
  // Adjust active_worst_quality: If buffer is above the optimal/target level,
  // bring active_worst_quality down depending on fullness of buffer.
  // If buffer is below the optimal level, let the active_worst_quality go from
  // ambient Q (at buffer = optimal level) to worst_quality level
  // (at buffer = critical level).
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *rc = &cpi->rc;
  // Buffer level below which we push active_worst to worst_quality.
  int64_t critical_level = rc->optimal_buffer_level >> 3;
  int64_t buff_lvl_step = 0;
  int adjustment = 0;
  int active_worst_quality;
  int ambient_qp;
  if (cm->frame_type == KEY_FRAME) return rc->worst_quality;
  // For ambient_qp we use minimum of avg_frame_qindex[KEY_FRAME/INTER_FRAME]
  // for the first few frames following key frame. These are both initialized
  // to worst_quality and updated with (3/4, 1/4) average in postencode_update.
  // So for first few frames following key, the qp of that key frame is weighted
  // into the active_worst_quality setting.
  ambient_qp = (cm->current_video_frame < 5)
                   ? AOMMIN(rc->avg_frame_qindex[INTER_FRAME],
                            rc->avg_frame_qindex[KEY_FRAME])
                   : rc->avg_frame_qindex[INTER_FRAME];
  // Start from ambient Q with a little headroom (5/4), never above worst.
  active_worst_quality = AOMMIN(rc->worst_quality, ambient_qp * 5 / 4);
  if (rc->buffer_level > rc->optimal_buffer_level) {
    // Adjust down.
    // Maximum limit for down adjustment, ~30%.
    int max_adjustment_down = active_worst_quality / 3;
    if (max_adjustment_down) {
      buff_lvl_step = ((rc->maximum_buffer_size - rc->optimal_buffer_level) /
                       max_adjustment_down);
      if (buff_lvl_step)
        adjustment = (int)((rc->buffer_level - rc->optimal_buffer_level) /
                           buff_lvl_step);
      active_worst_quality -= adjustment;
    }
  } else if (rc->buffer_level > critical_level) {
    // Adjust up from ambient Q.
    if (critical_level) {
      buff_lvl_step = (rc->optimal_buffer_level - critical_level);
      if (buff_lvl_step) {
        adjustment = (int)((rc->worst_quality - ambient_qp) *
                           (rc->optimal_buffer_level - rc->buffer_level) /
                           buff_lvl_step);
      }
      active_worst_quality = ambient_qp + adjustment;
    }
  } else {
    // Set to worst_quality if buffer is below critical level.
    active_worst_quality = rc->worst_quality;
  }
  return active_worst_quality;
}
647
// Picks the frame qindex (return value) and the allowed qindex range
// [*bottom_index, *top_index] for one-pass CBR encoding.
// width/height are the coded frame dimensions; they feed the small-format
// key-frame adjustment and av1_rc_regulate_q().
static int rc_pick_q_and_bounds_one_pass_cbr(const AV1_COMP *cpi, int width,
                                             int height, int *bottom_index,
                                             int *top_index) {
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  int active_best_quality;
  int active_worst_quality = calc_active_worst_quality_one_pass_cbr(cpi);
  int q;
  int *rtc_minq;
  const int bit_depth = cm->seq_params.bit_depth;
  // Selects the bit-depth-specific minimum-q lookup table.
  ASSIGN_MINQ_TABLE(bit_depth, rtc_minq);

  if (frame_is_intra_only(cm)) {
    active_best_quality = rc->best_quality;
    // Handle the special case for key frames forced when we have reached
    // the maximum key frame interval. Here force the Q to a range
    // based on the ambient Q to reduce the risk of popping.
    if (rc->this_key_frame_forced) {
      int qindex = rc->last_boosted_qindex;
      double last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
      int delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
                                            (last_boosted_q * 0.75), bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else if (cm->current_video_frame > 0) {
      // not first frame of one pass and kf_boost is set
      double q_adj_factor = 1.0;
      double q_val;

      active_best_quality =
          get_kf_active_quality(rc, rc->avg_frame_qindex[KEY_FRAME], bit_depth);

      // Allow somewhat lower kf minq with small image formats.
      if ((width * height) <= (352 * 288)) {
        q_adj_factor -= 0.25;
      }

      // Convert the adjustment factor to a qindex delta
      // on active_best_quality.
      q_val = av1_convert_qindex_to_q(active_best_quality, bit_depth);
      active_best_quality +=
          av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth);
    }
  } else if (!rc->is_src_frame_alt_ref &&
             (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
    // Use the lower of active_worst_quality and recent
    // average Q as basis for GF/ARF best Q limit unless last frame was
    // a key frame.
    if (rc->frames_since_key > 1 &&
        rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality) {
      q = rc->avg_frame_qindex[INTER_FRAME];
    } else {
      q = active_worst_quality;
    }
    active_best_quality = get_gf_active_quality(rc, q, bit_depth);
  } else {
    // Use the lower of active_worst_quality and recent/average Q.
    // Before the second frame only the KEY_FRAME average is meaningful.
    if (cm->current_video_frame > 1) {
      if (rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality)
        active_best_quality = rtc_minq[rc->avg_frame_qindex[INTER_FRAME]];
      else
        active_best_quality = rtc_minq[active_worst_quality];
    } else {
      if (rc->avg_frame_qindex[KEY_FRAME] < active_worst_quality)
        active_best_quality = rtc_minq[rc->avg_frame_qindex[KEY_FRAME]];
      else
        active_best_quality = rtc_minq[active_worst_quality];
    }
  }

  // Clip the active best and worst quality values to limits
  active_best_quality =
      clamp(active_best_quality, rc->best_quality, rc->worst_quality);
  active_worst_quality =
      clamp(active_worst_quality, active_best_quality, rc->worst_quality);

  *top_index = active_worst_quality;
  *bottom_index = active_best_quality;

  // Limit Q range for the adaptive loop.
  // (Applies only to non-forced key frames after the first frame.)
  if (cm->frame_type == KEY_FRAME && !rc->this_key_frame_forced &&
      !(cm->current_video_frame == 0)) {
    int qdelta = 0;
    aom_clear_system_state();
    qdelta = av1_compute_qdelta_by_rate(&cpi->rc, cm->frame_type,
                                        active_worst_quality, 2.0, bit_depth);
    *top_index = active_worst_quality + qdelta;
    *top_index = AOMMAX(*top_index, *bottom_index);
  }

  // Special case code to try and match quality with forced key frames
  if (cm->frame_type == KEY_FRAME && rc->this_key_frame_forced) {
    q = rc->last_boosted_qindex;
  } else {
    q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
                          active_worst_quality, width, height);
    if (q > *top_index) {
      // Special case when we are targeting the max allowed rate
      if (rc->this_frame_target >= rc->max_frame_bandwidth)
        *top_index = q;
      else
        q = *top_index;
    }
  }

  assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
  assert(*bottom_index <= rc->worst_quality &&
         *bottom_index >= rc->best_quality);
  assert(q <= rc->worst_quality && q >= rc->best_quality);
  return q;
}
758
759static int get_active_cq_level(const RATE_CONTROL *rc,
Yaowu Xuf883b422016-08-30 14:01:10 -0700760 const AV1EncoderConfig *const oxcf) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700761 static const double cq_adjust_threshold = 0.1;
762 int active_cq_level = oxcf->cq_level;
Yaowu Xuf883b422016-08-30 14:01:10 -0700763 if (oxcf->rc_mode == AOM_CQ && rc->total_target_bits > 0) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700764 const double x = (double)rc->total_actual_bits / rc->total_target_bits;
765 if (x < cq_adjust_threshold) {
766 active_cq_level = (int)(active_cq_level * x / cq_adjust_threshold);
767 }
768 }
769 return active_cq_level;
770}
771
// Picks the frame qindex (return value) and the allowed qindex range
// [*bottom_index, *top_index] for one-pass VBR encoding. Also honors the
// AOM_Q (constant quality) and AOM_CQ (constrained quality) rc modes.
static int rc_pick_q_and_bounds_one_pass_vbr(const AV1_COMP *cpi, int width,
                                             int height, int *bottom_index,
                                             int *top_index) {
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  const int cq_level = get_active_cq_level(rc, oxcf);
  int active_best_quality;
  int active_worst_quality = calc_active_worst_quality_one_pass_vbr(cpi);
  int q;
  int *inter_minq;
  const int bit_depth = cm->seq_params.bit_depth;
  // Selects the bit-depth-specific minimum-q lookup table.
  ASSIGN_MINQ_TABLE(bit_depth, inter_minq);

  if (frame_is_intra_only(cm)) {
    if (oxcf->rc_mode == AOM_Q) {
      // Constant quality: key frames get a fixed boost below cq_level.
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, bit_depth);
      const int delta_qindex =
          av1_compute_qdelta(rc, q_val, q_val * 0.25, bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else if (rc->this_key_frame_forced) {
      // Forced key frame at max interval: stay near the last boosted q
      // to reduce the risk of popping.
      const int qindex = rc->last_boosted_qindex;
      const double last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
      const int delta_qindex = av1_compute_qdelta(
          rc, last_boosted_q, last_boosted_q * 0.75, bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {  // not first frame of one pass and kf_boost is set
      double q_adj_factor = 1.0;

      active_best_quality =
          get_kf_active_quality(rc, rc->avg_frame_qindex[KEY_FRAME], bit_depth);

      // Allow somewhat lower kf minq with small image formats.
      if ((width * height) <= (352 * 288)) {
        q_adj_factor -= 0.25;
      }

      // Convert the adjustment factor to a qindex delta on active_best_quality.
      {
        const double q_val =
            av1_convert_qindex_to_q(active_best_quality, bit_depth);
        active_best_quality +=
            av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth);
      }
    }
  } else if (!rc->is_src_frame_alt_ref &&
             (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
    // Use the lower of active_worst_quality and recent
    // average Q as basis for GF/ARF best Q limit unless last frame was
    // a key frame.
    q = (rc->frames_since_key > 1 &&
         rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality)
            ? rc->avg_frame_qindex[INTER_FRAME]
            : rc->avg_frame_qindex[KEY_FRAME];
    // For constrained quality dont allow Q less than the cq level
    if (oxcf->rc_mode == AOM_CQ) {
      if (q < cq_level) q = cq_level;
      active_best_quality = get_gf_active_quality(rc, q, bit_depth);
      // Constrained quality use slightly lower active best.
      active_best_quality = active_best_quality * 15 / 16;
    } else if (oxcf->rc_mode == AOM_Q) {
      // ARF frames get a larger boost (0.40 of q) than golden frames (0.50).
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, bit_depth);
      const int delta_qindex =
          (cpi->refresh_alt_ref_frame)
              ? av1_compute_qdelta(rc, q_val, q_val * 0.40, bit_depth)
              : av1_compute_qdelta(rc, q_val, q_val * 0.50, bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {
      active_best_quality = get_gf_active_quality(rc, q, bit_depth);
    }
  } else {
    if (oxcf->rc_mode == AOM_Q) {
      // Constant quality: cycle per-frame boosts over a fixed GF interval.
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, bit_depth);
      const double delta_rate[FIXED_GF_INTERVAL] = { 0.50, 1.0, 0.85, 1.0,
                                                     0.70, 1.0, 0.85, 1.0 };
      const int delta_qindex = av1_compute_qdelta(
          rc, q_val,
          q_val * delta_rate[cm->current_video_frame % FIXED_GF_INTERVAL],
          bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {
      // Use the lower of active_worst_quality and recent/average Q.
      active_best_quality = (cm->current_video_frame > 1)
                                ? inter_minq[rc->avg_frame_qindex[INTER_FRAME]]
                                : inter_minq[rc->avg_frame_qindex[KEY_FRAME]];
      // For the constrained quality mode we don't want
      // q to fall below the cq level.
      if ((oxcf->rc_mode == AOM_CQ) && (active_best_quality < cq_level)) {
        active_best_quality = cq_level;
      }
    }
  }

  // Clip the active best and worst quality values to limits
  active_best_quality =
      clamp(active_best_quality, rc->best_quality, rc->worst_quality);
  active_worst_quality =
      clamp(active_worst_quality, active_best_quality, rc->worst_quality);

  *top_index = active_worst_quality;
  *bottom_index = active_best_quality;

  // Limit Q range for the adaptive loop.
  {
    int qdelta = 0;
    aom_clear_system_state();
    if (cm->frame_type == KEY_FRAME && !rc->this_key_frame_forced &&
        !(cm->current_video_frame == 0)) {
      qdelta = av1_compute_qdelta_by_rate(&cpi->rc, cm->frame_type,
                                          active_worst_quality, 2.0, bit_depth);
    } else if (!rc->is_src_frame_alt_ref &&
               (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
      qdelta = av1_compute_qdelta_by_rate(
          &cpi->rc, cm->frame_type, active_worst_quality, 1.75, bit_depth);
    }
    *top_index = active_worst_quality + qdelta;
    *top_index = AOMMAX(*top_index, *bottom_index);
  }

  if (oxcf->rc_mode == AOM_Q) {
    q = active_best_quality;
    // Special case code to try and match quality with forced key frames
  } else if ((cm->frame_type == KEY_FRAME) && rc->this_key_frame_forced) {
    q = rc->last_boosted_qindex;
  } else {
    q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
                          active_worst_quality, width, height);
    if (q > *top_index) {
      // Special case when we are targeting the max allowed rate
      if (rc->this_frame_target >= rc->max_frame_bandwidth)
        *top_index = q;
      else
        q = *top_index;
    }
  }

  assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
  assert(*bottom_index <= rc->worst_quality &&
         *bottom_index >= rc->best_quality);
  assert(q <= rc->worst_quality && q >= rc->best_quality);
  return q;
}
917
Yaowu Xuf883b422016-08-30 14:01:10 -0700918int av1_frame_type_qdelta(const AV1_COMP *cpi, int rf_level, int q) {
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200919 static const FRAME_TYPE frame_type[RATE_FACTOR_LEVELS] = {
920 INTER_FRAME, INTER_FRAME, INTER_FRAME, INTER_FRAME, INTER_FRAME, KEY_FRAME
921 };
Yaowu Xuf883b422016-08-30 14:01:10 -0700922 const AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700923 int qdelta = av1_compute_qdelta_by_rate(&cpi->rc, frame_type[rf_level], q,
924 rate_factor_deltas[rf_level],
925 cm->seq_params.bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700926 return qdelta;
927}
928
929#define STATIC_MOTION_THRESH 95
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700930static int rc_pick_q_and_bounds_two_pass(const AV1_COMP *cpi, int width,
931 int height, int *bottom_index,
Wei-Ting Linebff3772018-07-24 11:59:40 -0700932 int *top_index, int *arf_q) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700933 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700934 const RATE_CONTROL *const rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -0700935 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700936 const GF_GROUP *gf_group = &cpi->twopass.gf_group;
937 const int cq_level = get_active_cq_level(rc, oxcf);
938 int active_best_quality;
939 int active_worst_quality = cpi->twopass.active_worst_quality;
940 int q;
941 int *inter_minq;
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700942 const int bit_depth = cm->seq_params.bit_depth;
943 ASSIGN_MINQ_TABLE(bit_depth, inter_minq);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700944
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700945#if CUSTOMIZED_GF
Wei-Ting Lin240d9b42018-07-12 11:48:02 -0700946 const int is_intrl_arf_boost =
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700947 gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE;
948#else
949 const int is_intrl_arf_boost = cpi->refresh_alt2_ref_frame;
Wei-Ting Lincffe49d2018-07-10 14:15:46 -0700950#endif // CUSTOMIZED_GF
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700951
Yaowu Xuc27fc142016-08-22 16:08:15 -0700952 if (frame_is_intra_only(cm)) {
953 // Handle the special case for key frames forced when we have reached
954 // the maximum key frame interval. Here force the Q to a range
955 // based on the ambient Q to reduce the risk of popping.
956 if (rc->this_key_frame_forced) {
957 double last_boosted_q;
958 int delta_qindex;
959 int qindex;
960
961 if (cpi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700962 qindex = AOMMIN(rc->last_kf_qindex, rc->last_boosted_qindex);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700963 active_best_quality = qindex;
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700964 last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
Yaowu Xuf883b422016-08-30 14:01:10 -0700965 delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700966 last_boosted_q * 1.25, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700967 active_worst_quality =
Yaowu Xuf883b422016-08-30 14:01:10 -0700968 AOMMIN(qindex + delta_qindex, active_worst_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700969 } else {
970 qindex = rc->last_boosted_qindex;
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700971 last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
Yaowu Xuf883b422016-08-30 14:01:10 -0700972 delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700973 last_boosted_q * 0.75, bit_depth);
Yaowu Xuf883b422016-08-30 14:01:10 -0700974 active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700975 }
976 } else {
977 // Not forced keyframe.
978 double q_adj_factor = 1.0;
979 double q_val;
980
981 // Baseline value derived from cpi->active_worst_quality and kf boost.
982 active_best_quality =
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700983 get_kf_active_quality(rc, active_worst_quality, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700984
985 // Allow somewhat lower kf minq with small image formats.
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700986 if ((width * height) <= (352 * 288)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700987 q_adj_factor -= 0.25;
988 }
989
990 // Make a further adjustment based on the kf zero motion measure.
991 q_adj_factor += 0.05 - (0.001 * (double)cpi->twopass.kf_zeromotion_pct);
992
993 // Convert the adjustment factor to a qindex delta
994 // on active_best_quality.
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700995 q_val = av1_convert_qindex_to_q(active_best_quality, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700996 active_best_quality +=
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700997 av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700998 }
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200999 } else if (!rc->is_src_frame_alt_ref &&
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001000 (cpi->refresh_golden_frame || is_intrl_arf_boost ||
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02001001 cpi->refresh_alt_ref_frame)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001002 // Use the lower of active_worst_quality and recent
1003 // average Q as basis for GF/ARF best Q limit unless last frame was
1004 // a key frame.
1005 if (rc->frames_since_key > 1 &&
1006 rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality) {
1007 q = rc->avg_frame_qindex[INTER_FRAME];
1008 } else {
1009 q = active_worst_quality;
1010 }
1011 // For constrained quality dont allow Q less than the cq level
Yaowu Xuf883b422016-08-30 14:01:10 -07001012 if (oxcf->rc_mode == AOM_CQ) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001013 if (q < cq_level) q = cq_level;
1014
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001015 active_best_quality = get_gf_active_quality(rc, q, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001016
1017 // Constrained quality use slightly lower active best.
1018 active_best_quality = active_best_quality * 15 / 16;
1019
Yaowu Xuf883b422016-08-30 14:01:10 -07001020 } else if (oxcf->rc_mode == AOM_Q) {
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001021 if (!cpi->refresh_alt_ref_frame && !is_intrl_arf_boost) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001022 active_best_quality = cq_level;
1023 } else {
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001024 if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
1025 active_best_quality = get_gf_active_quality(rc, q, bit_depth);
Wei-Ting Linebff3772018-07-24 11:59:40 -07001026 *arf_q = active_best_quality;
1027#if REDUCE_LAST_ALT_BOOST
Wei-Ting Lin993e24f2018-08-13 10:34:49 -07001028 const int min_boost = get_gf_high_motion_quality(q, bit_depth);
1029 const int boost = min_boost - active_best_quality;
Wei-Ting Linebff3772018-07-24 11:59:40 -07001030
1031 active_best_quality = min_boost - (int)(boost * rc->arf_boost_factor);
1032#endif
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001033 } else {
1034 active_best_quality = rc->arf_q;
1035 }
Wei-Ting Lin050fb022018-07-13 11:45:05 -07001036#if USE_SYMM_MULTI_LAYER
1037 if (cpi->new_bwdref_update_rule && is_intrl_arf_boost) {
1038 int this_height = gf_group->pyramid_level[gf_group->index];
1039 while (this_height < gf_group->pyramid_height) {
1040 active_best_quality = (active_best_quality + cq_level + 1) / 2;
1041 ++this_height;
1042 }
1043 } else {
1044#endif
1045 // Modify best quality for second level arfs. For mode AOM_Q this
1046 // becomes the baseline frame q.
1047 if (gf_group->rf_level[gf_group->index] == GF_ARF_LOW)
1048 active_best_quality = (active_best_quality + cq_level + 1) / 2;
1049#if USE_SYMM_MULTI_LAYER
1050 }
1051#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001052 }
1053 } else {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001054 active_best_quality = get_gf_active_quality(rc, q, bit_depth);
Wei-Ting Lin050fb022018-07-13 11:45:05 -07001055#if USE_SYMM_MULTI_LAYER
1056 if (cpi->new_bwdref_update_rule && is_intrl_arf_boost) {
1057 int this_height = gf_group->pyramid_level[gf_group->index];
1058 while (this_height < gf_group->pyramid_height) {
1059 active_best_quality =
1060 (active_best_quality + active_worst_quality + 1) / 2;
1061 ++this_height;
1062 }
1063 }
1064#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001065 }
1066 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001067 if (oxcf->rc_mode == AOM_Q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001068 active_best_quality = cq_level;
1069 } else {
1070 active_best_quality = inter_minq[active_worst_quality];
1071
1072 // For the constrained quality mode we don't want
1073 // q to fall below the cq level.
Yaowu Xuf883b422016-08-30 14:01:10 -07001074 if ((oxcf->rc_mode == AOM_CQ) && (active_best_quality < cq_level)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001075 active_best_quality = cq_level;
1076 }
1077 }
1078 }
1079
1080 // Extension to max or min Q if undershoot or overshoot is outside
1081 // the permitted range.
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +00001082 if ((cpi->oxcf.rc_mode != AOM_Q) &&
1083 (cpi->twopass.gf_zeromotion_pct < VLOW_MOTION_THRESHOLD)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001084 if (frame_is_intra_only(cm) ||
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02001085 (!rc->is_src_frame_alt_ref &&
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001086 (cpi->refresh_golden_frame || is_intrl_arf_boost ||
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02001087 cpi->refresh_alt_ref_frame))) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001088 active_best_quality -=
1089 (cpi->twopass.extend_minq + cpi->twopass.extend_minq_fast);
1090 active_worst_quality += (cpi->twopass.extend_maxq / 2);
1091 } else {
1092 active_best_quality -=
1093 (cpi->twopass.extend_minq + cpi->twopass.extend_minq_fast) / 2;
1094 active_worst_quality += cpi->twopass.extend_maxq;
1095 }
1096 }
1097
Yaowu Xuf883b422016-08-30 14:01:10 -07001098 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07001099 // Static forced key frames Q restrictions dealt with elsewhere.
1100 if (!(frame_is_intra_only(cm)) || !rc->this_key_frame_forced ||
1101 (cpi->twopass.last_kfgroup_zeromotion_pct < STATIC_MOTION_THRESH)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001102 int qdelta = av1_frame_type_qdelta(cpi, gf_group->rf_level[gf_group->index],
1103 active_worst_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001104 active_worst_quality =
Yaowu Xuf883b422016-08-30 14:01:10 -07001105 AOMMAX(active_worst_quality + qdelta, active_best_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001106 }
1107
1108 // Modify active_best_quality for downscaled normal frames.
Cheng Chen09c83a52018-06-05 12:27:36 -07001109 if (av1_frame_scaled(cm) && !frame_is_kf_gf_arf(cpi)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001110 int qdelta = av1_compute_qdelta_by_rate(
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001111 rc, cm->frame_type, active_best_quality, 2.0, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001112 active_best_quality =
Yaowu Xuf883b422016-08-30 14:01:10 -07001113 AOMMAX(active_best_quality + qdelta, rc->best_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001114 }
1115
1116 active_best_quality =
1117 clamp(active_best_quality, rc->best_quality, rc->worst_quality);
1118 active_worst_quality =
1119 clamp(active_worst_quality, active_best_quality, rc->worst_quality);
1120
Yaowu Xuf883b422016-08-30 14:01:10 -07001121 if (oxcf->rc_mode == AOM_Q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001122 q = active_best_quality;
1123 // Special case code to try and match quality with forced key frames.
1124 } else if (frame_is_intra_only(cm) && rc->this_key_frame_forced) {
1125 // If static since last kf use better of last boosted and last kf q.
1126 if (cpi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001127 q = AOMMIN(rc->last_kf_qindex, rc->last_boosted_qindex);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001128 } else {
1129 q = rc->last_boosted_qindex;
1130 }
1131 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001132 q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001133 active_worst_quality, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001134 if (q > active_worst_quality) {
1135 // Special case when we are targeting the max allowed rate.
1136 if (rc->this_frame_target >= rc->max_frame_bandwidth)
1137 active_worst_quality = q;
1138 else
1139 q = active_worst_quality;
1140 }
1141 }
1142 clamp(q, active_best_quality, active_worst_quality);
1143
1144 *top_index = active_worst_quality;
1145 *bottom_index = active_best_quality;
1146
1147 assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
1148 assert(*bottom_index <= rc->worst_quality &&
1149 *bottom_index >= rc->best_quality);
1150 assert(q <= rc->worst_quality && q >= rc->best_quality);
1151 return q;
1152}
1153
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001154int av1_rc_pick_q_and_bounds(AV1_COMP *cpi, int width, int height,
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001155 int *bottom_index, int *top_index) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001156 int q;
1157 if (cpi->oxcf.pass == 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001158 if (cpi->oxcf.rc_mode == AOM_CBR)
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001159 q = rc_pick_q_and_bounds_one_pass_cbr(cpi, width, height, bottom_index,
1160 top_index);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001161 else
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001162 q = rc_pick_q_and_bounds_one_pass_vbr(cpi, width, height, bottom_index,
1163 top_index);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001164 } else {
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001165 assert(cpi->oxcf.pass == 2 && "invalid encode pass");
1166
1167 GF_GROUP *gf_group = &cpi->twopass.gf_group;
Wei-Ting Linebff3772018-07-24 11:59:40 -07001168 int arf_q = 0;
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001169
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001170 q = rc_pick_q_and_bounds_two_pass(cpi, width, height, bottom_index,
Wei-Ting Linebff3772018-07-24 11:59:40 -07001171 top_index, &arf_q);
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001172
1173 if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
Wei-Ting Linebff3772018-07-24 11:59:40 -07001174 cpi->rc.arf_q = arf_q;
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001175 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001176 }
1177
1178 return q;
1179}
1180
Yaowu Xuf883b422016-08-30 14:01:10 -07001181void av1_rc_compute_frame_size_bounds(const AV1_COMP *cpi, int frame_target,
1182 int *frame_under_shoot_limit,
1183 int *frame_over_shoot_limit) {
1184 if (cpi->oxcf.rc_mode == AOM_Q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001185 *frame_under_shoot_limit = 0;
1186 *frame_over_shoot_limit = INT_MAX;
1187 } else {
1188 // For very small rate targets where the fractional adjustment
1189 // may be tiny make sure there is at least a minimum range.
1190 const int tolerance = (cpi->sf.recode_tolerance * frame_target) / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -07001191 *frame_under_shoot_limit = AOMMAX(frame_target - tolerance - 200, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001192 *frame_over_shoot_limit =
Yaowu Xuf883b422016-08-30 14:01:10 -07001193 AOMMIN(frame_target + tolerance + 200, cpi->rc.max_frame_bandwidth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001194 }
1195}
1196
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001197static void rc_set_frame_target(AV1_COMP *cpi, int target, int width,
1198 int height) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001199 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001200 RATE_CONTROL *const rc = &cpi->rc;
1201
1202 rc->this_frame_target = target;
1203
Fergus Simpsonfecb2ab2017-04-30 15:49:57 -07001204 // Modify frame size target when down-scaled.
Cheng Chen09c83a52018-06-05 12:27:36 -07001205 if (av1_frame_scaled(cm))
Fergus Simpsonddc846e2017-04-24 18:09:13 -07001206 rc->this_frame_target =
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001207 (int)(rc->this_frame_target * resize_rate_factor(cpi, width, height));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001208
1209 // Target rate per SB64 (including partial SB64s.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001210 rc->sb64_target_rate =
1211 (int)((int64_t)rc->this_frame_target * 64 * 64) / (width * height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001212}
1213
Yaowu Xuf883b422016-08-30 14:01:10 -07001214static void update_alt_ref_frame_stats(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001215 // this frame refreshes means next frames don't unless specified by user
1216 RATE_CONTROL *const rc = &cpi->rc;
1217 rc->frames_since_golden = 0;
1218
1219 // Mark the alt ref as done (setting to 0 means no further alt refs pending).
1220 rc->source_alt_ref_pending = 0;
1221
1222 // Set the alternate reference frame active flag
1223 rc->source_alt_ref_active = 1;
1224}
1225
Yaowu Xuf883b422016-08-30 14:01:10 -07001226static void update_golden_frame_stats(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001227 RATE_CONTROL *const rc = &cpi->rc;
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001228#if CUSTOMIZED_GF
1229 const TWO_PASS *const twopass = &cpi->twopass;
1230 const GF_GROUP *const gf_group = &twopass->gf_group;
1231 const int is_intrnl_arf =
1232 cpi->oxcf.pass == 2
1233 ? gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE
1234 : cpi->refresh_alt2_ref_frame;
1235#else
1236 const int is_intnl_arf = cpi->refresh_alt2_ref_frame;
1237#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001238
Yaowu Xuc27fc142016-08-22 16:08:15 -07001239 // Update the Golden frame usage counts.
1240 // NOTE(weitinglin): If we use show_existing_frame for an OVERLAY frame,
1241 // only the virtual indices for the reference frame will be
1242 // updated and cpi->refresh_golden_frame will still be zero.
1243 if (cpi->refresh_golden_frame || rc->is_src_frame_alt_ref) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001244 // We will not use internal overlay frames to replace the golden frame
1245 if (!rc->is_src_frame_ext_arf)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001246 // this frame refreshes means next frames don't unless specified by user
1247 rc->frames_since_golden = 0;
1248
1249 // If we are not using alt ref in the up and coming group clear the arf
1250 // active flag. In multi arf group case, if the index is not 0 then
1251 // we are overlaying a mid group arf so should not reset the flag.
1252 if (cpi->oxcf.pass == 2) {
1253 if (!rc->source_alt_ref_pending && (cpi->twopass.gf_group.index == 0))
1254 rc->source_alt_ref_active = 0;
1255 } else if (!rc->source_alt_ref_pending) {
1256 rc->source_alt_ref_active = 0;
1257 }
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001258 } else if (!cpi->refresh_alt_ref_frame && !is_intrnl_arf) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001259 rc->frames_since_golden++;
1260 }
1261}
1262
// Post-encode rate-control bookkeeping. Records the actual compressed size of
// the frame just encoded, updates the rate correction factors, Q averages,
// buffer model, rolling over/under-spend monitors, and golden/alt-ref state.
// 'bytes_used' is the compressed frame size in bytes (converted to bits
// below).
void av1_rc_postencode_update(AV1_COMP *cpi, uint64_t bytes_used) {
  const AV1_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;
#if CUSTOMIZED_GF
  // In two-pass mode an internal ARF is identified from the GF group update
  // type; in one-pass mode fall back on the alt2 refresh flag.
  const TWO_PASS *const twopass = &cpi->twopass;
  const GF_GROUP *const gf_group = &twopass->gf_group;
  const int is_intrnl_arf =
      cpi->oxcf.pass == 2
          ? gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE
          : cpi->refresh_alt2_ref_frame;
#else
  const int is_intrnl_arf = cpi->refresh_alt2_ref_frame;
#endif

  const int qindex = cm->base_qindex;

  // Let cyclic refresh update its segment-based state first.
  if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cm->seg.enabled) {
    av1_cyclic_refresh_postencode(cpi);
  }

  // Update rate control heuristics
  rc->projected_frame_size = (int)(bytes_used << 3);  // bytes -> bits

  // Post encode loop adjustment of Q prediction.
  av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);

  // Keep a record of last Q and ambient average Q.
  if (cm->frame_type == KEY_FRAME) {
    rc->last_q[KEY_FRAME] = qindex;
    // Exponential moving average: avg = (3 * avg + qindex) / 4, rounded.
    rc->avg_frame_qindex[KEY_FRAME] =
        ROUND_POWER_OF_TWO(3 * rc->avg_frame_qindex[KEY_FRAME] + qindex, 2);
  } else {
    // Only "normal" inter frames (not overlay/GF/ARF/internal-ARF) feed the
    // inter-frame Q statistics.
    if (!rc->is_src_frame_alt_ref &&
        !(cpi->refresh_golden_frame || is_intrnl_arf ||
          cpi->refresh_alt_ref_frame)) {
      rc->last_q[INTER_FRAME] = qindex;
      rc->avg_frame_qindex[INTER_FRAME] =
          ROUND_POWER_OF_TWO(3 * rc->avg_frame_qindex[INTER_FRAME] + qindex, 2);
      rc->ni_frames++;
      rc->tot_q += av1_convert_qindex_to_q(qindex, cm->seq_params.bit_depth);
      rc->avg_q = rc->tot_q / rc->ni_frames;
      // Calculate the average Q for normal inter frames (not key or GFU
      // frames).
      rc->ni_tot_qi += qindex;
      rc->ni_av_qi = rc->ni_tot_qi / rc->ni_frames;
    }
  }

  // Keep record of last boosted (KF/GF/ARF) Q value.
  // If the current frame is coded at a lower Q then we also update it.
  // If all mbs in this group are skipped only update if the Q value is
  // better than that already stored.
  // This is used to help set quality in forced key frames to reduce popping
  if ((qindex < rc->last_boosted_qindex) || (cm->frame_type == KEY_FRAME) ||
      (!rc->constrained_gf_group &&
       (cpi->refresh_alt_ref_frame || is_intrnl_arf ||
        (cpi->refresh_golden_frame && !rc->is_src_frame_alt_ref)))) {
    rc->last_boosted_qindex = qindex;
  }
  if (cm->frame_type == KEY_FRAME) rc->last_kf_qindex = qindex;

  // Feed the actual frame size into the (leaky-bucket) buffer model.
  update_buffer_level(cpi, rc->projected_frame_size);

  // Rolling monitors of whether we are over or underspending used to help
  // regulate min and Max Q in two pass.
  // Undo the resize scaling applied in rc_set_frame_target so the rolling
  // monitors compare like-for-like full-resolution targets.
  if (av1_frame_scaled(cm))
    rc->this_frame_target =
        (int)(rc->this_frame_target /
              resize_rate_factor(cpi, cm->width, cm->height));
  if (cm->frame_type != KEY_FRAME) {
    rc->rolling_target_bits = ROUND_POWER_OF_TWO(
        rc->rolling_target_bits * 3 + rc->this_frame_target, 2);
    rc->rolling_actual_bits = ROUND_POWER_OF_TWO(
        rc->rolling_actual_bits * 3 + rc->projected_frame_size, 2);
    rc->long_rolling_target_bits = ROUND_POWER_OF_TWO(
        rc->long_rolling_target_bits * 31 + rc->this_frame_target, 5);
    rc->long_rolling_actual_bits = ROUND_POWER_OF_TWO(
        rc->long_rolling_actual_bits * 31 + rc->projected_frame_size, 5);
  }

  // Actual bits spent
  rc->total_actual_bits += rc->projected_frame_size;
  // TODO(zoeliu): To investigate whether we should treat BWDREF_FRAME
  //               differently here for rc->avg_frame_bandwidth.
  rc->total_target_bits +=
      (cm->show_frame || rc->is_bwd_ref_frame) ? rc->avg_frame_bandwidth : 0;

  rc->total_target_vs_actual = rc->total_actual_bits - rc->total_target_bits;

  if (is_altref_enabled(cpi) && cpi->refresh_alt_ref_frame &&
      (cm->frame_type != KEY_FRAME))
    // Update the alternate reference frame stats as appropriate.
    update_alt_ref_frame_stats(cpi);
  else
    // Update the Golden frame stats as appropriate.
    update_golden_frame_stats(cpi);

  if (cm->frame_type == KEY_FRAME) rc->frames_since_key = 0;
  // NOTE(review): dead experimental code below — candidate for removal.
  // if (cm->current_video_frame == 1 && cm->show_frame)
  /*
  rc->this_frame_target =
      (int)(rc->this_frame_target / resize_rate_factor(cpi, cm->width,
  cm->height));
  */
}
1368
Yaowu Xuf883b422016-08-30 14:01:10 -07001369void av1_rc_postencode_update_drop_frame(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001370 // Update buffer level with zero size, update frame counters, and return.
1371 update_buffer_level(cpi, 0);
1372 cpi->rc.frames_since_key++;
1373 cpi->rc.frames_to_key--;
1374 cpi->rc.rc_2_frame = 0;
1375 cpi->rc.rc_1_frame = 0;
1376}
1377
1378// Use this macro to turn on/off use of alt-refs in one-pass mode.
1379#define USE_ALTREF_FOR_ONE_PASS 1
1380
Yaowu Xuf883b422016-08-30 14:01:10 -07001381static int calc_pframe_target_size_one_pass_vbr(const AV1_COMP *const cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001382 static const int af_ratio = 10;
1383 const RATE_CONTROL *const rc = &cpi->rc;
1384 int target;
1385#if USE_ALTREF_FOR_ONE_PASS
1386 target =
1387 (!rc->is_src_frame_alt_ref &&
1388 (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame))
1389 ? (rc->avg_frame_bandwidth * rc->baseline_gf_interval * af_ratio) /
1390 (rc->baseline_gf_interval + af_ratio - 1)
1391 : (rc->avg_frame_bandwidth * rc->baseline_gf_interval) /
1392 (rc->baseline_gf_interval + af_ratio - 1);
1393#else
1394 target = rc->avg_frame_bandwidth;
1395#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001396 return av1_rc_clamp_pframe_target_size(cpi, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001397}
1398
Yaowu Xuf883b422016-08-30 14:01:10 -07001399static int calc_iframe_target_size_one_pass_vbr(const AV1_COMP *const cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001400 static const int kf_ratio = 25;
1401 const RATE_CONTROL *rc = &cpi->rc;
1402 const int target = rc->avg_frame_bandwidth * kf_ratio;
Yaowu Xuf883b422016-08-30 14:01:10 -07001403 return av1_rc_clamp_iframe_target_size(cpi, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001404}
1405
// Per-frame rate-control decisions for one-pass VBR: chooses the frame type
// (KEY_FRAME / INTER_FRAME / S_FRAME), starts a new golden-frame group when
// one is due, and sets the frame's bit target.
void av1_rc_get_one_pass_vbr_params(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;
  int target;
  int altref_enabled = is_altref_enabled(cpi);
  int sframe_dist = cpi->oxcf.sframe_dist;
  int sframe_mode = cpi->oxcf.sframe_mode;
  int sframe_enabled = cpi->oxcf.sframe_enabled;
  // Key frame on: the first frame, an externally forced key frame, or
  // expiry of the key-frame interval.
  // TODO(yaowu): replace the "auto_key && 0" below with proper decision logic.
  if (!cpi->refresh_alt_ref_frame &&
      (cm->current_video_frame == 0 || (cpi->frame_flags & FRAMEFLAGS_KEY) ||
       rc->frames_to_key == 0 || (cpi->oxcf.auto_key && 0))) {
    cm->frame_type = KEY_FRAME;
    rc->this_key_frame_forced =
        cm->current_video_frame != 0 && rc->frames_to_key == 0;
    rc->frames_to_key = cpi->oxcf.key_freq;
    rc->kf_boost = DEFAULT_KF_BOOST;
    rc->source_alt_ref_active = 0;
  } else {
    cm->frame_type = INTER_FRAME;
    if (sframe_enabled) {
      if (altref_enabled) {
        if (sframe_mode == 1) {
          // sframe_mode == 1: insert sframe if it matches altref frame.

          if (cm->current_video_frame % sframe_dist == 0 &&
              cm->frame_type != KEY_FRAME && cm->current_video_frame != 0 &&
              cpi->refresh_alt_ref_frame) {
            cm->frame_type = S_FRAME;
          }
        } else {
          // sframe_mode != 1: if sframe will be inserted at the next available
          // altref frame

          // Mark an S-frame as due at each sframe_dist boundary...
          if (cm->current_video_frame % sframe_dist == 0 &&
              cm->frame_type != KEY_FRAME && cm->current_video_frame != 0) {
            rc->sframe_due = 1;
          }

          // ...and realize it on the next alt-ref refresh.
          if (rc->sframe_due && cpi->refresh_alt_ref_frame) {
            cm->frame_type = S_FRAME;
            rc->sframe_due = 0;
          }
        }
      } else {
        // No alt-refs: insert S-frames strictly on the sframe_dist cadence.
        if (cm->current_video_frame % sframe_dist == 0 &&
            cm->frame_type != KEY_FRAME && cm->current_video_frame != 0) {
          cm->frame_type = S_FRAME;
        }
      }
    }
  }
  // Start a new golden-frame group when the previous one has run out.
  if (rc->frames_till_gf_update_due == 0) {
    rc->baseline_gf_interval = (rc->min_gf_interval + rc->max_gf_interval) / 2;
    rc->frames_till_gf_update_due = rc->baseline_gf_interval;
    // NOTE: frames_till_gf_update_due must be <= frames_to_key.
    if (rc->frames_till_gf_update_due > rc->frames_to_key) {
      rc->frames_till_gf_update_due = rc->frames_to_key;
      rc->constrained_gf_group = 1;
    } else {
      rc->constrained_gf_group = 0;
    }
    cpi->refresh_golden_frame = 1;
    rc->source_alt_ref_pending = USE_ALTREF_FOR_ONE_PASS;
    rc->gfu_boost = DEFAULT_GF_BOOST;
  }

  // Refresh cyclic-AQ parameters before the frame Q is selected.
  if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
    av1_cyclic_refresh_update_parameters(cpi);

  if (cm->frame_type == KEY_FRAME)
    target = calc_iframe_target_size_one_pass_vbr(cpi);
  else
    target = calc_pframe_target_size_one_pass_vbr(cpi);
  rc_set_frame_target(cpi, target, cm->width, cm->height);
}
1482
Yaowu Xuf883b422016-08-30 14:01:10 -07001483static int calc_pframe_target_size_one_pass_cbr(const AV1_COMP *cpi) {
1484 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001485 const RATE_CONTROL *rc = &cpi->rc;
1486 const int64_t diff = rc->optimal_buffer_level - rc->buffer_level;
1487 const int64_t one_pct_bits = 1 + rc->optimal_buffer_level / 100;
1488 int min_frame_target =
Yaowu Xuf883b422016-08-30 14:01:10 -07001489 AOMMAX(rc->avg_frame_bandwidth >> 4, FRAME_OVERHEAD_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001490 int target;
1491
1492 if (oxcf->gf_cbr_boost_pct) {
1493 const int af_ratio_pct = oxcf->gf_cbr_boost_pct + 100;
1494 target = cpi->refresh_golden_frame
1495 ? (rc->avg_frame_bandwidth * rc->baseline_gf_interval *
1496 af_ratio_pct) /
1497 (rc->baseline_gf_interval * 100 + af_ratio_pct - 100)
1498 : (rc->avg_frame_bandwidth * rc->baseline_gf_interval * 100) /
1499 (rc->baseline_gf_interval * 100 + af_ratio_pct - 100);
1500 } else {
1501 target = rc->avg_frame_bandwidth;
1502 }
1503
1504 if (diff > 0) {
1505 // Lower the target bandwidth for this frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07001506 const int pct_low = (int)AOMMIN(diff / one_pct_bits, oxcf->under_shoot_pct);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001507 target -= (target * pct_low) / 200;
1508 } else if (diff < 0) {
1509 // Increase the target bandwidth for this frame.
1510 const int pct_high =
Yaowu Xuf883b422016-08-30 14:01:10 -07001511 (int)AOMMIN(-diff / one_pct_bits, oxcf->over_shoot_pct);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001512 target += (target * pct_high) / 200;
1513 }
1514 if (oxcf->rc_max_inter_bitrate_pct) {
1515 const int max_rate =
1516 rc->avg_frame_bandwidth * oxcf->rc_max_inter_bitrate_pct / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -07001517 target = AOMMIN(target, max_rate);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001518 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001519 return AOMMAX(min_frame_target, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001520}
1521
Yaowu Xuf883b422016-08-30 14:01:10 -07001522static int calc_iframe_target_size_one_pass_cbr(const AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001523 const RATE_CONTROL *rc = &cpi->rc;
1524 int target;
1525 if (cpi->common.current_video_frame == 0) {
1526 target = ((rc->starting_buffer_level / 2) > INT_MAX)
1527 ? INT_MAX
1528 : (int)(rc->starting_buffer_level / 2);
1529 } else {
1530 int kf_boost = 32;
1531 double framerate = cpi->framerate;
1532
Yaowu Xuf883b422016-08-30 14:01:10 -07001533 kf_boost = AOMMAX(kf_boost, (int)(2 * framerate - 16));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001534 if (rc->frames_since_key < framerate / 2) {
1535 kf_boost = (int)(kf_boost * rc->frames_since_key / (framerate / 2));
1536 }
1537 target = ((16 + kf_boost) * rc->avg_frame_bandwidth) >> 4;
1538 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001539 return av1_rc_clamp_iframe_target_size(cpi, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001540}
1541
// Per-frame rate-control decisions for one-pass CBR: chooses KEY_FRAME vs
// INTER_FRAME, starts a new golden-frame group when one is due, updates
// cyclic-refresh parameters, and sets the frame's bit target.
void av1_rc_get_one_pass_cbr_params(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;
  int target;
  // Key frame on: the first frame, an externally forced key frame, or
  // expiry of the key-frame interval.
  // TODO(yaowu): replace the "auto_key && 0" below with proper decision logic.
  if ((cm->current_video_frame == 0 || (cpi->frame_flags & FRAMEFLAGS_KEY) ||
       rc->frames_to_key == 0 || (cpi->oxcf.auto_key && 0))) {
    cm->frame_type = KEY_FRAME;
    rc->this_key_frame_forced =
        cm->current_video_frame != 0 && rc->frames_to_key == 0;
    rc->frames_to_key = cpi->oxcf.key_freq;
    rc->kf_boost = DEFAULT_KF_BOOST;
    rc->source_alt_ref_active = 0;
  } else {
    cm->frame_type = INTER_FRAME;
  }
  // Start a new golden-frame group when the previous one has run out.
  if (rc->frames_till_gf_update_due == 0) {
    // Cyclic refresh picks its own golden-frame interval; otherwise use the
    // midpoint of the configured min/max.
    if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
      av1_cyclic_refresh_set_golden_update(cpi);
    else
      rc->baseline_gf_interval =
          (rc->min_gf_interval + rc->max_gf_interval) / 2;
    rc->frames_till_gf_update_due = rc->baseline_gf_interval;
    // NOTE: frames_till_gf_update_due must be <= frames_to_key.
    if (rc->frames_till_gf_update_due > rc->frames_to_key)
      rc->frames_till_gf_update_due = rc->frames_to_key;
    cpi->refresh_golden_frame = 1;
    rc->gfu_boost = DEFAULT_GF_BOOST;
  }

  // Any update/change of global cyclic refresh parameters (amount/delta-qp)
  // should be done here, before the frame qp is selected.
  if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
    av1_cyclic_refresh_update_parameters(cpi);

  if (cm->frame_type == KEY_FRAME)
    target = calc_iframe_target_size_one_pass_cbr(cpi);
  else
    target = calc_pframe_target_size_one_pass_cbr(cpi);

  rc_set_frame_target(cpi, target, cm->width, cm->height);
  // TODO(afergs): Decide whether to scale up, down, or not at all
}
1585
Yaowu Xuf883b422016-08-30 14:01:10 -07001586int av1_compute_qdelta(const RATE_CONTROL *rc, double qstart, double qtarget,
1587 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001588 int start_index = rc->worst_quality;
1589 int target_index = rc->worst_quality;
1590 int i;
1591
1592 // Convert the average q value to an index.
1593 for (i = rc->best_quality; i < rc->worst_quality; ++i) {
1594 start_index = i;
Yaowu Xuf883b422016-08-30 14:01:10 -07001595 if (av1_convert_qindex_to_q(i, bit_depth) >= qstart) break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001596 }
1597
1598 // Convert the q target to an index
1599 for (i = rc->best_quality; i < rc->worst_quality; ++i) {
1600 target_index = i;
Yaowu Xuf883b422016-08-30 14:01:10 -07001601 if (av1_convert_qindex_to_q(i, bit_depth) >= qtarget) break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001602 }
1603
1604 return target_index - start_index;
1605}
1606
Yaowu Xuf883b422016-08-30 14:01:10 -07001607int av1_compute_qdelta_by_rate(const RATE_CONTROL *rc, FRAME_TYPE frame_type,
1608 int qindex, double rate_target_ratio,
1609 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001610 int target_index = rc->worst_quality;
1611 int i;
1612
1613 // Look up the current projected bits per block for the base index
1614 const int base_bits_per_mb =
Yaowu Xuf883b422016-08-30 14:01:10 -07001615 av1_rc_bits_per_mb(frame_type, qindex, 1.0, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001616
1617 // Find the target bits per mb based on the base value and given ratio.
1618 const int target_bits_per_mb = (int)(rate_target_ratio * base_bits_per_mb);
1619
1620 // Convert the q target to an index
1621 for (i = rc->best_quality; i < rc->worst_quality; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001622 if (av1_rc_bits_per_mb(frame_type, i, 1.0, bit_depth) <=
Yaowu Xuc27fc142016-08-22 16:08:15 -07001623 target_bits_per_mb) {
1624 target_index = i;
1625 break;
1626 }
1627 }
1628 return target_index - qindex;
1629}
1630
Yaowu Xuf883b422016-08-30 14:01:10 -07001631void av1_rc_set_gf_interval_range(const AV1_COMP *const cpi,
1632 RATE_CONTROL *const rc) {
1633 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001634
1635 // Special case code for 1 pass fixed Q mode tests
Yaowu Xuf883b422016-08-30 14:01:10 -07001636 if ((oxcf->pass == 0) && (oxcf->rc_mode == AOM_Q)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001637 rc->max_gf_interval = FIXED_GF_INTERVAL;
1638 rc->min_gf_interval = FIXED_GF_INTERVAL;
1639 rc->static_scene_max_gf_interval = FIXED_GF_INTERVAL;
1640 } else {
1641 // Set Maximum gf/arf interval
1642 rc->max_gf_interval = oxcf->max_gf_interval;
1643 rc->min_gf_interval = oxcf->min_gf_interval;
1644 if (rc->min_gf_interval == 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07001645 rc->min_gf_interval = av1_rc_get_default_min_gf_interval(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001646 oxcf->width, oxcf->height, cpi->framerate);
1647 if (rc->max_gf_interval == 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07001648 rc->max_gf_interval = av1_rc_get_default_max_gf_interval(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001649 cpi->framerate, rc->min_gf_interval);
1650
1651 // Extended interval for genuinely static scenes
1652 rc->static_scene_max_gf_interval = MAX_LAG_BUFFERS * 2;
1653
1654 if (is_altref_enabled(cpi)) {
1655 if (rc->static_scene_max_gf_interval > oxcf->lag_in_frames - 1)
1656 rc->static_scene_max_gf_interval = oxcf->lag_in_frames - 1;
1657 }
1658
1659 if (rc->max_gf_interval > rc->static_scene_max_gf_interval)
1660 rc->max_gf_interval = rc->static_scene_max_gf_interval;
1661
1662 // Clamp min to max
Yaowu Xuf883b422016-08-30 14:01:10 -07001663 rc->min_gf_interval = AOMMIN(rc->min_gf_interval, rc->max_gf_interval);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001664 }
1665}
1666
// Recompute the per-frame bandwidth figures (average, minimum and maximum
// frame sizes) after a frame-rate or frame-size change, then refresh the
// golden-frame interval range.
void av1_rc_update_framerate(AV1_COMP *cpi, int width, int height) {
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  RATE_CONTROL *const rc = &cpi->rc;
  int vbr_max_bits;
  const int MBs = av1_get_MBs(width, height);

  rc->avg_frame_bandwidth = (int)(oxcf->target_bandwidth / cpi->framerate);
  rc->min_frame_bandwidth =
      (int)(rc->avg_frame_bandwidth * oxcf->two_pass_vbrmin_section / 100);

  // Never allow the minimum below the fixed per-frame overhead.
  rc->min_frame_bandwidth =
      AOMMAX(rc->min_frame_bandwidth, FRAME_OVERHEAD_BITS);

  // A maximum bitrate for a frame is defined.
  // The baseline for this aligns with HW implementations that
  // can support decode of 1080P content up to a bitrate of MAX_MB_RATE bits
  // per 16x16 MB (averaged over a frame). However this limit is extended if
  // a very high rate is given on the command line or the rate cannot
  // be achieved because of a user specified max q (e.g. when the user
  // specifies lossless encode).
  vbr_max_bits =
      (int)(((int64_t)rc->avg_frame_bandwidth * oxcf->two_pass_vbrmax_section) /
            100);
  rc->max_frame_bandwidth =
      AOMMAX(AOMMAX((MBs * MAX_MB_RATE), MAXRATE_1080P), vbr_max_bits);

  av1_rc_set_gf_interval_range(cpi, rc);
}
1695
1696#define VBR_PCT_ADJUSTMENT_LIMIT 50
// For VBR: adjust this frame's bit target to claw back accumulated over- or
// under-spend from previous frames. The per-frame correction is capped at
// VBR_PCT_ADJUSTMENT_LIMIT percent of the target, damped by how far through
// the clip we are. A second, faster mechanism redistributes bits banked by
// large local undershoots.
static void vbr_rate_correction(AV1_COMP *cpi, int *this_frame_target) {
  RATE_CONTROL *const rc = &cpi->rc;
  // Positive means bits are in hand to spend; negative means overspent.
  int64_t vbr_bits_off_target = rc->vbr_bits_off_target;
  int max_delta;
  double position_factor = 1.0;

  // How far through the clip are we.
  // This number is used to damp the per frame rate correction.
  // Range 0 - 1.0
  // NOTE(review): total_stats.count is only nonzero with first-pass stats;
  // in their absence the correction is undamped (factor 1.0).
  if (cpi->twopass.total_stats.count != 0.) {
    position_factor = sqrt((double)cpi->common.current_video_frame /
                           cpi->twopass.total_stats.count);
  }
  max_delta = (int)(position_factor *
                    ((*this_frame_target * VBR_PCT_ADJUSTMENT_LIMIT) / 100));

  // vbr_bits_off_target > 0 means we have extra bits to spend
  if (vbr_bits_off_target > 0) {
    *this_frame_target += (vbr_bits_off_target > max_delta)
                              ? max_delta
                              : (int)vbr_bits_off_target;
  } else {
    *this_frame_target -= (vbr_bits_off_target < -max_delta)
                              ? max_delta
                              : (int)-vbr_bits_off_target;
  }

  // Fast redistribution of bits arising from massive local undershoot.
  // Dont do it for kf,arf,gf or overlay frames.
  if (!frame_is_kf_gf_arf(cpi) && !rc->is_src_frame_alt_ref &&
      rc->vbr_bits_off_target_fast) {
    // Cap the extra bits at one frame's worth, and at 1/8 of the larger of
    // one frame / the remaining fast-redistribution pool.
    int one_frame_bits = AOMMAX(rc->avg_frame_bandwidth, *this_frame_target);
    int fast_extra_bits;
    fast_extra_bits = (int)AOMMIN(rc->vbr_bits_off_target_fast, one_frame_bits);
    fast_extra_bits = (int)AOMMIN(
        fast_extra_bits,
        AOMMAX(one_frame_bits / 8, rc->vbr_bits_off_target_fast / 8));
    *this_frame_target += (int)fast_extra_bits;
    rc->vbr_bits_off_target_fast -= fast_extra_bits;
  }
}
1739
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001740void av1_set_target_rate(AV1_COMP *cpi, int width, int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001741 RATE_CONTROL *const rc = &cpi->rc;
1742 int target_rate = rc->base_frame_target;
1743
1744 // Correction to rate target based on prior over or under shoot.
Yaowu Xuf883b422016-08-30 14:01:10 -07001745 if (cpi->oxcf.rc_mode == AOM_VBR || cpi->oxcf.rc_mode == AOM_CQ)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001746 vbr_rate_correction(cpi, &target_rate);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001747 rc_set_frame_target(cpi, target_rate, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001748}