blob: f59f0e7d641b77dc652be2fe98bc500dd80dcefd [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <assert.h>
13#include <limits.h>
14#include <math.h>
15#include <stdio.h>
16#include <stdlib.h>
17#include <string.h>
18
Yaowu Xuf883b422016-08-30 14:01:10 -070019#include "aom_dsp/aom_dsp_common.h"
20#include "aom_mem/aom_mem.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070021#include "aom_ports/mem.h"
22#include "aom_ports/system_state.h"
23
24#include "av1/common/alloccommon.h"
25#include "av1/encoder/aq_cyclicrefresh.h"
26#include "av1/common/common.h"
27#include "av1/common/entropymode.h"
28#include "av1/common/quant_common.h"
29#include "av1/common/seg_common.h"
30
31#include "av1/encoder/encodemv.h"
Alex Converse9d068c12017-08-03 11:48:19 -070032#include "av1/encoder/random.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070033#include "av1/encoder/ratectrl.h"
34
35// Max rate target for 1080P and below encodes under normal circumstances
36// (1920 * 1080 / (16 * 16)) * MAX_MB_RATE bits per MB
37#define MAX_MB_RATE 250
38#define MAXRATE_1080P 2025000
39
40#define DEFAULT_KF_BOOST 2000
41#define DEFAULT_GF_BOOST 2000
42
43#define MIN_BPB_FACTOR 0.005
44#define MAX_BPB_FACTOR 50
45
46#define FRAME_OVERHEAD_BITS 200
// Points `name` at the lookup-table variant matching `bit_depth`
// (name##_8, name##_10 or name##_12). On an unexpected depth it asserts
// and sets `name` to NULL.
#define ASSIGN_MINQ_TABLE(bit_depth, name)                   \
  do {                                                       \
    switch (bit_depth) {                                     \
      case AOM_BITS_8: name = name##_8; break;               \
      case AOM_BITS_10: name = name##_10; break;             \
      case AOM_BITS_12: name = name##_12; break;             \
      default:                                               \
        assert(0 &&                                          \
               "bit_depth should be AOM_BITS_8, AOM_BITS_10" \
               " or AOM_BITS_12");                           \
        name = NULL;                                         \
    }                                                        \
  } while (0)
Yaowu Xuc27fc142016-08-22 16:08:15 -070060
// Tables relating active max Q to active min Q, one set per supported
// bit depth. Filled in at startup by av1_rc_init_minq_luts().
static int kf_low_motion_minq_8[QINDEX_RANGE];
static int kf_high_motion_minq_8[QINDEX_RANGE];
static int arfgf_low_motion_minq_8[QINDEX_RANGE];
static int arfgf_high_motion_minq_8[QINDEX_RANGE];
static int inter_minq_8[QINDEX_RANGE];
static int rtc_minq_8[QINDEX_RANGE];

static int kf_low_motion_minq_10[QINDEX_RANGE];
static int kf_high_motion_minq_10[QINDEX_RANGE];
static int arfgf_low_motion_minq_10[QINDEX_RANGE];
static int arfgf_high_motion_minq_10[QINDEX_RANGE];
static int inter_minq_10[QINDEX_RANGE];
static int rtc_minq_10[QINDEX_RANGE];
static int kf_low_motion_minq_12[QINDEX_RANGE];
static int kf_high_motion_minq_12[QINDEX_RANGE];
static int arfgf_low_motion_minq_12[QINDEX_RANGE];
static int arfgf_high_motion_minq_12[QINDEX_RANGE];
static int inter_minq_12[QINDEX_RANGE];
static int rtc_minq_12[QINDEX_RANGE];

// Boost thresholds used by get_active_quality() to interpolate between the
// low- and high-motion min-Q tables for GF/ARF and key frames.
static int gf_high = 2000;
static int gf_low = 400;
static int kf_high = 5000;
static int kf_low = 400;
86
Debargha Mukherjee7166f222017-09-05 21:32:42 -070087// How many times less pixels there are to encode given the current scaling.
88// Temporary replacement for rcf_mult and rate_thresh_mult.
89static double resize_rate_factor(const AV1_COMP *cpi, int width, int height) {
Debargha Mukherjee7166f222017-09-05 21:32:42 -070090 return (double)(cpi->oxcf.width * cpi->oxcf.height) / (width * height);
Fergus Simpsonddc846e2017-04-24 18:09:13 -070091}
92
Yaowu Xuc27fc142016-08-22 16:08:15 -070093// Functions to compute the active minq lookup table entries based on a
94// formulaic approach to facilitate easier adjustment of the Q tables.
95// The formulae were derived from computing a 3rd order polynomial best
96// fit to the original data (after plotting real maxq vs minq (not q index))
97static int get_minq_index(double maxq, double x3, double x2, double x1,
Yaowu Xuf883b422016-08-30 14:01:10 -070098 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070099 int i;
Yaowu Xuf883b422016-08-30 14:01:10 -0700100 const double minqtarget = AOMMIN(((x3 * maxq + x2) * maxq + x1) * maxq, maxq);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700101
102 // Special case handling to deal with the step from q2.0
103 // down to lossless mode represented by q 1.0.
104 if (minqtarget <= 2.0) return 0;
105
106 for (i = 0; i < QINDEX_RANGE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700107 if (minqtarget <= av1_convert_qindex_to_q(i, bit_depth)) return i;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700108 }
109
110 return QINDEX_RANGE - 1;
111}
112
113static void init_minq_luts(int *kf_low_m, int *kf_high_m, int *arfgf_low,
114 int *arfgf_high, int *inter, int *rtc,
Yaowu Xuf883b422016-08-30 14:01:10 -0700115 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700116 int i;
117 for (i = 0; i < QINDEX_RANGE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700118 const double maxq = av1_convert_qindex_to_q(i, bit_depth);
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000119 kf_low_m[i] = get_minq_index(maxq, 0.000001, -0.0004, 0.150, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700120 kf_high_m[i] = get_minq_index(maxq, 0.0000021, -0.00125, 0.55, bit_depth);
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000121 arfgf_low[i] = get_minq_index(maxq, 0.0000015, -0.0009, 0.30, bit_depth);
122 arfgf_high[i] = get_minq_index(maxq, 0.0000021, -0.00125, 0.55, bit_depth);
Debargha Mukherjeeb3db2062018-02-05 18:50:16 +0000123 inter[i] = get_minq_index(maxq, 0.00000271, -0.00113, 0.90, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700124 rtc[i] = get_minq_index(maxq, 0.00000271, -0.00113, 0.70, bit_depth);
125 }
126}
127
// One-time initialization of the min-Q lookup tables for all three
// supported bit depths (8, 10 and 12 bit).
void av1_rc_init_minq_luts(void) {
  init_minq_luts(kf_low_motion_minq_8, kf_high_motion_minq_8,
                 arfgf_low_motion_minq_8, arfgf_high_motion_minq_8,
                 inter_minq_8, rtc_minq_8, AOM_BITS_8);
  init_minq_luts(kf_low_motion_minq_10, kf_high_motion_minq_10,
                 arfgf_low_motion_minq_10, arfgf_high_motion_minq_10,
                 inter_minq_10, rtc_minq_10, AOM_BITS_10);
  init_minq_luts(kf_low_motion_minq_12, kf_high_motion_minq_12,
                 arfgf_low_motion_minq_12, arfgf_high_motion_minq_12,
                 inter_minq_12, rtc_minq_12, AOM_BITS_12);
}
139
140// These functions use formulaic calculations to make playing with the
141// quantizer tables easier. If necessary they can be replaced by lookup
142// tables if and when things settle down in the experimental bitstream
Yaowu Xuf883b422016-08-30 14:01:10 -0700143double av1_convert_qindex_to_q(int qindex, aom_bit_depth_t bit_depth) {
Yaowu Xud3e7c682017-12-21 14:08:25 -0800144 // Convert the index to a real Q value (scaled down to match old Q values)
Yaowu Xuc27fc142016-08-22 16:08:15 -0700145 switch (bit_depth) {
Monty Montgomery60f2a222017-11-01 19:48:38 -0400146 case AOM_BITS_8: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 4.0;
147 case AOM_BITS_10: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 16.0;
148 case AOM_BITS_12: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 64.0;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700149 default:
Yaowu Xuf883b422016-08-30 14:01:10 -0700150 assert(0 && "bit_depth should be AOM_BITS_8, AOM_BITS_10 or AOM_BITS_12");
Yaowu Xuc27fc142016-08-22 16:08:15 -0700151 return -1.0;
152 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700153}
154
Yaowu Xuf883b422016-08-30 14:01:10 -0700155int av1_rc_bits_per_mb(FRAME_TYPE frame_type, int qindex,
156 double correction_factor, aom_bit_depth_t bit_depth) {
157 const double q = av1_convert_qindex_to_q(qindex, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700158 int enumerator = frame_type == KEY_FRAME ? 2700000 : 1800000;
159
160 assert(correction_factor <= MAX_BPB_FACTOR &&
161 correction_factor >= MIN_BPB_FACTOR);
162
163 // q based adjustment to baseline enumerator
164 enumerator += (int)(enumerator * q) >> 12;
165 return (int)(enumerator * correction_factor / q);
166}
167
Yaowu Xuf883b422016-08-30 14:01:10 -0700168int av1_estimate_bits_at_q(FRAME_TYPE frame_type, int q, int mbs,
169 double correction_factor,
170 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700171 const int bpm =
Yaowu Xuf883b422016-08-30 14:01:10 -0700172 (int)(av1_rc_bits_per_mb(frame_type, q, correction_factor, bit_depth));
173 return AOMMAX(FRAME_OVERHEAD_BITS,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700174 (int)((uint64_t)bpm * mbs) >> BPER_MB_NORMBITS);
175}
176
Yaowu Xuf883b422016-08-30 14:01:10 -0700177int av1_rc_clamp_pframe_target_size(const AV1_COMP *const cpi, int target) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700178 const RATE_CONTROL *rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -0700179 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000180 const int min_frame_target =
181 AOMMAX(rc->min_frame_bandwidth, rc->avg_frame_bandwidth >> 5);
182 // Clip the frame target to the minimum setup value.
183 if (cpi->rc.is_src_frame_alt_ref) {
184 // If there is an active ARF at this location use the minimum
185 // bits on this frame even if it is a constructed arf.
186 // The active maximum quantizer insures that an appropriate
187 // number of bits will be spent if needed for constructed ARFs.
188 target = min_frame_target;
189 } else if (target < min_frame_target) {
190 target = min_frame_target;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700191 }
192
193 // Clip the frame target to the maximum allowed value.
194 if (target > rc->max_frame_bandwidth) target = rc->max_frame_bandwidth;
195 if (oxcf->rc_max_inter_bitrate_pct) {
196 const int max_rate =
197 rc->avg_frame_bandwidth * oxcf->rc_max_inter_bitrate_pct / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -0700198 target = AOMMIN(target, max_rate);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700199 }
200
201 return target;
202}
203
Yaowu Xuf883b422016-08-30 14:01:10 -0700204int av1_rc_clamp_iframe_target_size(const AV1_COMP *const cpi, int target) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700205 const RATE_CONTROL *rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -0700206 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700207 if (oxcf->rc_max_intra_bitrate_pct) {
208 const int max_rate =
209 rc->avg_frame_bandwidth * oxcf->rc_max_intra_bitrate_pct / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -0700210 target = AOMMIN(target, max_rate);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700211 }
212 if (target > rc->max_frame_bandwidth) target = rc->max_frame_bandwidth;
213 return target;
214}
215
216// Update the buffer level: leaky bucket model.
Yaowu Xuf883b422016-08-30 14:01:10 -0700217static void update_buffer_level(AV1_COMP *cpi, int encoded_frame_size) {
218 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700219 RATE_CONTROL *const rc = &cpi->rc;
220
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200221 // Non-viewable frames are a special case and are treated as pure overhead.
Yaowu Xuc27fc142016-08-22 16:08:15 -0700222 // TODO(zoeliu): To further explore whether we should treat BWDREF_FRAME
223 // differently, since it is a no-show frame.
224 if (!cm->show_frame && !rc->is_bwd_ref_frame)
Yaowu Xuc27fc142016-08-22 16:08:15 -0700225 rc->bits_off_target -= encoded_frame_size;
226 else
227 rc->bits_off_target += rc->avg_frame_bandwidth - encoded_frame_size;
228
229 // Clip the buffer level to the maximum specified buffer size.
Yaowu Xuf883b422016-08-30 14:01:10 -0700230 rc->bits_off_target = AOMMIN(rc->bits_off_target, rc->maximum_buffer_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700231 rc->buffer_level = rc->bits_off_target;
232}
233
Yaowu Xuf883b422016-08-30 14:01:10 -0700234int av1_rc_get_default_min_gf_interval(int width, int height,
235 double framerate) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700236 // Assume we do not need any constraint lower than 4K 20 fps
237 static const double factor_safe = 3840 * 2160 * 20.0;
238 const double factor = width * height * framerate;
239 const int default_interval =
240 clamp((int)(framerate * 0.125), MIN_GF_INTERVAL, MAX_GF_INTERVAL);
241
242 if (factor <= factor_safe)
243 return default_interval;
244 else
Yaowu Xuf883b422016-08-30 14:01:10 -0700245 return AOMMAX(default_interval,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700246 (int)(MIN_GF_INTERVAL * factor / factor_safe + 0.5));
247 // Note this logic makes:
248 // 4K24: 5
249 // 4K30: 6
250 // 4K60: 12
251}
252
Yaowu Xuf883b422016-08-30 14:01:10 -0700253int av1_rc_get_default_max_gf_interval(double framerate, int min_gf_interval) {
254 int interval = AOMMIN(MAX_GF_INTERVAL, (int)(framerate * 0.75));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700255 interval += (interval & 0x01); // Round to even value
Yaowu Xuf883b422016-08-30 14:01:10 -0700256 return AOMMAX(interval, min_gf_interval);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700257}
258
Yaowu Xuf883b422016-08-30 14:01:10 -0700259void av1_rc_init(const AV1EncoderConfig *oxcf, int pass, RATE_CONTROL *rc) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700260 int i;
261
Yaowu Xuf883b422016-08-30 14:01:10 -0700262 if (pass == 0 && oxcf->rc_mode == AOM_CBR) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700263 rc->avg_frame_qindex[KEY_FRAME] = oxcf->worst_allowed_q;
264 rc->avg_frame_qindex[INTER_FRAME] = oxcf->worst_allowed_q;
265 } else {
266 rc->avg_frame_qindex[KEY_FRAME] =
267 (oxcf->worst_allowed_q + oxcf->best_allowed_q) / 2;
268 rc->avg_frame_qindex[INTER_FRAME] =
269 (oxcf->worst_allowed_q + oxcf->best_allowed_q) / 2;
270 }
271
272 rc->last_q[KEY_FRAME] = oxcf->best_allowed_q;
273 rc->last_q[INTER_FRAME] = oxcf->worst_allowed_q;
274
275 rc->buffer_level = rc->starting_buffer_level;
276 rc->bits_off_target = rc->starting_buffer_level;
277
278 rc->rolling_target_bits = rc->avg_frame_bandwidth;
279 rc->rolling_actual_bits = rc->avg_frame_bandwidth;
280 rc->long_rolling_target_bits = rc->avg_frame_bandwidth;
281 rc->long_rolling_actual_bits = rc->avg_frame_bandwidth;
282
283 rc->total_actual_bits = 0;
284 rc->total_target_bits = 0;
285 rc->total_target_vs_actual = 0;
286
287 rc->frames_since_key = 8; // Sensible default for first frame.
288 rc->this_key_frame_forced = 0;
289 rc->next_key_frame_forced = 0;
290 rc->source_alt_ref_pending = 0;
291 rc->source_alt_ref_active = 0;
292
293 rc->frames_till_gf_update_due = 0;
294 rc->ni_av_qi = oxcf->worst_allowed_q;
295 rc->ni_tot_qi = 0;
296 rc->ni_frames = 0;
297
298 rc->tot_q = 0.0;
Yaowu Xuf883b422016-08-30 14:01:10 -0700299 rc->avg_q = av1_convert_qindex_to_q(oxcf->worst_allowed_q, oxcf->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700300
301 for (i = 0; i < RATE_FACTOR_LEVELS; ++i) {
302 rc->rate_correction_factors[i] = 1.0;
303 }
304
305 rc->min_gf_interval = oxcf->min_gf_interval;
306 rc->max_gf_interval = oxcf->max_gf_interval;
307 if (rc->min_gf_interval == 0)
Yaowu Xuf883b422016-08-30 14:01:10 -0700308 rc->min_gf_interval = av1_rc_get_default_min_gf_interval(
Yaowu Xuc27fc142016-08-22 16:08:15 -0700309 oxcf->width, oxcf->height, oxcf->init_framerate);
310 if (rc->max_gf_interval == 0)
Yaowu Xuf883b422016-08-30 14:01:10 -0700311 rc->max_gf_interval = av1_rc_get_default_max_gf_interval(
Yaowu Xuc27fc142016-08-22 16:08:15 -0700312 oxcf->init_framerate, rc->min_gf_interval);
313 rc->baseline_gf_interval = (rc->min_gf_interval + rc->max_gf_interval) / 2;
314}
315
Yaowu Xuf883b422016-08-30 14:01:10 -0700316int av1_rc_drop_frame(AV1_COMP *cpi) {
317 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700318 RATE_CONTROL *const rc = &cpi->rc;
319
320 if (!oxcf->drop_frames_water_mark) {
321 return 0;
322 } else {
323 if (rc->buffer_level < 0) {
324 // Always drop if buffer is below 0.
325 return 1;
326 } else {
327 // If buffer is below drop_mark, for now just drop every other frame
328 // (starting with the next frame) until it increases back over drop_mark.
329 int drop_mark =
330 (int)(oxcf->drop_frames_water_mark * rc->optimal_buffer_level / 100);
331 if ((rc->buffer_level > drop_mark) && (rc->decimation_factor > 0)) {
332 --rc->decimation_factor;
333 } else if (rc->buffer_level <= drop_mark && rc->decimation_factor == 0) {
334 rc->decimation_factor = 1;
335 }
336 if (rc->decimation_factor > 0) {
337 if (rc->decimation_count > 0) {
338 --rc->decimation_count;
339 return 1;
340 } else {
341 rc->decimation_count = rc->decimation_factor;
342 return 0;
343 }
344 } else {
345 rc->decimation_count = 0;
346 return 0;
347 }
348 }
349 }
350}
351
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700352static double get_rate_correction_factor(const AV1_COMP *cpi, int width,
353 int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700354 const RATE_CONTROL *const rc = &cpi->rc;
355 double rcf;
356
357 if (cpi->common.frame_type == KEY_FRAME) {
358 rcf = rc->rate_correction_factors[KF_STD];
359 } else if (cpi->oxcf.pass == 2) {
360 RATE_FACTOR_LEVEL rf_lvl =
361 cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
362 rcf = rc->rate_correction_factors[rf_lvl];
363 } else {
364 if ((cpi->refresh_alt_ref_frame || cpi->refresh_golden_frame) &&
365 !rc->is_src_frame_alt_ref &&
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000366 (cpi->oxcf.rc_mode != AOM_CBR || cpi->oxcf.gf_cbr_boost_pct > 20))
Yaowu Xuc27fc142016-08-22 16:08:15 -0700367 rcf = rc->rate_correction_factors[GF_ARF_STD];
368 else
369 rcf = rc->rate_correction_factors[INTER_NORMAL];
370 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700371 rcf *= resize_rate_factor(cpi, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700372 return fclamp(rcf, MIN_BPB_FACTOR, MAX_BPB_FACTOR);
373}
374
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700375static void set_rate_correction_factor(AV1_COMP *cpi, double factor, int width,
376 int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700377 RATE_CONTROL *const rc = &cpi->rc;
378
379 // Normalize RCF to account for the size-dependent scaling factor.
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700380 factor /= resize_rate_factor(cpi, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700381
382 factor = fclamp(factor, MIN_BPB_FACTOR, MAX_BPB_FACTOR);
383
384 if (cpi->common.frame_type == KEY_FRAME) {
385 rc->rate_correction_factors[KF_STD] = factor;
386 } else if (cpi->oxcf.pass == 2) {
387 RATE_FACTOR_LEVEL rf_lvl =
388 cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
389 rc->rate_correction_factors[rf_lvl] = factor;
390 } else {
391 if ((cpi->refresh_alt_ref_frame || cpi->refresh_golden_frame) &&
392 !rc->is_src_frame_alt_ref &&
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000393 (cpi->oxcf.rc_mode != AOM_CBR || cpi->oxcf.gf_cbr_boost_pct > 20))
Yaowu Xuc27fc142016-08-22 16:08:15 -0700394 rc->rate_correction_factors[GF_ARF_STD] = factor;
395 else
396 rc->rate_correction_factors[INTER_NORMAL] = factor;
397 }
398}
399
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700400void av1_rc_update_rate_correction_factors(AV1_COMP *cpi, int width,
401 int height) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700402 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700403 int correction_factor = 100;
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700404 double rate_correction_factor =
405 get_rate_correction_factor(cpi, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700406 double adjustment_limit;
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700407 const int MBs = av1_get_MBs(width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700408
409 int projected_size_based_on_q = 0;
410
411 // Do not update the rate factors for arf overlay frames.
412 if (cpi->rc.is_src_frame_alt_ref) return;
413
414 // Clear down mmx registers to allow floating point in what follows
Yaowu Xuf883b422016-08-30 14:01:10 -0700415 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -0700416
417 // Work out how big we would have expected the frame to be at this Q given
418 // the current correction factor.
419 // Stay in double to avoid int overflow when values are large
420 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cpi->common.seg.enabled) {
421 projected_size_based_on_q =
Yaowu Xuf883b422016-08-30 14:01:10 -0700422 av1_cyclic_refresh_estimate_bits_at_q(cpi, rate_correction_factor);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700423 } else {
424 projected_size_based_on_q =
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700425 av1_estimate_bits_at_q(cpi->common.frame_type, cm->base_qindex, MBs,
Yaowu Xuf883b422016-08-30 14:01:10 -0700426 rate_correction_factor, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700427 }
428 // Work out a size correction factor.
429 if (projected_size_based_on_q > FRAME_OVERHEAD_BITS)
430 correction_factor = (int)((100 * (int64_t)cpi->rc.projected_frame_size) /
431 projected_size_based_on_q);
432
433 // More heavily damped adjustment used if we have been oscillating either side
434 // of target.
James Zernd2c42f02017-03-10 11:13:10 -0800435 if (correction_factor > 0) {
436 adjustment_limit =
437 0.25 + 0.5 * AOMMIN(1, fabs(log10(0.01 * correction_factor)));
438 } else {
439 adjustment_limit = 0.75;
440 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700441
442 cpi->rc.q_2_frame = cpi->rc.q_1_frame;
443 cpi->rc.q_1_frame = cm->base_qindex;
444 cpi->rc.rc_2_frame = cpi->rc.rc_1_frame;
445 if (correction_factor > 110)
446 cpi->rc.rc_1_frame = -1;
447 else if (correction_factor < 90)
448 cpi->rc.rc_1_frame = 1;
449 else
450 cpi->rc.rc_1_frame = 0;
451
452 if (correction_factor > 102) {
453 // We are not already at the worst allowable quality
454 correction_factor =
455 (int)(100 + ((correction_factor - 100) * adjustment_limit));
456 rate_correction_factor = (rate_correction_factor * correction_factor) / 100;
457 // Keep rate_correction_factor within limits
458 if (rate_correction_factor > MAX_BPB_FACTOR)
459 rate_correction_factor = MAX_BPB_FACTOR;
460 } else if (correction_factor < 99) {
461 // We are not already at the best allowable quality
462 correction_factor =
463 (int)(100 - ((100 - correction_factor) * adjustment_limit));
464 rate_correction_factor = (rate_correction_factor * correction_factor) / 100;
465
466 // Keep rate_correction_factor within limits
467 if (rate_correction_factor < MIN_BPB_FACTOR)
468 rate_correction_factor = MIN_BPB_FACTOR;
469 }
470
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700471 set_rate_correction_factor(cpi, rate_correction_factor, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700472}
473
Yaowu Xuf883b422016-08-30 14:01:10 -0700474int av1_rc_regulate_q(const AV1_COMP *cpi, int target_bits_per_frame,
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700475 int active_best_quality, int active_worst_quality,
476 int width, int height) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700477 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700478 int q = active_worst_quality;
479 int last_error = INT_MAX;
480 int i, target_bits_per_mb, bits_per_mb_at_this_q;
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700481 const int MBs = av1_get_MBs(width, height);
482 const double correction_factor =
483 get_rate_correction_factor(cpi, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700484
485 // Calculate required scaling factor based on target frame size and size of
486 // frame produced using previous Q.
487 target_bits_per_mb =
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700488 (int)((uint64_t)(target_bits_per_frame) << BPER_MB_NORMBITS) / MBs;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700489
490 i = active_best_quality;
491
492 do {
493 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cm->seg.enabled) {
494 bits_per_mb_at_this_q =
Yaowu Xuf883b422016-08-30 14:01:10 -0700495 (int)av1_cyclic_refresh_rc_bits_per_mb(cpi, i, correction_factor);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700496 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -0700497 bits_per_mb_at_this_q = (int)av1_rc_bits_per_mb(
Yaowu Xuc27fc142016-08-22 16:08:15 -0700498 cm->frame_type, i, correction_factor, cm->bit_depth);
499 }
500
501 if (bits_per_mb_at_this_q <= target_bits_per_mb) {
502 if ((target_bits_per_mb - bits_per_mb_at_this_q) <= last_error)
503 q = i;
504 else
505 q = i - 1;
506
507 break;
508 } else {
509 last_error = bits_per_mb_at_this_q - target_bits_per_mb;
510 }
511 } while (++i <= active_worst_quality);
512
513 // In CBR mode, this makes sure q is between oscillating Qs to prevent
514 // resonance.
Yaowu Xuf883b422016-08-30 14:01:10 -0700515 if (cpi->oxcf.rc_mode == AOM_CBR &&
Yaowu Xuc27fc142016-08-22 16:08:15 -0700516 (cpi->rc.rc_1_frame * cpi->rc.rc_2_frame == -1) &&
517 cpi->rc.q_1_frame != cpi->rc.q_2_frame) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700518 q = clamp(q, AOMMIN(cpi->rc.q_1_frame, cpi->rc.q_2_frame),
519 AOMMAX(cpi->rc.q_1_frame, cpi->rc.q_2_frame));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700520 }
521 return q;
522}
523
// Picks a min Q from the low/high-motion tables based on the boost value:
// above `high` use the low-motion table, below `low` the high-motion table,
// and linearly interpolate (with rounding) in between.
static int get_active_quality(int q, int gfu_boost, int low, int high,
                              int *low_motion_minq, int *high_motion_minq) {
  if (gfu_boost > high) return low_motion_minq[q];
  if (gfu_boost < low) return high_motion_minq[q];

  const int gap = high - low;
  const int offset = high - gfu_boost;
  const int qdiff = high_motion_minq[q] - low_motion_minq[q];
  const int adjustment = ((offset * qdiff) + (gap >> 1)) / gap;
  return low_motion_minq[q] + adjustment;
}
538
// Returns the active minimum Q for a key frame: interpolates between the
// low- and high-motion key-frame min-Q tables (selected by bit depth)
// using the key-frame boost.
static int get_kf_active_quality(const RATE_CONTROL *const rc, int q,
                                 aom_bit_depth_t bit_depth) {
  // NOTE: these local names must match the table-name prefixes expected by
  // ASSIGN_MINQ_TABLE (it pastes name##_8 / _10 / _12).
  int *kf_low_motion_minq;
  int *kf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, kf_low_motion_minq);
  ASSIGN_MINQ_TABLE(bit_depth, kf_high_motion_minq);
  return get_active_quality(q, rc->kf_boost, kf_low, kf_high,
                            kf_low_motion_minq, kf_high_motion_minq);
}
548
// Returns the active minimum Q for a golden/alt-ref frame: interpolates
// between the low- and high-motion ARF/GF min-Q tables (selected by bit
// depth) using the GF boost.
static int get_gf_active_quality(const RATE_CONTROL *const rc, int q,
                                 aom_bit_depth_t bit_depth) {
  // NOTE: these local names must match the table-name prefixes expected by
  // ASSIGN_MINQ_TABLE (it pastes name##_8 / _10 / _12).
  int *arfgf_low_motion_minq;
  int *arfgf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_low_motion_minq);
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_high_motion_minq);
  return get_active_quality(q, rc->gfu_boost, gf_low, gf_high,
                            arfgf_low_motion_minq, arfgf_high_motion_minq);
}
558
Yaowu Xuf883b422016-08-30 14:01:10 -0700559static int calc_active_worst_quality_one_pass_vbr(const AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700560 const RATE_CONTROL *const rc = &cpi->rc;
561 const unsigned int curr_frame = cpi->common.current_video_frame;
562 int active_worst_quality;
563
564 if (cpi->common.frame_type == KEY_FRAME) {
565 active_worst_quality =
566 curr_frame == 0 ? rc->worst_quality : rc->last_q[KEY_FRAME] * 2;
567 } else {
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200568 if (!rc->is_src_frame_alt_ref &&
569 (cpi->refresh_golden_frame || cpi->refresh_alt2_ref_frame ||
570 cpi->refresh_alt_ref_frame)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700571 active_worst_quality = curr_frame == 1 ? rc->last_q[KEY_FRAME] * 5 / 4
572 : rc->last_q[INTER_FRAME];
573 } else {
574 active_worst_quality = curr_frame == 1 ? rc->last_q[KEY_FRAME] * 2
575 : rc->last_q[INTER_FRAME] * 2;
576 }
577 }
Yaowu Xuf883b422016-08-30 14:01:10 -0700578 return AOMMIN(active_worst_quality, rc->worst_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700579}
580
// Adjust active_worst_quality level based on buffer level.
static int calc_active_worst_quality_one_pass_cbr(const AV1_COMP *cpi) {
  // Adjust active_worst_quality: If buffer is above the optimal/target level,
  // bring active_worst_quality down depending on fullness of buffer.
  // If buffer is below the optimal level, let the active_worst_quality go from
  // ambient Q (at buffer = optimal level) to worst_quality level
  // (at buffer = critical level).
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *rc = &cpi->rc;
  // Buffer level below which we push active_worst to worst_quality.
  int64_t critical_level = rc->optimal_buffer_level >> 3;
  int64_t buff_lvl_step = 0;
  int adjustment = 0;
  int active_worst_quality;
  int ambient_qp;
  // Key frames always use the configured worst quality as the ceiling.
  if (cm->frame_type == KEY_FRAME) return rc->worst_quality;
  // For ambient_qp we use minimum of avg_frame_qindex[KEY_FRAME/INTER_FRAME]
  // for the first few frames following key frame. These are both initialized
  // to worst_quality and updated with (3/4, 1/4) average in postencode_update.
  // So for first few frames following key, the qp of that key frame is weighted
  // into the active_worst_quality setting.
  ambient_qp = (cm->current_video_frame < 5)
                   ? AOMMIN(rc->avg_frame_qindex[INTER_FRAME],
                            rc->avg_frame_qindex[KEY_FRAME])
                   : rc->avg_frame_qindex[INTER_FRAME];
  // Start from 25% above ambient, capped at worst quality.
  active_worst_quality = AOMMIN(rc->worst_quality, ambient_qp * 5 / 4);
  if (rc->buffer_level > rc->optimal_buffer_level) {
    // Adjust down.
    // Maximum limit for down adjustment, ~30%.
    int max_adjustment_down = active_worst_quality / 3;
    if (max_adjustment_down) {
      // buff_lvl_step maps buffer fullness above optimal onto the allowed
      // downward adjustment range.
      buff_lvl_step = ((rc->maximum_buffer_size - rc->optimal_buffer_level) /
                       max_adjustment_down);
      if (buff_lvl_step)
        adjustment = (int)((rc->buffer_level - rc->optimal_buffer_level) /
                           buff_lvl_step);
      active_worst_quality -= adjustment;
    }
  } else if (rc->buffer_level > critical_level) {
    // Adjust up from ambient Q.
    if (critical_level) {
      buff_lvl_step = (rc->optimal_buffer_level - critical_level);
      if (buff_lvl_step) {
        // Linear ramp from ambient Q (buffer at optimal) toward worst
        // quality (buffer at critical).
        adjustment = (int)((rc->worst_quality - ambient_qp) *
                           (rc->optimal_buffer_level - rc->buffer_level) /
                           buff_lvl_step);
      }
      active_worst_quality = ambient_qp + adjustment;
    }
  } else {
    // Set to worst_quality if buffer is below critical level.
    active_worst_quality = rc->worst_quality;
  }
  return active_worst_quality;
}
636
// Chooses the quantizer index (q) for a frame in one-pass CBR mode.
// An allowed quality range is derived from the buffer state and recent
// frame statistics and reported through *bottom_index (active best, i.e.
// lowest permitted qindex) and *top_index (active worst, i.e. highest
// permitted qindex); the chosen q within that range is returned.
// Note: a higher qindex means coarser quantization / lower quality.
static int rc_pick_q_and_bounds_one_pass_cbr(const AV1_COMP *cpi, int width,
                                             int height, int *bottom_index,
                                             int *top_index) {
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  int active_best_quality;
  // Buffer-state-dependent ceiling on qindex for this frame.
  int active_worst_quality = calc_active_worst_quality_one_pass_cbr(cpi);
  int q;
  // Bit-depth-specific minimum-q lookup table for RTC (CBR) coding.
  int *rtc_minq;
  ASSIGN_MINQ_TABLE(cm->bit_depth, rtc_minq);

  if (frame_is_intra_only(cm)) {
    active_best_quality = rc->best_quality;
    // Handle the special case for key frames forced when we have reached
    // the maximum key frame interval. Here force the Q to a range
    // based on the ambient Q to reduce the risk of popping.
    if (rc->this_key_frame_forced) {
      int qindex = rc->last_boosted_qindex;
      double last_boosted_q = av1_convert_qindex_to_q(qindex, cm->bit_depth);
      // Allow the forced key frame to go up to 25% better (lower) than the
      // last boosted q, but never below the absolute best quality.
      int delta_qindex = av1_compute_qdelta(
          rc, last_boosted_q, (last_boosted_q * 0.75), cm->bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else if (cm->current_video_frame > 0) {
      // not first frame of one pass and kf_boost is set
      double q_adj_factor = 1.0;
      double q_val;

      // Baseline derived from the running average key-frame q and kf boost.
      active_best_quality = get_kf_active_quality(
          rc, rc->avg_frame_qindex[KEY_FRAME], cm->bit_depth);

      // Allow somewhat lower kf minq with small image formats.
      // (352 * 288 is CIF resolution.)
      if ((width * height) <= (352 * 288)) {
        q_adj_factor -= 0.25;
      }

      // Convert the adjustment factor to a qindex delta
      // on active_best_quality.
      q_val = av1_convert_qindex_to_q(active_best_quality, cm->bit_depth);
      active_best_quality +=
          av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, cm->bit_depth);
    }
  } else if (!rc->is_src_frame_alt_ref &&
             (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
    // Use the lower of active_worst_quality and recent
    // average Q as basis for GF/ARF best Q limit unless last frame was
    // a key frame.
    if (rc->frames_since_key > 1 &&
        rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality) {
      q = rc->avg_frame_qindex[INTER_FRAME];
    } else {
      q = active_worst_quality;
    }
    active_best_quality = get_gf_active_quality(rc, q, cm->bit_depth);
  } else {
    // Normal inter frame: use the lower of active_worst_quality and
    // recent/average Q, mapped through the RTC minimum-q table.
    if (cm->current_video_frame > 1) {
      if (rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality)
        active_best_quality = rtc_minq[rc->avg_frame_qindex[INTER_FRAME]];
      else
        active_best_quality = rtc_minq[active_worst_quality];
    } else {
      // Early frames: no inter average yet, fall back to the key-frame one.
      if (rc->avg_frame_qindex[KEY_FRAME] < active_worst_quality)
        active_best_quality = rtc_minq[rc->avg_frame_qindex[KEY_FRAME]];
      else
        active_best_quality = rtc_minq[active_worst_quality];
    }
  }

  // Clip the active best and worst quality values to limits
  active_best_quality =
      clamp(active_best_quality, rc->best_quality, rc->worst_quality);
  active_worst_quality =
      clamp(active_worst_quality, active_best_quality, rc->worst_quality);

  *top_index = active_worst_quality;
  *bottom_index = active_best_quality;

  // Limit Q range for the adaptive loop.
  // (Key frames other than the very first / forced ones get a rate-based
  //  extension of the worst-quality bound.)
  if (cm->frame_type == KEY_FRAME && !rc->this_key_frame_forced &&
      !(cm->current_video_frame == 0)) {
    int qdelta = 0;
    aom_clear_system_state();
    qdelta = av1_compute_qdelta_by_rate(
        &cpi->rc, cm->frame_type, active_worst_quality, 2.0, cm->bit_depth);
    *top_index = active_worst_quality + qdelta;
    *top_index = AOMMAX(*top_index, *bottom_index);
  }

  // Special case code to try and match quality with forced key frames
  if (cm->frame_type == KEY_FRAME && rc->this_key_frame_forced) {
    q = rc->last_boosted_qindex;
  } else {
    q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
                          active_worst_quality, width, height);
    if (q > *top_index) {
      // Special case when we are targeting the max allowed rate
      if (rc->this_frame_target >= rc->max_frame_bandwidth)
        *top_index = q;
      else
        q = *top_index;
    }
  }

  assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
  assert(*bottom_index <= rc->worst_quality &&
         *bottom_index >= rc->best_quality);
  assert(q <= rc->worst_quality && q >= rc->best_quality);
  return q;
}
746
747static int get_active_cq_level(const RATE_CONTROL *rc,
Yaowu Xuf883b422016-08-30 14:01:10 -0700748 const AV1EncoderConfig *const oxcf) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700749 static const double cq_adjust_threshold = 0.1;
750 int active_cq_level = oxcf->cq_level;
Yaowu Xuf883b422016-08-30 14:01:10 -0700751 if (oxcf->rc_mode == AOM_CQ && rc->total_target_bits > 0) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700752 const double x = (double)rc->total_actual_bits / rc->total_target_bits;
753 if (x < cq_adjust_threshold) {
754 active_cq_level = (int)(active_cq_level * x / cq_adjust_threshold);
755 }
756 }
757 return active_cq_level;
758}
759
// Chooses the quantizer index (q) for a frame in one-pass VBR mode
// (also covers AOM_Q and AOM_CQ one-pass operation). Reports the allowed
// adaptation range via *bottom_index (active best / lowest qindex) and
// *top_index (active worst / highest qindex) and returns the chosen q.
static int rc_pick_q_and_bounds_one_pass_vbr(const AV1_COMP *cpi, int width,
                                             int height, int *bottom_index,
                                             int *top_index) {
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  const int cq_level = get_active_cq_level(rc, oxcf);
  int active_best_quality;
  int active_worst_quality = calc_active_worst_quality_one_pass_vbr(cpi);
  int q;
  // Bit-depth-specific minimum-q lookup table for inter frames.
  int *inter_minq;
  ASSIGN_MINQ_TABLE(cm->bit_depth, inter_minq);

  if (frame_is_intra_only(cm)) {
    if (oxcf->rc_mode == AOM_Q) {
      // Constant-quality mode: key frames get up to 75% better (lower) q
      // than the configured level.
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, cm->bit_depth);
      const int delta_qindex =
          av1_compute_qdelta(rc, q_val, q_val * 0.25, cm->bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else if (rc->this_key_frame_forced) {
      // Forced key frame at max interval: tie q to the last boosted q to
      // reduce the risk of visible popping.
      const int qindex = rc->last_boosted_qindex;
      const double last_boosted_q =
          av1_convert_qindex_to_q(qindex, cm->bit_depth);
      const int delta_qindex = av1_compute_qdelta(
          rc, last_boosted_q, last_boosted_q * 0.75, cm->bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {  // not first frame of one pass and kf_boost is set
      double q_adj_factor = 1.0;

      // Baseline derived from the running average key-frame q and kf boost.
      active_best_quality = get_kf_active_quality(
          rc, rc->avg_frame_qindex[KEY_FRAME], cm->bit_depth);

      // Allow somewhat lower kf minq with small image formats.
      // (352 * 288 is CIF resolution.)
      if ((width * height) <= (352 * 288)) {
        q_adj_factor -= 0.25;
      }

      // Convert the adjustment factor to a qindex delta on active_best_quality.
      {
        const double q_val =
            av1_convert_qindex_to_q(active_best_quality, cm->bit_depth);
        active_best_quality +=
            av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, cm->bit_depth);
      }
    }
  } else if (!rc->is_src_frame_alt_ref &&
             (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
    // Use the lower of active_worst_quality and recent
    // average Q as basis for GF/ARF best Q limit unless last frame was
    // a key frame.
    q = (rc->frames_since_key > 1 &&
         rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality)
            ? rc->avg_frame_qindex[INTER_FRAME]
            : rc->avg_frame_qindex[KEY_FRAME];
    // For constrained quality dont allow Q less than the cq level
    if (oxcf->rc_mode == AOM_CQ) {
      if (q < cq_level) q = cq_level;
      active_best_quality = get_gf_active_quality(rc, q, cm->bit_depth);
      // Constrained quality use slightly lower active best.
      active_best_quality = active_best_quality * 15 / 16;
    } else if (oxcf->rc_mode == AOM_Q) {
      // Constant-quality mode: ARFs get a bigger boost (0.40) than golden
      // frames (0.50) relative to the configured level.
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, cm->bit_depth);
      const int delta_qindex =
          (cpi->refresh_alt_ref_frame)
              ? av1_compute_qdelta(rc, q_val, q_val * 0.40, cm->bit_depth)
              : av1_compute_qdelta(rc, q_val, q_val * 0.50, cm->bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {
      active_best_quality = get_gf_active_quality(rc, q, cm->bit_depth);
    }
  } else {
    if (oxcf->rc_mode == AOM_Q) {
      // Constant-quality mode, normal inter frame: apply a fixed per-position
      // boost pattern over the fixed GF interval.
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, cm->bit_depth);
      const double delta_rate[FIXED_GF_INTERVAL] = { 0.50, 1.0, 0.85, 1.0,
                                                     0.70, 1.0, 0.85, 1.0 };
      const int delta_qindex = av1_compute_qdelta(
          rc, q_val,
          q_val * delta_rate[cm->current_video_frame % FIXED_GF_INTERVAL],
          cm->bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {
      // Use the lower of active_worst_quality and recent/average Q.
      active_best_quality = (cm->current_video_frame > 1)
                                ? inter_minq[rc->avg_frame_qindex[INTER_FRAME]]
                                : inter_minq[rc->avg_frame_qindex[KEY_FRAME]];
      // For the constrained quality mode we don't want
      // q to fall below the cq level.
      if ((oxcf->rc_mode == AOM_CQ) && (active_best_quality < cq_level)) {
        active_best_quality = cq_level;
      }
    }
  }

  // Clip the active best and worst quality values to limits
  active_best_quality =
      clamp(active_best_quality, rc->best_quality, rc->worst_quality);
  active_worst_quality =
      clamp(active_worst_quality, active_best_quality, rc->worst_quality);

  *top_index = active_worst_quality;
  *bottom_index = active_best_quality;

  // Limit Q range for the adaptive loop.
  // Key frames get a larger rate-based extension (2.0) of the worst-quality
  // bound than GF/ARF frames (1.75).
  {
    int qdelta = 0;
    aom_clear_system_state();
    if (cm->frame_type == KEY_FRAME && !rc->this_key_frame_forced &&
        !(cm->current_video_frame == 0)) {
      qdelta = av1_compute_qdelta_by_rate(
          &cpi->rc, cm->frame_type, active_worst_quality, 2.0, cm->bit_depth);
    } else if (!rc->is_src_frame_alt_ref &&
               (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
      qdelta = av1_compute_qdelta_by_rate(
          &cpi->rc, cm->frame_type, active_worst_quality, 1.75, cm->bit_depth);
    }
    *top_index = active_worst_quality + qdelta;
    *top_index = AOMMAX(*top_index, *bottom_index);
  }

  if (oxcf->rc_mode == AOM_Q) {
    q = active_best_quality;
    // Special case code to try and match quality with forced key frames
  } else if ((cm->frame_type == KEY_FRAME) && rc->this_key_frame_forced) {
    q = rc->last_boosted_qindex;
  } else {
    q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
                          active_worst_quality, width, height);
    if (q > *top_index) {
      // Special case when we are targeting the max allowed rate
      if (rc->this_frame_target >= rc->max_frame_bandwidth)
        *top_index = q;
      else
        q = *top_index;
    }
  }

  assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
  assert(*bottom_index <= rc->worst_quality &&
         *bottom_index >= rc->best_quality);
  assert(q <= rc->worst_quality && q >= rc->best_quality);
  return q;
}
905
Yaowu Xuf883b422016-08-30 14:01:10 -0700906int av1_frame_type_qdelta(const AV1_COMP *cpi, int rf_level, int q) {
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200907 static const FRAME_TYPE frame_type[RATE_FACTOR_LEVELS] = {
908 INTER_FRAME, INTER_FRAME, INTER_FRAME, INTER_FRAME, INTER_FRAME, KEY_FRAME
909 };
Yaowu Xuf883b422016-08-30 14:01:10 -0700910 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700911 int qdelta =
Yaowu Xuf883b422016-08-30 14:01:10 -0700912 av1_compute_qdelta_by_rate(&cpi->rc, frame_type[rf_level], q,
913 rate_factor_deltas[rf_level], cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700914 return qdelta;
915}
916
917#define STATIC_MOTION_THRESH 95
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700918static int rc_pick_q_and_bounds_two_pass(const AV1_COMP *cpi, int width,
919 int height, int *bottom_index,
Yaowu Xuf883b422016-08-30 14:01:10 -0700920 int *top_index) {
921 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700922 const RATE_CONTROL *const rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -0700923 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700924 const GF_GROUP *gf_group = &cpi->twopass.gf_group;
925 const int cq_level = get_active_cq_level(rc, oxcf);
926 int active_best_quality;
927 int active_worst_quality = cpi->twopass.active_worst_quality;
928 int q;
929 int *inter_minq;
930 ASSIGN_MINQ_TABLE(cm->bit_depth, inter_minq);
931
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700932#if CUSTOMIZED_GF
Wei-Ting Lin240d9b42018-07-12 11:48:02 -0700933 const int is_intrl_arf_boost =
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700934 gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE;
935#else
936 const int is_intrl_arf_boost = cpi->refresh_alt2_ref_frame;
Wei-Ting Lincffe49d2018-07-10 14:15:46 -0700937#endif // CUSTOMIZED_GF
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700938
Yaowu Xuc27fc142016-08-22 16:08:15 -0700939 if (frame_is_intra_only(cm)) {
940 // Handle the special case for key frames forced when we have reached
941 // the maximum key frame interval. Here force the Q to a range
942 // based on the ambient Q to reduce the risk of popping.
943 if (rc->this_key_frame_forced) {
944 double last_boosted_q;
945 int delta_qindex;
946 int qindex;
947
948 if (cpi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700949 qindex = AOMMIN(rc->last_kf_qindex, rc->last_boosted_qindex);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700950 active_best_quality = qindex;
Yaowu Xuf883b422016-08-30 14:01:10 -0700951 last_boosted_q = av1_convert_qindex_to_q(qindex, cm->bit_depth);
952 delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
953 last_boosted_q * 1.25, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700954 active_worst_quality =
Yaowu Xuf883b422016-08-30 14:01:10 -0700955 AOMMIN(qindex + delta_qindex, active_worst_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700956 } else {
957 qindex = rc->last_boosted_qindex;
Yaowu Xuf883b422016-08-30 14:01:10 -0700958 last_boosted_q = av1_convert_qindex_to_q(qindex, cm->bit_depth);
959 delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
960 last_boosted_q * 0.75, cm->bit_depth);
961 active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700962 }
963 } else {
964 // Not forced keyframe.
965 double q_adj_factor = 1.0;
966 double q_val;
967
968 // Baseline value derived from cpi->active_worst_quality and kf boost.
969 active_best_quality =
970 get_kf_active_quality(rc, active_worst_quality, cm->bit_depth);
971
972 // Allow somewhat lower kf minq with small image formats.
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700973 if ((width * height) <= (352 * 288)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700974 q_adj_factor -= 0.25;
975 }
976
977 // Make a further adjustment based on the kf zero motion measure.
978 q_adj_factor += 0.05 - (0.001 * (double)cpi->twopass.kf_zeromotion_pct);
979
980 // Convert the adjustment factor to a qindex delta
981 // on active_best_quality.
Yaowu Xuf883b422016-08-30 14:01:10 -0700982 q_val = av1_convert_qindex_to_q(active_best_quality, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700983 active_best_quality +=
Yaowu Xuf883b422016-08-30 14:01:10 -0700984 av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700985 }
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200986 } else if (!rc->is_src_frame_alt_ref &&
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700987 (cpi->refresh_golden_frame || is_intrl_arf_boost ||
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200988 cpi->refresh_alt_ref_frame)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700989 // Use the lower of active_worst_quality and recent
990 // average Q as basis for GF/ARF best Q limit unless last frame was
991 // a key frame.
992 if (rc->frames_since_key > 1 &&
993 rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality) {
994 q = rc->avg_frame_qindex[INTER_FRAME];
995 } else {
996 q = active_worst_quality;
997 }
998 // For constrained quality dont allow Q less than the cq level
Yaowu Xuf883b422016-08-30 14:01:10 -0700999 if (oxcf->rc_mode == AOM_CQ) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001000 if (q < cq_level) q = cq_level;
1001
1002 active_best_quality = get_gf_active_quality(rc, q, cm->bit_depth);
1003
1004 // Constrained quality use slightly lower active best.
1005 active_best_quality = active_best_quality * 15 / 16;
1006
Yaowu Xuf883b422016-08-30 14:01:10 -07001007 } else if (oxcf->rc_mode == AOM_Q) {
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001008 if (!cpi->refresh_alt_ref_frame && !is_intrl_arf_boost) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001009 active_best_quality = cq_level;
1010 } else {
1011 active_best_quality = get_gf_active_quality(rc, q, cm->bit_depth);
Wei-Ting Lin050fb022018-07-13 11:45:05 -07001012#if USE_SYMM_MULTI_LAYER
1013 if (cpi->new_bwdref_update_rule && is_intrl_arf_boost) {
1014 int this_height = gf_group->pyramid_level[gf_group->index];
1015 while (this_height < gf_group->pyramid_height) {
1016 active_best_quality = (active_best_quality + cq_level + 1) / 2;
1017 ++this_height;
1018 }
1019 } else {
1020#endif
1021 // Modify best quality for second level arfs. For mode AOM_Q this
1022 // becomes the baseline frame q.
1023 if (gf_group->rf_level[gf_group->index] == GF_ARF_LOW)
1024 active_best_quality = (active_best_quality + cq_level + 1) / 2;
1025#if USE_SYMM_MULTI_LAYER
1026 }
1027#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001028 }
1029 } else {
1030 active_best_quality = get_gf_active_quality(rc, q, cm->bit_depth);
Wei-Ting Lin050fb022018-07-13 11:45:05 -07001031#if USE_SYMM_MULTI_LAYER
1032 if (cpi->new_bwdref_update_rule && is_intrl_arf_boost) {
1033 int this_height = gf_group->pyramid_level[gf_group->index];
1034 while (this_height < gf_group->pyramid_height) {
1035 active_best_quality =
1036 (active_best_quality + active_worst_quality + 1) / 2;
1037 ++this_height;
1038 }
1039 }
1040#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001041 }
1042 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001043 if (oxcf->rc_mode == AOM_Q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001044 active_best_quality = cq_level;
1045 } else {
1046 active_best_quality = inter_minq[active_worst_quality];
1047
1048 // For the constrained quality mode we don't want
1049 // q to fall below the cq level.
Yaowu Xuf883b422016-08-30 14:01:10 -07001050 if ((oxcf->rc_mode == AOM_CQ) && (active_best_quality < cq_level)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001051 active_best_quality = cq_level;
1052 }
1053 }
1054 }
1055
1056 // Extension to max or min Q if undershoot or overshoot is outside
1057 // the permitted range.
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +00001058 if ((cpi->oxcf.rc_mode != AOM_Q) &&
1059 (cpi->twopass.gf_zeromotion_pct < VLOW_MOTION_THRESHOLD)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001060 if (frame_is_intra_only(cm) ||
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02001061 (!rc->is_src_frame_alt_ref &&
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001062 (cpi->refresh_golden_frame || is_intrl_arf_boost ||
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02001063 cpi->refresh_alt_ref_frame))) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001064 active_best_quality -=
1065 (cpi->twopass.extend_minq + cpi->twopass.extend_minq_fast);
1066 active_worst_quality += (cpi->twopass.extend_maxq / 2);
1067 } else {
1068 active_best_quality -=
1069 (cpi->twopass.extend_minq + cpi->twopass.extend_minq_fast) / 2;
1070 active_worst_quality += cpi->twopass.extend_maxq;
1071 }
1072 }
1073
Yaowu Xuf883b422016-08-30 14:01:10 -07001074 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07001075 // Static forced key frames Q restrictions dealt with elsewhere.
1076 if (!(frame_is_intra_only(cm)) || !rc->this_key_frame_forced ||
1077 (cpi->twopass.last_kfgroup_zeromotion_pct < STATIC_MOTION_THRESH)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001078 int qdelta = av1_frame_type_qdelta(cpi, gf_group->rf_level[gf_group->index],
1079 active_worst_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001080 active_worst_quality =
Yaowu Xuf883b422016-08-30 14:01:10 -07001081 AOMMAX(active_worst_quality + qdelta, active_best_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001082 }
1083
1084 // Modify active_best_quality for downscaled normal frames.
Cheng Chen09c83a52018-06-05 12:27:36 -07001085 if (av1_frame_scaled(cm) && !frame_is_kf_gf_arf(cpi)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001086 int qdelta = av1_compute_qdelta_by_rate(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001087 rc, cm->frame_type, active_best_quality, 2.0, cm->bit_depth);
1088 active_best_quality =
Yaowu Xuf883b422016-08-30 14:01:10 -07001089 AOMMAX(active_best_quality + qdelta, rc->best_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001090 }
1091
1092 active_best_quality =
1093 clamp(active_best_quality, rc->best_quality, rc->worst_quality);
1094 active_worst_quality =
1095 clamp(active_worst_quality, active_best_quality, rc->worst_quality);
1096
Yaowu Xuf883b422016-08-30 14:01:10 -07001097 if (oxcf->rc_mode == AOM_Q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001098 q = active_best_quality;
1099 // Special case code to try and match quality with forced key frames.
1100 } else if (frame_is_intra_only(cm) && rc->this_key_frame_forced) {
1101 // If static since last kf use better of last boosted and last kf q.
1102 if (cpi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001103 q = AOMMIN(rc->last_kf_qindex, rc->last_boosted_qindex);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001104 } else {
1105 q = rc->last_boosted_qindex;
1106 }
1107 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001108 q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001109 active_worst_quality, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001110 if (q > active_worst_quality) {
1111 // Special case when we are targeting the max allowed rate.
1112 if (rc->this_frame_target >= rc->max_frame_bandwidth)
1113 active_worst_quality = q;
1114 else
1115 q = active_worst_quality;
1116 }
1117 }
1118 clamp(q, active_best_quality, active_worst_quality);
1119
1120 *top_index = active_worst_quality;
1121 *bottom_index = active_best_quality;
1122
1123 assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
1124 assert(*bottom_index <= rc->worst_quality &&
1125 *bottom_index >= rc->best_quality);
1126 assert(q <= rc->worst_quality && q >= rc->best_quality);
1127 return q;
1128}
1129
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001130int av1_rc_pick_q_and_bounds(const AV1_COMP *cpi, int width, int height,
1131 int *bottom_index, int *top_index) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001132 int q;
1133 if (cpi->oxcf.pass == 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001134 if (cpi->oxcf.rc_mode == AOM_CBR)
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001135 q = rc_pick_q_and_bounds_one_pass_cbr(cpi, width, height, bottom_index,
1136 top_index);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001137 else
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001138 q = rc_pick_q_and_bounds_one_pass_vbr(cpi, width, height, bottom_index,
1139 top_index);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001140 } else {
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001141 q = rc_pick_q_and_bounds_two_pass(cpi, width, height, bottom_index,
1142 top_index);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001143 }
1144
1145 return q;
1146}
1147
Yaowu Xuf883b422016-08-30 14:01:10 -07001148void av1_rc_compute_frame_size_bounds(const AV1_COMP *cpi, int frame_target,
1149 int *frame_under_shoot_limit,
1150 int *frame_over_shoot_limit) {
1151 if (cpi->oxcf.rc_mode == AOM_Q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001152 *frame_under_shoot_limit = 0;
1153 *frame_over_shoot_limit = INT_MAX;
1154 } else {
1155 // For very small rate targets where the fractional adjustment
1156 // may be tiny make sure there is at least a minimum range.
1157 const int tolerance = (cpi->sf.recode_tolerance * frame_target) / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -07001158 *frame_under_shoot_limit = AOMMAX(frame_target - tolerance - 200, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001159 *frame_over_shoot_limit =
Yaowu Xuf883b422016-08-30 14:01:10 -07001160 AOMMIN(frame_target + tolerance + 200, cpi->rc.max_frame_bandwidth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001161 }
1162}
1163
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001164static void rc_set_frame_target(AV1_COMP *cpi, int target, int width,
1165 int height) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001166 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001167 RATE_CONTROL *const rc = &cpi->rc;
1168
1169 rc->this_frame_target = target;
1170
Fergus Simpsonfecb2ab2017-04-30 15:49:57 -07001171 // Modify frame size target when down-scaled.
Cheng Chen09c83a52018-06-05 12:27:36 -07001172 if (av1_frame_scaled(cm))
Fergus Simpsonddc846e2017-04-24 18:09:13 -07001173 rc->this_frame_target =
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001174 (int)(rc->this_frame_target * resize_rate_factor(cpi, width, height));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001175
1176 // Target rate per SB64 (including partial SB64s.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001177 rc->sb64_target_rate =
1178 (int)((int64_t)rc->this_frame_target * 64 * 64) / (width * height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001179}
1180
Yaowu Xuf883b422016-08-30 14:01:10 -07001181static void update_alt_ref_frame_stats(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001182 // this frame refreshes means next frames don't unless specified by user
1183 RATE_CONTROL *const rc = &cpi->rc;
1184 rc->frames_since_golden = 0;
1185
1186 // Mark the alt ref as done (setting to 0 means no further alt refs pending).
1187 rc->source_alt_ref_pending = 0;
1188
1189 // Set the alternate reference frame active flag
1190 rc->source_alt_ref_active = 1;
1191}
1192
Yaowu Xuf883b422016-08-30 14:01:10 -07001193static void update_golden_frame_stats(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001194 RATE_CONTROL *const rc = &cpi->rc;
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001195#if CUSTOMIZED_GF
1196 const TWO_PASS *const twopass = &cpi->twopass;
1197 const GF_GROUP *const gf_group = &twopass->gf_group;
1198 const int is_intrnl_arf =
1199 cpi->oxcf.pass == 2
1200 ? gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE
1201 : cpi->refresh_alt2_ref_frame;
1202#else
1203 const int is_intnl_arf = cpi->refresh_alt2_ref_frame;
1204#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001205
Yaowu Xuc27fc142016-08-22 16:08:15 -07001206 // Update the Golden frame usage counts.
1207 // NOTE(weitinglin): If we use show_existing_frame for an OVERLAY frame,
1208 // only the virtual indices for the reference frame will be
1209 // updated and cpi->refresh_golden_frame will still be zero.
1210 if (cpi->refresh_golden_frame || rc->is_src_frame_alt_ref) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001211 // We will not use internal overlay frames to replace the golden frame
1212 if (!rc->is_src_frame_ext_arf)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001213 // this frame refreshes means next frames don't unless specified by user
1214 rc->frames_since_golden = 0;
1215
1216 // If we are not using alt ref in the up and coming group clear the arf
1217 // active flag. In multi arf group case, if the index is not 0 then
1218 // we are overlaying a mid group arf so should not reset the flag.
1219 if (cpi->oxcf.pass == 2) {
1220 if (!rc->source_alt_ref_pending && (cpi->twopass.gf_group.index == 0))
1221 rc->source_alt_ref_active = 0;
1222 } else if (!rc->source_alt_ref_pending) {
1223 rc->source_alt_ref_active = 0;
1224 }
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001225 } else if (!cpi->refresh_alt_ref_frame && !is_intrnl_arf) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001226 rc->frames_since_golden++;
1227 }
1228}
1229
Yaowu Xuf883b422016-08-30 14:01:10 -07001230void av1_rc_postencode_update(AV1_COMP *cpi, uint64_t bytes_used) {
1231 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001232 RATE_CONTROL *const rc = &cpi->rc;
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001233#if CUSTOMIZED_GF
1234 const TWO_PASS *const twopass = &cpi->twopass;
1235 const GF_GROUP *const gf_group = &twopass->gf_group;
1236 const int is_intrnl_arf =
1237 cpi->oxcf.pass == 2
1238 ? gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE
1239 : cpi->refresh_alt2_ref_frame;
1240#else
1241 const int is_intrnl_arf = cpi->refresh_alt2_ref_frame;
1242#endif
1243
Yaowu Xuc27fc142016-08-22 16:08:15 -07001244 const int qindex = cm->base_qindex;
1245
1246 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cm->seg.enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001247 av1_cyclic_refresh_postencode(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001248 }
1249
1250 // Update rate control heuristics
1251 rc->projected_frame_size = (int)(bytes_used << 3);
1252
1253 // Post encode loop adjustment of Q prediction.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001254 av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001255
1256 // Keep a record of last Q and ambient average Q.
1257 if (cm->frame_type == KEY_FRAME) {
1258 rc->last_q[KEY_FRAME] = qindex;
1259 rc->avg_frame_qindex[KEY_FRAME] =
1260 ROUND_POWER_OF_TWO(3 * rc->avg_frame_qindex[KEY_FRAME] + qindex, 2);
1261 } else {
1262 if (!rc->is_src_frame_alt_ref &&
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001263 !(cpi->refresh_golden_frame || is_intrnl_arf ||
Zoe Liue9b15e22017-07-19 15:53:01 -07001264 cpi->refresh_alt_ref_frame)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001265 rc->last_q[INTER_FRAME] = qindex;
1266 rc->avg_frame_qindex[INTER_FRAME] =
1267 ROUND_POWER_OF_TWO(3 * rc->avg_frame_qindex[INTER_FRAME] + qindex, 2);
1268 rc->ni_frames++;
Yaowu Xuf883b422016-08-30 14:01:10 -07001269 rc->tot_q += av1_convert_qindex_to_q(qindex, cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001270 rc->avg_q = rc->tot_q / rc->ni_frames;
1271 // Calculate the average Q for normal inter frames (not key or GFU
1272 // frames).
1273 rc->ni_tot_qi += qindex;
1274 rc->ni_av_qi = rc->ni_tot_qi / rc->ni_frames;
1275 }
1276 }
1277
1278 // Keep record of last boosted (KF/GF/ARF) Q value.
1279 // If the current frame is coded at a lower Q then we also update it.
1280 // If all mbs in this group are skipped only update if the Q value is
1281 // better than that already stored.
1282 // This is used to help set quality in forced key frames to reduce popping
1283 if ((qindex < rc->last_boosted_qindex) || (cm->frame_type == KEY_FRAME) ||
1284 (!rc->constrained_gf_group &&
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001285 (cpi->refresh_alt_ref_frame || is_intrnl_arf ||
Yaowu Xuc27fc142016-08-22 16:08:15 -07001286 (cpi->refresh_golden_frame && !rc->is_src_frame_alt_ref)))) {
1287 rc->last_boosted_qindex = qindex;
1288 }
1289 if (cm->frame_type == KEY_FRAME) rc->last_kf_qindex = qindex;
1290
1291 update_buffer_level(cpi, rc->projected_frame_size);
1292
1293 // Rolling monitors of whether we are over or underspending used to help
1294 // regulate min and Max Q in two pass.
Cheng Chen09c83a52018-06-05 12:27:36 -07001295 if (av1_frame_scaled(cm))
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001296 rc->this_frame_target =
1297 (int)(rc->this_frame_target /
1298 resize_rate_factor(cpi, cm->width, cm->height));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001299 if (cm->frame_type != KEY_FRAME) {
1300 rc->rolling_target_bits = ROUND_POWER_OF_TWO(
1301 rc->rolling_target_bits * 3 + rc->this_frame_target, 2);
1302 rc->rolling_actual_bits = ROUND_POWER_OF_TWO(
1303 rc->rolling_actual_bits * 3 + rc->projected_frame_size, 2);
1304 rc->long_rolling_target_bits = ROUND_POWER_OF_TWO(
1305 rc->long_rolling_target_bits * 31 + rc->this_frame_target, 5);
1306 rc->long_rolling_actual_bits = ROUND_POWER_OF_TWO(
1307 rc->long_rolling_actual_bits * 31 + rc->projected_frame_size, 5);
1308 }
1309
1310 // Actual bits spent
1311 rc->total_actual_bits += rc->projected_frame_size;
Zoe Liue9b15e22017-07-19 15:53:01 -07001312 // TODO(zoeliu): To investigate whether we should treat BWDREF_FRAME
1313 // differently here for rc->avg_frame_bandwidth.
Yaowu Xuc27fc142016-08-22 16:08:15 -07001314 rc->total_target_bits +=
1315 (cm->show_frame || rc->is_bwd_ref_frame) ? rc->avg_frame_bandwidth : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001316
1317 rc->total_target_vs_actual = rc->total_actual_bits - rc->total_target_bits;
1318
1319 if (is_altref_enabled(cpi) && cpi->refresh_alt_ref_frame &&
1320 (cm->frame_type != KEY_FRAME))
1321 // Update the alternate reference frame stats as appropriate.
1322 update_alt_ref_frame_stats(cpi);
1323 else
1324 // Update the Golden frame stats as appropriate.
1325 update_golden_frame_stats(cpi);
1326
1327 if (cm->frame_type == KEY_FRAME) rc->frames_since_key = 0;
1328
Zoe Liue9b15e22017-07-19 15:53:01 -07001329 // TODO(zoeliu): To investigate whether we should treat BWDREF_FRAME
1330 // differently here for rc->avg_frame_bandwidth.
Yaowu Xuc27fc142016-08-22 16:08:15 -07001331 if (cm->show_frame || rc->is_bwd_ref_frame) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001332 rc->frames_since_key++;
1333 rc->frames_to_key--;
1334 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001335 // if (cm->current_video_frame == 1 && cm->show_frame)
1336 /*
1337 rc->this_frame_target =
1338 (int)(rc->this_frame_target / resize_rate_factor(cpi, cm->width,
1339 cm->height));
1340 */
Yaowu Xuc27fc142016-08-22 16:08:15 -07001341}
1342
Yaowu Xuf883b422016-08-30 14:01:10 -07001343void av1_rc_postencode_update_drop_frame(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001344 // Update buffer level with zero size, update frame counters, and return.
1345 update_buffer_level(cpi, 0);
1346 cpi->rc.frames_since_key++;
1347 cpi->rc.frames_to_key--;
1348 cpi->rc.rc_2_frame = 0;
1349 cpi->rc.rc_1_frame = 0;
1350}
1351
1352// Use this macro to turn on/off use of alt-refs in one-pass mode.
1353#define USE_ALTREF_FOR_ONE_PASS 1
1354
Yaowu Xuf883b422016-08-30 14:01:10 -07001355static int calc_pframe_target_size_one_pass_vbr(const AV1_COMP *const cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001356 static const int af_ratio = 10;
1357 const RATE_CONTROL *const rc = &cpi->rc;
1358 int target;
1359#if USE_ALTREF_FOR_ONE_PASS
1360 target =
1361 (!rc->is_src_frame_alt_ref &&
1362 (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame))
1363 ? (rc->avg_frame_bandwidth * rc->baseline_gf_interval * af_ratio) /
1364 (rc->baseline_gf_interval + af_ratio - 1)
1365 : (rc->avg_frame_bandwidth * rc->baseline_gf_interval) /
1366 (rc->baseline_gf_interval + af_ratio - 1);
1367#else
1368 target = rc->avg_frame_bandwidth;
1369#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001370 return av1_rc_clamp_pframe_target_size(cpi, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001371}
1372
Yaowu Xuf883b422016-08-30 14:01:10 -07001373static int calc_iframe_target_size_one_pass_vbr(const AV1_COMP *const cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001374 static const int kf_ratio = 25;
1375 const RATE_CONTROL *rc = &cpi->rc;
1376 const int target = rc->avg_frame_bandwidth * kf_ratio;
Yaowu Xuf883b422016-08-30 14:01:10 -07001377 return av1_rc_clamp_iframe_target_size(cpi, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001378}
1379
// Selects the frame type (key / inter / S-frame), refreshes golden-frame
// group state when the previous group has expired, and sets the bit target
// for the next frame in one-pass VBR mode via rc_set_frame_target().
void av1_rc_get_one_pass_vbr_params(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;
  int target;
  int altref_enabled = is_altref_enabled(cpi);
  int sframe_dist = cpi->oxcf.sframe_dist;
  int sframe_mode = cpi->oxcf.sframe_mode;
  int sframe_enabled = cpi->oxcf.sframe_enabled;
  // Key frame on: first frame, externally forced key frame, or key-frame
  // interval expiry -- but never on a frame that refreshes the alt-ref.
  // TODO(yaowu): replace the "auto_key && 0" below with proper decision logic.
  if (!cpi->refresh_alt_ref_frame &&
      (cm->current_video_frame == 0 || (cpi->frame_flags & FRAMEFLAGS_KEY) ||
       rc->frames_to_key == 0 || (cpi->oxcf.auto_key && 0))) {
    cm->frame_type = KEY_FRAME;
    // "Forced" means the interval ran out (not simply the first frame).
    rc->this_key_frame_forced =
        cm->current_video_frame != 0 && rc->frames_to_key == 0;
    rc->frames_to_key = cpi->oxcf.key_freq;
    rc->kf_boost = DEFAULT_KF_BOOST;
    rc->source_alt_ref_active = 0;
  } else {
    cm->frame_type = INTER_FRAME;
    // S-frame (switch-frame) insertion, if enabled, at multiples of
    // sframe_dist; behavior depends on whether alt-refs are in use.
    if (sframe_enabled) {
      if (altref_enabled) {
        if (sframe_mode == 1) {
          // sframe_mode == 1: insert sframe if it matches altref frame.
          if (cm->current_video_frame % sframe_dist == 0 &&
              cm->frame_type != KEY_FRAME && cm->current_video_frame != 0 &&
              cpi->refresh_alt_ref_frame) {
            cm->frame_type = S_FRAME;
          }
        } else {
          // sframe_mode != 1: if sframe will be inserted at the next available
          // altref frame
          if (cm->current_video_frame % sframe_dist == 0 &&
              cm->frame_type != KEY_FRAME && cm->current_video_frame != 0) {
            rc->sframe_due = 1;
          }

          // Deferred insertion fires on the next alt-ref refresh.
          if (rc->sframe_due && cpi->refresh_alt_ref_frame) {
            cm->frame_type = S_FRAME;
            rc->sframe_due = 0;
          }
        }
      } else {
        // No alt-refs: insert the S-frame exactly on the distance boundary.
        if (cm->current_video_frame % sframe_dist == 0 &&
            cm->frame_type != KEY_FRAME && cm->current_video_frame != 0) {
          cm->frame_type = S_FRAME;
        }
      }
    }
  }
  // Start a new golden-frame group when the previous one has run out.
  if (rc->frames_till_gf_update_due == 0) {
    rc->baseline_gf_interval = (rc->min_gf_interval + rc->max_gf_interval) / 2;
    rc->frames_till_gf_update_due = rc->baseline_gf_interval;
    // NOTE: frames_till_gf_update_due must be <= frames_to_key.
    if (rc->frames_till_gf_update_due > rc->frames_to_key) {
      rc->frames_till_gf_update_due = rc->frames_to_key;
      rc->constrained_gf_group = 1;
    } else {
      rc->constrained_gf_group = 0;
    }
    cpi->refresh_golden_frame = 1;
    rc->source_alt_ref_pending = USE_ALTREF_FOR_ONE_PASS;
    rc->gfu_boost = DEFAULT_GF_BOOST;
  }

  // Cyclic-refresh parameters must be updated before frame QP selection.
  if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
    av1_cyclic_refresh_update_parameters(cpi);

  if (cm->frame_type == KEY_FRAME)
    target = calc_iframe_target_size_one_pass_vbr(cpi);
  else
    target = calc_pframe_target_size_one_pass_vbr(cpi);
  rc_set_frame_target(cpi, target, cm->width, cm->height);
}
1456
Yaowu Xuf883b422016-08-30 14:01:10 -07001457static int calc_pframe_target_size_one_pass_cbr(const AV1_COMP *cpi) {
1458 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001459 const RATE_CONTROL *rc = &cpi->rc;
1460 const int64_t diff = rc->optimal_buffer_level - rc->buffer_level;
1461 const int64_t one_pct_bits = 1 + rc->optimal_buffer_level / 100;
1462 int min_frame_target =
Yaowu Xuf883b422016-08-30 14:01:10 -07001463 AOMMAX(rc->avg_frame_bandwidth >> 4, FRAME_OVERHEAD_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001464 int target;
1465
1466 if (oxcf->gf_cbr_boost_pct) {
1467 const int af_ratio_pct = oxcf->gf_cbr_boost_pct + 100;
1468 target = cpi->refresh_golden_frame
1469 ? (rc->avg_frame_bandwidth * rc->baseline_gf_interval *
1470 af_ratio_pct) /
1471 (rc->baseline_gf_interval * 100 + af_ratio_pct - 100)
1472 : (rc->avg_frame_bandwidth * rc->baseline_gf_interval * 100) /
1473 (rc->baseline_gf_interval * 100 + af_ratio_pct - 100);
1474 } else {
1475 target = rc->avg_frame_bandwidth;
1476 }
1477
1478 if (diff > 0) {
1479 // Lower the target bandwidth for this frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07001480 const int pct_low = (int)AOMMIN(diff / one_pct_bits, oxcf->under_shoot_pct);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001481 target -= (target * pct_low) / 200;
1482 } else if (diff < 0) {
1483 // Increase the target bandwidth for this frame.
1484 const int pct_high =
Yaowu Xuf883b422016-08-30 14:01:10 -07001485 (int)AOMMIN(-diff / one_pct_bits, oxcf->over_shoot_pct);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001486 target += (target * pct_high) / 200;
1487 }
1488 if (oxcf->rc_max_inter_bitrate_pct) {
1489 const int max_rate =
1490 rc->avg_frame_bandwidth * oxcf->rc_max_inter_bitrate_pct / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -07001491 target = AOMMIN(target, max_rate);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001492 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001493 return AOMMAX(min_frame_target, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001494}
1495
Yaowu Xuf883b422016-08-30 14:01:10 -07001496static int calc_iframe_target_size_one_pass_cbr(const AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001497 const RATE_CONTROL *rc = &cpi->rc;
1498 int target;
1499 if (cpi->common.current_video_frame == 0) {
1500 target = ((rc->starting_buffer_level / 2) > INT_MAX)
1501 ? INT_MAX
1502 : (int)(rc->starting_buffer_level / 2);
1503 } else {
1504 int kf_boost = 32;
1505 double framerate = cpi->framerate;
1506
Yaowu Xuf883b422016-08-30 14:01:10 -07001507 kf_boost = AOMMAX(kf_boost, (int)(2 * framerate - 16));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001508 if (rc->frames_since_key < framerate / 2) {
1509 kf_boost = (int)(kf_boost * rc->frames_since_key / (framerate / 2));
1510 }
1511 target = ((16 + kf_boost) * rc->avg_frame_bandwidth) >> 4;
1512 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001513 return av1_rc_clamp_iframe_target_size(cpi, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001514}
1515
Yaowu Xuf883b422016-08-30 14:01:10 -07001516void av1_rc_get_one_pass_cbr_params(AV1_COMP *cpi) {
1517 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001518 RATE_CONTROL *const rc = &cpi->rc;
1519 int target;
1520 // TODO(yaowu): replace the "auto_key && 0" below with proper decision logic.
1521 if ((cm->current_video_frame == 0 || (cpi->frame_flags & FRAMEFLAGS_KEY) ||
1522 rc->frames_to_key == 0 || (cpi->oxcf.auto_key && 0))) {
1523 cm->frame_type = KEY_FRAME;
1524 rc->this_key_frame_forced =
1525 cm->current_video_frame != 0 && rc->frames_to_key == 0;
1526 rc->frames_to_key = cpi->oxcf.key_freq;
1527 rc->kf_boost = DEFAULT_KF_BOOST;
1528 rc->source_alt_ref_active = 0;
1529 } else {
1530 cm->frame_type = INTER_FRAME;
1531 }
1532 if (rc->frames_till_gf_update_due == 0) {
1533 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
Yaowu Xuf883b422016-08-30 14:01:10 -07001534 av1_cyclic_refresh_set_golden_update(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001535 else
1536 rc->baseline_gf_interval =
1537 (rc->min_gf_interval + rc->max_gf_interval) / 2;
1538 rc->frames_till_gf_update_due = rc->baseline_gf_interval;
1539 // NOTE: frames_till_gf_update_due must be <= frames_to_key.
1540 if (rc->frames_till_gf_update_due > rc->frames_to_key)
1541 rc->frames_till_gf_update_due = rc->frames_to_key;
1542 cpi->refresh_golden_frame = 1;
1543 rc->gfu_boost = DEFAULT_GF_BOOST;
1544 }
1545
1546 // Any update/change of global cyclic refresh parameters (amount/delta-qp)
1547 // should be done here, before the frame qp is selected.
1548 if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
Yaowu Xuf883b422016-08-30 14:01:10 -07001549 av1_cyclic_refresh_update_parameters(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001550
1551 if (cm->frame_type == KEY_FRAME)
1552 target = calc_iframe_target_size_one_pass_cbr(cpi);
1553 else
1554 target = calc_pframe_target_size_one_pass_cbr(cpi);
1555
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001556 rc_set_frame_target(cpi, target, cm->width, cm->height);
Fergus Simpson0757fd82017-04-28 20:14:27 -07001557 // TODO(afergs): Decide whether to scale up, down, or not at all
Yaowu Xuc27fc142016-08-22 16:08:15 -07001558}
1559
Yaowu Xuf883b422016-08-30 14:01:10 -07001560int av1_compute_qdelta(const RATE_CONTROL *rc, double qstart, double qtarget,
1561 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001562 int start_index = rc->worst_quality;
1563 int target_index = rc->worst_quality;
1564 int i;
1565
1566 // Convert the average q value to an index.
1567 for (i = rc->best_quality; i < rc->worst_quality; ++i) {
1568 start_index = i;
Yaowu Xuf883b422016-08-30 14:01:10 -07001569 if (av1_convert_qindex_to_q(i, bit_depth) >= qstart) break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001570 }
1571
1572 // Convert the q target to an index
1573 for (i = rc->best_quality; i < rc->worst_quality; ++i) {
1574 target_index = i;
Yaowu Xuf883b422016-08-30 14:01:10 -07001575 if (av1_convert_qindex_to_q(i, bit_depth) >= qtarget) break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001576 }
1577
1578 return target_index - start_index;
1579}
1580
Yaowu Xuf883b422016-08-30 14:01:10 -07001581int av1_compute_qdelta_by_rate(const RATE_CONTROL *rc, FRAME_TYPE frame_type,
1582 int qindex, double rate_target_ratio,
1583 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001584 int target_index = rc->worst_quality;
1585 int i;
1586
1587 // Look up the current projected bits per block for the base index
1588 const int base_bits_per_mb =
Yaowu Xuf883b422016-08-30 14:01:10 -07001589 av1_rc_bits_per_mb(frame_type, qindex, 1.0, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001590
1591 // Find the target bits per mb based on the base value and given ratio.
1592 const int target_bits_per_mb = (int)(rate_target_ratio * base_bits_per_mb);
1593
1594 // Convert the q target to an index
1595 for (i = rc->best_quality; i < rc->worst_quality; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001596 if (av1_rc_bits_per_mb(frame_type, i, 1.0, bit_depth) <=
Yaowu Xuc27fc142016-08-22 16:08:15 -07001597 target_bits_per_mb) {
1598 target_index = i;
1599 break;
1600 }
1601 }
1602 return target_index - qindex;
1603}
1604
// Establishes the allowed golden/alt-ref group interval range in `rc`
// from the encoder configuration, the frame rate, and the lag setting.
// Note the assignment order below is significant: the static-scene and
// FIX_GF_INTERVAL_LENGTH overrides are applied after the defaults.
void av1_rc_set_gf_interval_range(const AV1_COMP *const cpi,
                                  RATE_CONTROL *const rc) {
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;

  // Special case code for 1 pass fixed Q mode tests
  if ((oxcf->pass == 0) && (oxcf->rc_mode == AOM_Q)) {
    rc->max_gf_interval = FIXED_GF_INTERVAL;
    rc->min_gf_interval = FIXED_GF_INTERVAL;
    rc->static_scene_max_gf_interval = FIXED_GF_INTERVAL;
  } else {
    // Set Maximum gf/arf interval; a configured value of 0 means "derive
    // a default" from resolution / framerate.
    rc->max_gf_interval = oxcf->max_gf_interval;
    rc->min_gf_interval = oxcf->min_gf_interval;
    if (rc->min_gf_interval == 0)
      rc->min_gf_interval = av1_rc_get_default_min_gf_interval(
          oxcf->width, oxcf->height, cpi->framerate);
    if (rc->max_gf_interval == 0)
      rc->max_gf_interval = av1_rc_get_default_max_gf_interval(
          cpi->framerate, rc->min_gf_interval);

    // Extended interval for genuinely static scenes
    rc->static_scene_max_gf_interval = MAX_LAG_BUFFERS * 2;

    // With alt-refs, the interval can never exceed the lookahead depth.
    if (is_altref_enabled(cpi)) {
      if (rc->static_scene_max_gf_interval > oxcf->lag_in_frames - 1)
        rc->static_scene_max_gf_interval = oxcf->lag_in_frames - 1;
    }

    if (rc->max_gf_interval > rc->static_scene_max_gf_interval)
      rc->max_gf_interval = rc->static_scene_max_gf_interval;

#if FIX_GF_INTERVAL_LENGTH
    // Experiment: pin the GF group length regardless of the logic above.
    rc->max_gf_interval = FIXED_GF_LENGTH + 1;
#endif

    // Clamp min to max
    rc->min_gf_interval = AOMMIN(rc->min_gf_interval, rc->max_gf_interval);
  }
}
1644
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001645void av1_rc_update_framerate(AV1_COMP *cpi, int width, int height) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001646 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001647 RATE_CONTROL *const rc = &cpi->rc;
1648 int vbr_max_bits;
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001649 const int MBs = av1_get_MBs(width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001650
1651 rc->avg_frame_bandwidth = (int)(oxcf->target_bandwidth / cpi->framerate);
1652 rc->min_frame_bandwidth =
1653 (int)(rc->avg_frame_bandwidth * oxcf->two_pass_vbrmin_section / 100);
1654
1655 rc->min_frame_bandwidth =
Yaowu Xuf883b422016-08-30 14:01:10 -07001656 AOMMAX(rc->min_frame_bandwidth, FRAME_OVERHEAD_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001657
1658 // A maximum bitrate for a frame is defined.
1659 // The baseline for this aligns with HW implementations that
1660 // can support decode of 1080P content up to a bitrate of MAX_MB_RATE bits
1661 // per 16x16 MB (averaged over a frame). However this limit is extended if
1662 // a very high rate is given on the command line or the the rate cannnot
1663 // be acheived because of a user specificed max q (e.g. when the user
1664 // specifies lossless encode.
1665 vbr_max_bits =
1666 (int)(((int64_t)rc->avg_frame_bandwidth * oxcf->two_pass_vbrmax_section) /
1667 100);
1668 rc->max_frame_bandwidth =
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001669 AOMMAX(AOMMAX((MBs * MAX_MB_RATE), MAXRATE_1080P), vbr_max_bits);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001670
Yaowu Xuf883b422016-08-30 14:01:10 -07001671 av1_rc_set_gf_interval_range(cpi, rc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001672}
1673
1674#define VBR_PCT_ADJUSTMENT_LIMIT 50
1675// For VBR...adjustment to the frame target based on error from previous frames
Yaowu Xuf883b422016-08-30 14:01:10 -07001676static void vbr_rate_correction(AV1_COMP *cpi, int *this_frame_target) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001677 RATE_CONTROL *const rc = &cpi->rc;
1678 int64_t vbr_bits_off_target = rc->vbr_bits_off_target;
1679 int max_delta;
1680 double position_factor = 1.0;
1681
1682 // How far through the clip are we.
1683 // This number is used to damp the per frame rate correction.
1684 // Range 0 - 1.0
1685 if (cpi->twopass.total_stats.count != 0.) {
1686 position_factor = sqrt((double)cpi->common.current_video_frame /
1687 cpi->twopass.total_stats.count);
1688 }
1689 max_delta = (int)(position_factor *
1690 ((*this_frame_target * VBR_PCT_ADJUSTMENT_LIMIT) / 100));
1691
1692 // vbr_bits_off_target > 0 means we have extra bits to spend
1693 if (vbr_bits_off_target > 0) {
1694 *this_frame_target += (vbr_bits_off_target > max_delta)
1695 ? max_delta
1696 : (int)vbr_bits_off_target;
1697 } else {
1698 *this_frame_target -= (vbr_bits_off_target < -max_delta)
1699 ? max_delta
1700 : (int)-vbr_bits_off_target;
1701 }
1702
1703 // Fast redistribution of bits arising from massive local undershoot.
1704 // Dont do it for kf,arf,gf or overlay frames.
1705 if (!frame_is_kf_gf_arf(cpi) && !rc->is_src_frame_alt_ref &&
1706 rc->vbr_bits_off_target_fast) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001707 int one_frame_bits = AOMMAX(rc->avg_frame_bandwidth, *this_frame_target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001708 int fast_extra_bits;
Yaowu Xuf883b422016-08-30 14:01:10 -07001709 fast_extra_bits = (int)AOMMIN(rc->vbr_bits_off_target_fast, one_frame_bits);
1710 fast_extra_bits = (int)AOMMIN(
Yaowu Xuc27fc142016-08-22 16:08:15 -07001711 fast_extra_bits,
Yaowu Xuf883b422016-08-30 14:01:10 -07001712 AOMMAX(one_frame_bits / 8, rc->vbr_bits_off_target_fast / 8));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001713 *this_frame_target += (int)fast_extra_bits;
1714 rc->vbr_bits_off_target_fast -= fast_extra_bits;
1715 }
1716}
1717
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001718void av1_set_target_rate(AV1_COMP *cpi, int width, int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001719 RATE_CONTROL *const rc = &cpi->rc;
1720 int target_rate = rc->base_frame_target;
1721
1722 // Correction to rate target based on prior over or under shoot.
Yaowu Xuf883b422016-08-30 14:01:10 -07001723 if (cpi->oxcf.rc_mode == AOM_VBR || cpi->oxcf.rc_mode == AOM_CQ)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001724 vbr_rate_correction(cpi, &target_rate);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001725 rc_set_frame_target(cpi, target_rate, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001726}