blob: 37bf4c40112805a23cfded8894f719f3f04911fa [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <assert.h>
13#include <limits.h>
14#include <math.h>
15#include <stdio.h>
16#include <stdlib.h>
17#include <string.h>
18
Yaowu Xuf883b422016-08-30 14:01:10 -070019#include "aom_dsp/aom_dsp_common.h"
20#include "aom_mem/aom_mem.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070021#include "aom_ports/mem.h"
22#include "aom_ports/system_state.h"
23
24#include "av1/common/alloccommon.h"
25#include "av1/encoder/aq_cyclicrefresh.h"
26#include "av1/common/common.h"
27#include "av1/common/entropymode.h"
28#include "av1/common/quant_common.h"
29#include "av1/common/seg_common.h"
30
31#include "av1/encoder/encodemv.h"
Alex Converse9d068c12017-08-03 11:48:19 -070032#include "av1/encoder/random.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070033#include "av1/encoder/ratectrl.h"
34
// Max rate target for 1080P and below encodes under normal circumstances
// (1920 * 1080 / (16 * 16)) * MAX_MB_RATE bits per MB
#define MAX_MB_RATE 250
#define MAXRATE_1080P 2025000

// Default boost values applied to key frames / golden frames when no better
// estimate is available.
#define DEFAULT_KF_BOOST 2000
#define DEFAULT_GF_BOOST 2000

// Legal range for the bits-per-MB rate correction factor.
#define MIN_BPB_FACTOR 0.005
#define MAX_BPB_FACTOR 50

// Fixed per-frame overhead (in bits) assumed by the frame size estimators.
#define FRAME_OVERHEAD_BITS 200
// Assigns the bit-depth-specific variant of a minq lookup table
// (name##_8, name##_10 or name##_12) to the pointer `name`.
// On an unexpected bit depth, asserts and leaves `name` NULL.
#define ASSIGN_MINQ_TABLE(bit_depth, name)                   \
  do {                                                       \
    switch (bit_depth) {                                     \
      case AOM_BITS_8: name = name##_8; break;               \
      case AOM_BITS_10: name = name##_10; break;             \
      case AOM_BITS_12: name = name##_12; break;             \
      default:                                               \
        assert(0 &&                                          \
               "bit_depth should be AOM_BITS_8, AOM_BITS_10" \
               " or AOM_BITS_12");                           \
        name = NULL;                                         \
    }                                                        \
  } while (0)
Yaowu Xuc27fc142016-08-22 16:08:15 -070060
// Tables relating active max Q to active min Q, one set per supported bit
// depth. Filled in at startup by av1_rc_init_minq_luts().
static int kf_low_motion_minq_8[QINDEX_RANGE];
static int kf_high_motion_minq_8[QINDEX_RANGE];
static int arfgf_low_motion_minq_8[QINDEX_RANGE];
static int arfgf_high_motion_minq_8[QINDEX_RANGE];
static int inter_minq_8[QINDEX_RANGE];
static int rtc_minq_8[QINDEX_RANGE];

static int kf_low_motion_minq_10[QINDEX_RANGE];
static int kf_high_motion_minq_10[QINDEX_RANGE];
static int arfgf_low_motion_minq_10[QINDEX_RANGE];
static int arfgf_high_motion_minq_10[QINDEX_RANGE];
static int inter_minq_10[QINDEX_RANGE];
static int rtc_minq_10[QINDEX_RANGE];
static int kf_low_motion_minq_12[QINDEX_RANGE];
static int kf_high_motion_minq_12[QINDEX_RANGE];
static int arfgf_low_motion_minq_12[QINDEX_RANGE];
static int arfgf_high_motion_minq_12[QINDEX_RANGE];
static int inter_minq_12[QINDEX_RANGE];
static int rtc_minq_12[QINDEX_RANGE];

// Boost thresholds: boosts above gf_high/kf_high select the low-motion minq
// table, below gf_low/kf_low the high-motion table; values in between
// interpolate (see get_active_quality()).
static int gf_high = 2000;
static int gf_low = 400;
static int kf_high = 5000;
static int kf_low = 400;
86
Debargha Mukherjee7166f222017-09-05 21:32:42 -070087// How many times less pixels there are to encode given the current scaling.
88// Temporary replacement for rcf_mult and rate_thresh_mult.
89static double resize_rate_factor(const AV1_COMP *cpi, int width, int height) {
Debargha Mukherjee7166f222017-09-05 21:32:42 -070090 return (double)(cpi->oxcf.width * cpi->oxcf.height) / (width * height);
Fergus Simpsonddc846e2017-04-24 18:09:13 -070091}
92
Yaowu Xuc27fc142016-08-22 16:08:15 -070093// Functions to compute the active minq lookup table entries based on a
94// formulaic approach to facilitate easier adjustment of the Q tables.
95// The formulae were derived from computing a 3rd order polynomial best
96// fit to the original data (after plotting real maxq vs minq (not q index))
97static int get_minq_index(double maxq, double x3, double x2, double x1,
Yaowu Xuf883b422016-08-30 14:01:10 -070098 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070099 int i;
Yaowu Xuf883b422016-08-30 14:01:10 -0700100 const double minqtarget = AOMMIN(((x3 * maxq + x2) * maxq + x1) * maxq, maxq);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700101
102 // Special case handling to deal with the step from q2.0
103 // down to lossless mode represented by q 1.0.
104 if (minqtarget <= 2.0) return 0;
105
106 for (i = 0; i < QINDEX_RANGE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700107 if (minqtarget <= av1_convert_qindex_to_q(i, bit_depth)) return i;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700108 }
109
110 return QINDEX_RANGE - 1;
111}
112
113static void init_minq_luts(int *kf_low_m, int *kf_high_m, int *arfgf_low,
114 int *arfgf_high, int *inter, int *rtc,
Yaowu Xuf883b422016-08-30 14:01:10 -0700115 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700116 int i;
117 for (i = 0; i < QINDEX_RANGE; i++) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700118 const double maxq = av1_convert_qindex_to_q(i, bit_depth);
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000119 kf_low_m[i] = get_minq_index(maxq, 0.000001, -0.0004, 0.150, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700120 kf_high_m[i] = get_minq_index(maxq, 0.0000021, -0.00125, 0.55, bit_depth);
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000121 arfgf_low[i] = get_minq_index(maxq, 0.0000015, -0.0009, 0.30, bit_depth);
122 arfgf_high[i] = get_minq_index(maxq, 0.0000021, -0.00125, 0.55, bit_depth);
Debargha Mukherjeeb3db2062018-02-05 18:50:16 +0000123 inter[i] = get_minq_index(maxq, 0.00000271, -0.00113, 0.90, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700124 rtc[i] = get_minq_index(maxq, 0.00000271, -0.00113, 0.70, bit_depth);
125 }
126}
127
// One-time initialization of every minq lookup table (all frame classes, all
// three supported bit depths). Must run before the tables are consulted.
void av1_rc_init_minq_luts(void) {
  init_minq_luts(kf_low_motion_minq_8, kf_high_motion_minq_8,
                 arfgf_low_motion_minq_8, arfgf_high_motion_minq_8,
                 inter_minq_8, rtc_minq_8, AOM_BITS_8);
  init_minq_luts(kf_low_motion_minq_10, kf_high_motion_minq_10,
                 arfgf_low_motion_minq_10, arfgf_high_motion_minq_10,
                 inter_minq_10, rtc_minq_10, AOM_BITS_10);
  init_minq_luts(kf_low_motion_minq_12, kf_high_motion_minq_12,
                 arfgf_low_motion_minq_12, arfgf_high_motion_minq_12,
                 inter_minq_12, rtc_minq_12, AOM_BITS_12);
}
139
140// These functions use formulaic calculations to make playing with the
141// quantizer tables easier. If necessary they can be replaced by lookup
142// tables if and when things settle down in the experimental bitstream
Yaowu Xuf883b422016-08-30 14:01:10 -0700143double av1_convert_qindex_to_q(int qindex, aom_bit_depth_t bit_depth) {
Yaowu Xud3e7c682017-12-21 14:08:25 -0800144 // Convert the index to a real Q value (scaled down to match old Q values)
Yaowu Xuc27fc142016-08-22 16:08:15 -0700145 switch (bit_depth) {
Monty Montgomery60f2a222017-11-01 19:48:38 -0400146 case AOM_BITS_8: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 4.0;
147 case AOM_BITS_10: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 16.0;
148 case AOM_BITS_12: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 64.0;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700149 default:
Yaowu Xuf883b422016-08-30 14:01:10 -0700150 assert(0 && "bit_depth should be AOM_BITS_8, AOM_BITS_10 or AOM_BITS_12");
Yaowu Xuc27fc142016-08-22 16:08:15 -0700151 return -1.0;
152 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700153}
154
Yaowu Xuf883b422016-08-30 14:01:10 -0700155int av1_rc_bits_per_mb(FRAME_TYPE frame_type, int qindex,
156 double correction_factor, aom_bit_depth_t bit_depth) {
157 const double q = av1_convert_qindex_to_q(qindex, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700158 int enumerator = frame_type == KEY_FRAME ? 2700000 : 1800000;
159
160 assert(correction_factor <= MAX_BPB_FACTOR &&
161 correction_factor >= MIN_BPB_FACTOR);
162
163 // q based adjustment to baseline enumerator
164 enumerator += (int)(enumerator * q) >> 12;
165 return (int)(enumerator * correction_factor / q);
166}
167
Yaowu Xuf883b422016-08-30 14:01:10 -0700168int av1_estimate_bits_at_q(FRAME_TYPE frame_type, int q, int mbs,
169 double correction_factor,
170 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700171 const int bpm =
Yaowu Xuf883b422016-08-30 14:01:10 -0700172 (int)(av1_rc_bits_per_mb(frame_type, q, correction_factor, bit_depth));
173 return AOMMAX(FRAME_OVERHEAD_BITS,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700174 (int)((uint64_t)bpm * mbs) >> BPER_MB_NORMBITS);
175}
176
Yaowu Xuf883b422016-08-30 14:01:10 -0700177int av1_rc_clamp_pframe_target_size(const AV1_COMP *const cpi, int target) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700178 const RATE_CONTROL *rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -0700179 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000180 const int min_frame_target =
181 AOMMAX(rc->min_frame_bandwidth, rc->avg_frame_bandwidth >> 5);
182 // Clip the frame target to the minimum setup value.
183 if (cpi->rc.is_src_frame_alt_ref) {
184 // If there is an active ARF at this location use the minimum
185 // bits on this frame even if it is a constructed arf.
186 // The active maximum quantizer insures that an appropriate
187 // number of bits will be spent if needed for constructed ARFs.
188 target = min_frame_target;
189 } else if (target < min_frame_target) {
190 target = min_frame_target;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700191 }
192
193 // Clip the frame target to the maximum allowed value.
194 if (target > rc->max_frame_bandwidth) target = rc->max_frame_bandwidth;
195 if (oxcf->rc_max_inter_bitrate_pct) {
196 const int max_rate =
197 rc->avg_frame_bandwidth * oxcf->rc_max_inter_bitrate_pct / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -0700198 target = AOMMIN(target, max_rate);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700199 }
200
201 return target;
202}
203
Yaowu Xuf883b422016-08-30 14:01:10 -0700204int av1_rc_clamp_iframe_target_size(const AV1_COMP *const cpi, int target) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700205 const RATE_CONTROL *rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -0700206 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700207 if (oxcf->rc_max_intra_bitrate_pct) {
208 const int max_rate =
209 rc->avg_frame_bandwidth * oxcf->rc_max_intra_bitrate_pct / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -0700210 target = AOMMIN(target, max_rate);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700211 }
212 if (target > rc->max_frame_bandwidth) target = rc->max_frame_bandwidth;
213 return target;
214}
215
216// Update the buffer level: leaky bucket model.
Yaowu Xuf883b422016-08-30 14:01:10 -0700217static void update_buffer_level(AV1_COMP *cpi, int encoded_frame_size) {
218 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700219 RATE_CONTROL *const rc = &cpi->rc;
220
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200221 // Non-viewable frames are a special case and are treated as pure overhead.
Yaowu Xuc27fc142016-08-22 16:08:15 -0700222 // TODO(zoeliu): To further explore whether we should treat BWDREF_FRAME
223 // differently, since it is a no-show frame.
224 if (!cm->show_frame && !rc->is_bwd_ref_frame)
Yaowu Xuc27fc142016-08-22 16:08:15 -0700225 rc->bits_off_target -= encoded_frame_size;
226 else
227 rc->bits_off_target += rc->avg_frame_bandwidth - encoded_frame_size;
228
229 // Clip the buffer level to the maximum specified buffer size.
Yaowu Xuf883b422016-08-30 14:01:10 -0700230 rc->bits_off_target = AOMMIN(rc->bits_off_target, rc->maximum_buffer_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700231 rc->buffer_level = rc->bits_off_target;
232}
233
Yaowu Xuf883b422016-08-30 14:01:10 -0700234int av1_rc_get_default_min_gf_interval(int width, int height,
235 double framerate) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700236 // Assume we do not need any constraint lower than 4K 20 fps
237 static const double factor_safe = 3840 * 2160 * 20.0;
238 const double factor = width * height * framerate;
239 const int default_interval =
240 clamp((int)(framerate * 0.125), MIN_GF_INTERVAL, MAX_GF_INTERVAL);
241
242 if (factor <= factor_safe)
243 return default_interval;
244 else
Yaowu Xuf883b422016-08-30 14:01:10 -0700245 return AOMMAX(default_interval,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700246 (int)(MIN_GF_INTERVAL * factor / factor_safe + 0.5));
247 // Note this logic makes:
248 // 4K24: 5
249 // 4K30: 6
250 // 4K60: 12
251}
252
Yaowu Xuf883b422016-08-30 14:01:10 -0700253int av1_rc_get_default_max_gf_interval(double framerate, int min_gf_interval) {
254 int interval = AOMMIN(MAX_GF_INTERVAL, (int)(framerate * 0.75));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700255 interval += (interval & 0x01); // Round to even value
Yaowu Xuf883b422016-08-30 14:01:10 -0700256 return AOMMAX(interval, min_gf_interval);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700257}
258
// Initializes the RATE_CONTROL state from the encoder configuration.
// `pass` distinguishes 1-pass (0) from multi-pass setups.
void av1_rc_init(const AV1EncoderConfig *oxcf, int pass, RATE_CONTROL *rc) {
  int i;

  // Seed the running average Q: 1-pass CBR starts pessimistically at the
  // worst allowed Q, everything else at the midpoint of the allowed range.
  if (pass == 0 && oxcf->rc_mode == AOM_CBR) {
    rc->avg_frame_qindex[KEY_FRAME] = oxcf->worst_allowed_q;
    rc->avg_frame_qindex[INTER_FRAME] = oxcf->worst_allowed_q;
  } else {
    rc->avg_frame_qindex[KEY_FRAME] =
        (oxcf->worst_allowed_q + oxcf->best_allowed_q) / 2;
    rc->avg_frame_qindex[INTER_FRAME] =
        (oxcf->worst_allowed_q + oxcf->best_allowed_q) / 2;
  }

  rc->last_q[KEY_FRAME] = oxcf->best_allowed_q;
  rc->last_q[INTER_FRAME] = oxcf->worst_allowed_q;

  // Leaky-bucket buffer starts at the configured starting level.
  rc->buffer_level = rc->starting_buffer_level;
  rc->bits_off_target = rc->starting_buffer_level;

  rc->rolling_target_bits = rc->avg_frame_bandwidth;
  rc->rolling_actual_bits = rc->avg_frame_bandwidth;
  rc->long_rolling_target_bits = rc->avg_frame_bandwidth;
  rc->long_rolling_actual_bits = rc->avg_frame_bandwidth;

  rc->total_actual_bits = 0;
  rc->total_target_bits = 0;
  rc->total_target_vs_actual = 0;

  rc->frames_since_key = 8;  // Sensible default for first frame.
  rc->this_key_frame_forced = 0;
  rc->next_key_frame_forced = 0;
  rc->source_alt_ref_pending = 0;
  rc->source_alt_ref_active = 0;

  rc->frames_till_gf_update_due = 0;
  rc->ni_av_qi = oxcf->worst_allowed_q;
  rc->ni_tot_qi = 0;
  rc->ni_frames = 0;

  rc->tot_q = 0.0;
  rc->avg_q = av1_convert_qindex_to_q(oxcf->worst_allowed_q, oxcf->bit_depth);

  // All rate correction factors start neutral (1.0).
  for (i = 0; i < RATE_FACTOR_LEVELS; ++i) {
    rc->rate_correction_factors[i] = 1.0;
  }

  // GF interval limits: a configured value of 0 means "derive a default"
  // from frame rate and resolution.
  rc->min_gf_interval = oxcf->min_gf_interval;
  rc->max_gf_interval = oxcf->max_gf_interval;
  if (rc->min_gf_interval == 0)
    rc->min_gf_interval = av1_rc_get_default_min_gf_interval(
        oxcf->width, oxcf->height, oxcf->init_framerate);
  if (rc->max_gf_interval == 0)
    rc->max_gf_interval = av1_rc_get_default_max_gf_interval(
        oxcf->init_framerate, rc->min_gf_interval);
  rc->baseline_gf_interval = (rc->min_gf_interval + rc->max_gf_interval) / 2;
}
315
Yaowu Xuf883b422016-08-30 14:01:10 -0700316int av1_rc_drop_frame(AV1_COMP *cpi) {
317 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700318 RATE_CONTROL *const rc = &cpi->rc;
319
320 if (!oxcf->drop_frames_water_mark) {
321 return 0;
322 } else {
323 if (rc->buffer_level < 0) {
324 // Always drop if buffer is below 0.
325 return 1;
326 } else {
327 // If buffer is below drop_mark, for now just drop every other frame
328 // (starting with the next frame) until it increases back over drop_mark.
329 int drop_mark =
330 (int)(oxcf->drop_frames_water_mark * rc->optimal_buffer_level / 100);
331 if ((rc->buffer_level > drop_mark) && (rc->decimation_factor > 0)) {
332 --rc->decimation_factor;
333 } else if (rc->buffer_level <= drop_mark && rc->decimation_factor == 0) {
334 rc->decimation_factor = 1;
335 }
336 if (rc->decimation_factor > 0) {
337 if (rc->decimation_count > 0) {
338 --rc->decimation_count;
339 return 1;
340 } else {
341 rc->decimation_count = rc->decimation_factor;
342 return 0;
343 }
344 } else {
345 rc->decimation_count = 0;
346 return 0;
347 }
348 }
349 }
350}
351
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700352static double get_rate_correction_factor(const AV1_COMP *cpi, int width,
353 int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700354 const RATE_CONTROL *const rc = &cpi->rc;
355 double rcf;
356
357 if (cpi->common.frame_type == KEY_FRAME) {
358 rcf = rc->rate_correction_factors[KF_STD];
359 } else if (cpi->oxcf.pass == 2) {
360 RATE_FACTOR_LEVEL rf_lvl =
361 cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
362 rcf = rc->rate_correction_factors[rf_lvl];
363 } else {
364 if ((cpi->refresh_alt_ref_frame || cpi->refresh_golden_frame) &&
365 !rc->is_src_frame_alt_ref &&
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000366 (cpi->oxcf.rc_mode != AOM_CBR || cpi->oxcf.gf_cbr_boost_pct > 20))
Yaowu Xuc27fc142016-08-22 16:08:15 -0700367 rcf = rc->rate_correction_factors[GF_ARF_STD];
368 else
369 rcf = rc->rate_correction_factors[INTER_NORMAL];
370 }
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700371 rcf *= resize_rate_factor(cpi, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700372 return fclamp(rcf, MIN_BPB_FACTOR, MAX_BPB_FACTOR);
373}
374
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700375static void set_rate_correction_factor(AV1_COMP *cpi, double factor, int width,
376 int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700377 RATE_CONTROL *const rc = &cpi->rc;
378
379 // Normalize RCF to account for the size-dependent scaling factor.
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700380 factor /= resize_rate_factor(cpi, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700381
382 factor = fclamp(factor, MIN_BPB_FACTOR, MAX_BPB_FACTOR);
383
384 if (cpi->common.frame_type == KEY_FRAME) {
385 rc->rate_correction_factors[KF_STD] = factor;
386 } else if (cpi->oxcf.pass == 2) {
387 RATE_FACTOR_LEVEL rf_lvl =
388 cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
389 rc->rate_correction_factors[rf_lvl] = factor;
390 } else {
391 if ((cpi->refresh_alt_ref_frame || cpi->refresh_golden_frame) &&
392 !rc->is_src_frame_alt_ref &&
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +0000393 (cpi->oxcf.rc_mode != AOM_CBR || cpi->oxcf.gf_cbr_boost_pct > 20))
Yaowu Xuc27fc142016-08-22 16:08:15 -0700394 rc->rate_correction_factors[GF_ARF_STD] = factor;
395 else
396 rc->rate_correction_factors[INTER_NORMAL] = factor;
397 }
398}
399
// Post-encode update of the rate correction factor: compares the actual
// coded frame size against the size predicted at the chosen Q and nudges the
// factor toward agreement, with damping to avoid oscillation.
void av1_rc_update_rate_correction_factors(AV1_COMP *cpi, int width,
                                           int height) {
  const AV1_COMMON *const cm = &cpi->common;
  int correction_factor = 100;
  double rate_correction_factor =
      get_rate_correction_factor(cpi, width, height);
  double adjustment_limit;
  const int MBs = av1_get_MBs(width, height);

  int projected_size_based_on_q = 0;

  // Do not update the rate factors for arf overlay frames.
  if (cpi->rc.is_src_frame_alt_ref) return;

  // Clear down mmx registers to allow floating point in what follows
  aom_clear_system_state();

  // Work out how big we would have expected the frame to be at this Q given
  // the current correction factor.
  // Stay in double to avoid int overflow when values are large
  if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cpi->common.seg.enabled) {
    projected_size_based_on_q =
        av1_cyclic_refresh_estimate_bits_at_q(cpi, rate_correction_factor);
  } else {
    projected_size_based_on_q = av1_estimate_bits_at_q(
        cpi->common.frame_type, cm->base_qindex, MBs, rate_correction_factor,
        cm->seq_params.bit_depth);
  }
  // Work out a size correction factor: 100 * actual / predicted.
  if (projected_size_based_on_q > FRAME_OVERHEAD_BITS)
    correction_factor = (int)((100 * (int64_t)cpi->rc.projected_frame_size) /
                              projected_size_based_on_q);

  // More heavily damped adjustment used if we have been oscillating either side
  // of target.
  if (correction_factor > 0) {
    adjustment_limit =
        0.25 + 0.5 * AOMMIN(1, fabs(log10(0.01 * correction_factor)));
  } else {
    adjustment_limit = 0.75;
  }

  // Record the Q and the over/under-shoot direction of the last two frames;
  // av1_rc_regulate_q() uses these to damp Q oscillation in CBR mode.
  cpi->rc.q_2_frame = cpi->rc.q_1_frame;
  cpi->rc.q_1_frame = cm->base_qindex;
  cpi->rc.rc_2_frame = cpi->rc.rc_1_frame;
  if (correction_factor > 110)
    cpi->rc.rc_1_frame = -1;  // Frame came out larger than predicted.
  else if (correction_factor < 90)
    cpi->rc.rc_1_frame = 1;  // Frame came out smaller than predicted.
  else
    cpi->rc.rc_1_frame = 0;

  if (correction_factor > 102) {
    // We are not already at the worst allowable quality
    correction_factor =
        (int)(100 + ((correction_factor - 100) * adjustment_limit));
    rate_correction_factor = (rate_correction_factor * correction_factor) / 100;
    // Keep rate_correction_factor within limits
    if (rate_correction_factor > MAX_BPB_FACTOR)
      rate_correction_factor = MAX_BPB_FACTOR;
  } else if (correction_factor < 99) {
    // We are not already at the best allowable quality
    correction_factor =
        (int)(100 - ((100 - correction_factor) * adjustment_limit));
    rate_correction_factor = (rate_correction_factor * correction_factor) / 100;

    // Keep rate_correction_factor within limits
    if (rate_correction_factor < MIN_BPB_FACTOR)
      rate_correction_factor = MIN_BPB_FACTOR;
  }

  set_rate_correction_factor(cpi, rate_correction_factor, width, height);
}
473
// Selects a quantizer index in [active_best_quality, active_worst_quality]
// whose predicted bits-per-MB best matches the frame's bit target, given the
// current rate correction factor.
int av1_rc_regulate_q(const AV1_COMP *cpi, int target_bits_per_frame,
                      int active_best_quality, int active_worst_quality,
                      int width, int height) {
  const AV1_COMMON *const cm = &cpi->common;
  int q = active_worst_quality;
  int last_error = INT_MAX;
  int i, target_bits_per_mb, bits_per_mb_at_this_q;
  const int MBs = av1_get_MBs(width, height);
  const double correction_factor =
      get_rate_correction_factor(cpi, width, height);

  // Calculate required scaling factor based on target frame size and size of
  // frame produced using previous Q.
  target_bits_per_mb =
      (int)((uint64_t)(target_bits_per_frame) << BPER_MB_NORMBITS) / MBs;

  i = active_best_quality;

  // Walk up from the best-quality end until the predicted rate drops to the
  // target, then pick whichever neighbouring Q was closer to the target.
  do {
    if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cm->seg.enabled) {
      bits_per_mb_at_this_q =
          (int)av1_cyclic_refresh_rc_bits_per_mb(cpi, i, correction_factor);
    } else {
      bits_per_mb_at_this_q = (int)av1_rc_bits_per_mb(
          cm->frame_type, i, correction_factor, cm->seq_params.bit_depth);
    }

    if (bits_per_mb_at_this_q <= target_bits_per_mb) {
      if ((target_bits_per_mb - bits_per_mb_at_this_q) <= last_error)
        q = i;
      else
        q = i - 1;

      break;
    } else {
      last_error = bits_per_mb_at_this_q - target_bits_per_mb;
    }
  } while (++i <= active_worst_quality);

  // In CBR mode, this makes sure q is between oscillating Qs to prevent
  // resonance.
  if (cpi->oxcf.rc_mode == AOM_CBR &&
      (cpi->rc.rc_1_frame * cpi->rc.rc_2_frame == -1) &&
      cpi->rc.q_1_frame != cpi->rc.q_2_frame) {
    q = clamp(q, AOMMIN(cpi->rc.q_1_frame, cpi->rc.q_2_frame),
              AOMMAX(cpi->rc.q_1_frame, cpi->rc.q_2_frame));
  }
  return q;
}
523
// Picks a minq value for qindex `q` based on the boost: boosts above `high`
// use the low-motion table, below `low` the high-motion table, and values in
// between interpolate linearly (with rounding) between the two tables.
static int get_active_quality(int q, int gfu_boost, int low, int high,
                              int *low_motion_minq, int *high_motion_minq) {
  if (gfu_boost > high) return low_motion_minq[q];
  if (gfu_boost < low) return high_motion_minq[q];

  const int gap = high - low;
  const int offset = high - gfu_boost;
  const int qdiff = high_motion_minq[q] - low_motion_minq[q];
  const int adjustment = ((offset * qdiff) + (gap >> 1)) / gap;
  return low_motion_minq[q] + adjustment;
}
538
// Key-frame minq for qindex `q`: interpolates between the KF low/high-motion
// tables for the given bit depth, driven by rc->kf_boost.
static int get_kf_active_quality(const RATE_CONTROL *const rc, int q,
                                 aom_bit_depth_t bit_depth) {
  int *kf_low_motion_minq;
  int *kf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, kf_low_motion_minq);
  ASSIGN_MINQ_TABLE(bit_depth, kf_high_motion_minq);
  return get_active_quality(q, rc->kf_boost, kf_low, kf_high,
                            kf_low_motion_minq, kf_high_motion_minq);
}
548
// Golden/alt-ref minq for qindex `q`: interpolates between the ARF/GF
// low/high-motion tables for the given bit depth, driven by rc->gfu_boost.
static int get_gf_active_quality(const RATE_CONTROL *const rc, int q,
                                 aom_bit_depth_t bit_depth) {
  int *arfgf_low_motion_minq;
  int *arfgf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_low_motion_minq);
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_high_motion_minq);
  return get_active_quality(q, rc->gfu_boost, gf_low, gf_high,
                            arfgf_low_motion_minq, arfgf_high_motion_minq);
}
558
#if REDUCE_LAST_ALT_BOOST
// Direct lookup of the high-motion ARF/GF minq for qindex `q` at the given
// bit depth (no boost interpolation).
static int get_gf_high_motion_quality(int q, aom_bit_depth_t bit_depth) {
  int *arfgf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_high_motion_minq);
  return arfgf_high_motion_minq[q];
}
#endif
566
Yaowu Xuf883b422016-08-30 14:01:10 -0700567static int calc_active_worst_quality_one_pass_vbr(const AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700568 const RATE_CONTROL *const rc = &cpi->rc;
569 const unsigned int curr_frame = cpi->common.current_video_frame;
570 int active_worst_quality;
571
572 if (cpi->common.frame_type == KEY_FRAME) {
573 active_worst_quality =
574 curr_frame == 0 ? rc->worst_quality : rc->last_q[KEY_FRAME] * 2;
575 } else {
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200576 if (!rc->is_src_frame_alt_ref &&
577 (cpi->refresh_golden_frame || cpi->refresh_alt2_ref_frame ||
578 cpi->refresh_alt_ref_frame)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700579 active_worst_quality = curr_frame == 1 ? rc->last_q[KEY_FRAME] * 5 / 4
580 : rc->last_q[INTER_FRAME];
581 } else {
582 active_worst_quality = curr_frame == 1 ? rc->last_q[KEY_FRAME] * 2
583 : rc->last_q[INTER_FRAME] * 2;
584 }
585 }
Yaowu Xuf883b422016-08-30 14:01:10 -0700586 return AOMMIN(active_worst_quality, rc->worst_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700587}
588
// Adjust active_worst_quality level based on buffer level.
static int calc_active_worst_quality_one_pass_cbr(const AV1_COMP *cpi) {
  // Adjust active_worst_quality: If buffer is above the optimal/target level,
  // bring active_worst_quality down depending on fullness of buffer.
  // If buffer is below the optimal level, let the active_worst_quality go from
  // ambient Q (at buffer = optimal level) to worst_quality level
  // (at buffer = critical level).
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *rc = &cpi->rc;
  // Buffer level below which we push active_worst to worst_quality.
  int64_t critical_level = rc->optimal_buffer_level >> 3;
  int64_t buff_lvl_step = 0;
  int adjustment = 0;
  int active_worst_quality;
  int ambient_qp;
  if (cm->frame_type == KEY_FRAME) return rc->worst_quality;
  // For ambient_qp we use minimum of avg_frame_qindex[KEY_FRAME/INTER_FRAME]
  // for the first few frames following key frame. These are both initialized
  // to worst_quality and updated with (3/4, 1/4) average in postencode_update.
  // So for first few frames following key, the qp of that key frame is weighted
  // into the active_worst_quality setting.
  ambient_qp = (cm->current_video_frame < 5)
                   ? AOMMIN(rc->avg_frame_qindex[INTER_FRAME],
                            rc->avg_frame_qindex[KEY_FRAME])
                   : rc->avg_frame_qindex[INTER_FRAME];
  active_worst_quality = AOMMIN(rc->worst_quality, ambient_qp * 5 / 4);
  if (rc->buffer_level > rc->optimal_buffer_level) {
    // Adjust down.
    // Maximum limit for down adjustment, ~30%.
    int max_adjustment_down = active_worst_quality / 3;
    if (max_adjustment_down) {
      // Step down in proportion to how far the buffer is above optimal.
      buff_lvl_step = ((rc->maximum_buffer_size - rc->optimal_buffer_level) /
                       max_adjustment_down);
      if (buff_lvl_step)
        adjustment = (int)((rc->buffer_level - rc->optimal_buffer_level) /
                           buff_lvl_step);
      active_worst_quality -= adjustment;
    }
  } else if (rc->buffer_level > critical_level) {
    // Adjust up from ambient Q, scaling toward worst_quality as the buffer
    // drains from the optimal level toward the critical level.
    if (critical_level) {
      buff_lvl_step = (rc->optimal_buffer_level - critical_level);
      if (buff_lvl_step) {
        adjustment = (int)((rc->worst_quality - ambient_qp) *
                           (rc->optimal_buffer_level - rc->buffer_level) /
                           buff_lvl_step);
      }
      active_worst_quality = ambient_qp + adjustment;
    }
  } else {
    // Set to worst_quality if buffer is below critical level.
    active_worst_quality = rc->worst_quality;
  }
  return active_worst_quality;
}
644
// Picks the frame quantizer and the adaptive recode-loop bounds for a
// one-pass CBR encode.
//
// On return, *bottom_index/*top_index hold the clamped active best/worst
// qindex values (the range the recode loop may use) and the return value is
// the qindex to encode the frame with. All outputs lie within
// [rc->best_quality, rc->worst_quality].
static int rc_pick_q_and_bounds_one_pass_cbr(const AV1_COMP *cpi, int width,
                                             int height, int *bottom_index,
                                             int *top_index) {
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  int active_best_quality;
  int active_worst_quality = calc_active_worst_quality_one_pass_cbr(cpi);
  int q;
  // Per-bit-depth lookup table mapping a worst-quality qindex to a min Q.
  int *rtc_minq;
  const int bit_depth = cm->seq_params.bit_depth;
  ASSIGN_MINQ_TABLE(bit_depth, rtc_minq);

  if (frame_is_intra_only(cm)) {
    active_best_quality = rc->best_quality;
    // Handle the special case for key frames forced when we have reached
    // the maximum key frame interval. Here force the Q to a range
    // based on the ambient Q to reduce the risk of popping.
    if (rc->this_key_frame_forced) {
      int qindex = rc->last_boosted_qindex;
      double last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
      // Allow up to a 25% drop (in real Q terms) below the last boosted Q,
      // but never below the configured best quality.
      int delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
                                            (last_boosted_q * 0.75), bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else if (cm->current_video_frame > 0) {
      // not first frame of one pass and kf_boost is set
      double q_adj_factor = 1.0;
      double q_val;

      active_best_quality =
          get_kf_active_quality(rc, rc->avg_frame_qindex[KEY_FRAME], bit_depth);

      // Allow somewhat lower kf minq with small image formats.
      if ((width * height) <= (352 * 288)) {
        q_adj_factor -= 0.25;
      }

      // Convert the adjustment factor to a qindex delta
      // on active_best_quality.
      q_val = av1_convert_qindex_to_q(active_best_quality, bit_depth);
      active_best_quality +=
          av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth);
    }
  } else if (!rc->is_src_frame_alt_ref &&
             (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
    // Use the lower of active_worst_quality and recent
    // average Q as basis for GF/ARF best Q limit unless last frame was
    // a key frame.
    if (rc->frames_since_key > 1 &&
        rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality) {
      q = rc->avg_frame_qindex[INTER_FRAME];
    } else {
      q = active_worst_quality;
    }
    active_best_quality = get_gf_active_quality(rc, q, bit_depth);
  } else {
    // Use the lower of active_worst_quality and recent/average Q.
    if (cm->current_video_frame > 1) {
      if (rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality)
        active_best_quality = rtc_minq[rc->avg_frame_qindex[INTER_FRAME]];
      else
        active_best_quality = rtc_minq[active_worst_quality];
    } else {
      // Early in the clip only the key-frame average is meaningful.
      if (rc->avg_frame_qindex[KEY_FRAME] < active_worst_quality)
        active_best_quality = rtc_minq[rc->avg_frame_qindex[KEY_FRAME]];
      else
        active_best_quality = rtc_minq[active_worst_quality];
    }
  }

  // Clip the active best and worst quality values to limits
  active_best_quality =
      clamp(active_best_quality, rc->best_quality, rc->worst_quality);
  active_worst_quality =
      clamp(active_worst_quality, active_best_quality, rc->worst_quality);

  *top_index = active_worst_quality;
  *bottom_index = active_best_quality;

  // Limit Q range for the adaptive loop.
  // For key frames (other than forced ones and the very first frame), allow
  // the top of the range to drop by a rate-derived delta.
  if (cm->frame_type == KEY_FRAME && !rc->this_key_frame_forced &&
      !(cm->current_video_frame == 0)) {
    int qdelta = 0;
    aom_clear_system_state();
    qdelta = av1_compute_qdelta_by_rate(&cpi->rc, cm->frame_type,
                                        active_worst_quality, 2.0, bit_depth);
    *top_index = active_worst_quality + qdelta;
    *top_index = AOMMAX(*top_index, *bottom_index);
  }

  // Special case code to try and match quality with forced key frames
  if (cm->frame_type == KEY_FRAME && rc->this_key_frame_forced) {
    q = rc->last_boosted_qindex;
  } else {
    q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
                          active_worst_quality, width, height);
    if (q > *top_index) {
      // Special case when we are targeting the max allowed rate
      if (rc->this_frame_target >= rc->max_frame_bandwidth)
        *top_index = q;
      else
        q = *top_index;
    }
  }

  assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
  assert(*bottom_index <= rc->worst_quality &&
         *bottom_index >= rc->best_quality);
  assert(q <= rc->worst_quality && q >= rc->best_quality);
  return q;
}
755
756static int get_active_cq_level(const RATE_CONTROL *rc,
Yaowu Xuf883b422016-08-30 14:01:10 -0700757 const AV1EncoderConfig *const oxcf) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700758 static const double cq_adjust_threshold = 0.1;
759 int active_cq_level = oxcf->cq_level;
Yaowu Xuf883b422016-08-30 14:01:10 -0700760 if (oxcf->rc_mode == AOM_CQ && rc->total_target_bits > 0) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700761 const double x = (double)rc->total_actual_bits / rc->total_target_bits;
762 if (x < cq_adjust_threshold) {
763 active_cq_level = (int)(active_cq_level * x / cq_adjust_threshold);
764 }
765 }
766 return active_cq_level;
767}
768
// Picks the frame quantizer and the adaptive recode-loop bounds for a
// one-pass VBR encode (also handles AOM_Q and AOM_CQ modes in one-pass).
//
// On return, *bottom_index/*top_index hold the clamped active best/worst
// qindex values and the return value is the qindex to encode the frame with.
// All outputs lie within [rc->best_quality, rc->worst_quality].
static int rc_pick_q_and_bounds_one_pass_vbr(const AV1_COMP *cpi, int width,
                                             int height, int *bottom_index,
                                             int *top_index) {
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  const int cq_level = get_active_cq_level(rc, oxcf);
  int active_best_quality;
  int active_worst_quality = calc_active_worst_quality_one_pass_vbr(cpi);
  int q;
  // Per-bit-depth lookup table mapping a worst-quality qindex to a min Q.
  int *inter_minq;
  const int bit_depth = cm->seq_params.bit_depth;
  ASSIGN_MINQ_TABLE(bit_depth, inter_minq);

  if (frame_is_intra_only(cm)) {
    if (oxcf->rc_mode == AOM_Q) {
      // Constant-Q: key frames get up to a 75% Q reduction from cq_level.
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, bit_depth);
      const int delta_qindex =
          av1_compute_qdelta(rc, q_val, q_val * 0.25, bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else if (rc->this_key_frame_forced) {
      // Forced key frame: tie Q to the last boosted Q to reduce popping.
      const int qindex = rc->last_boosted_qindex;
      const double last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
      const int delta_qindex = av1_compute_qdelta(
          rc, last_boosted_q, last_boosted_q * 0.75, bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {  // not first frame of one pass and kf_boost is set
      double q_adj_factor = 1.0;

      active_best_quality =
          get_kf_active_quality(rc, rc->avg_frame_qindex[KEY_FRAME], bit_depth);

      // Allow somewhat lower kf minq with small image formats.
      if ((width * height) <= (352 * 288)) {
        q_adj_factor -= 0.25;
      }

      // Convert the adjustment factor to a qindex delta on active_best_quality.
      {
        const double q_val =
            av1_convert_qindex_to_q(active_best_quality, bit_depth);
        active_best_quality +=
            av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth);
      }
    }
  } else if (!rc->is_src_frame_alt_ref &&
             (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
    // Use the lower of active_worst_quality and recent
    // average Q as basis for GF/ARF best Q limit unless last frame was
    // a key frame.
    q = (rc->frames_since_key > 1 &&
         rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality)
            ? rc->avg_frame_qindex[INTER_FRAME]
            : rc->avg_frame_qindex[KEY_FRAME];
    // For constrained quality dont allow Q less than the cq level
    if (oxcf->rc_mode == AOM_CQ) {
      if (q < cq_level) q = cq_level;
      active_best_quality = get_gf_active_quality(rc, q, bit_depth);
      // Constrained quality use slightly lower active best.
      active_best_quality = active_best_quality * 15 / 16;
    } else if (oxcf->rc_mode == AOM_Q) {
      // Constant-Q: ARFs get a 60% reduction, golden frames a 50% reduction.
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, bit_depth);
      const int delta_qindex =
          (cpi->refresh_alt_ref_frame)
              ? av1_compute_qdelta(rc, q_val, q_val * 0.40, bit_depth)
              : av1_compute_qdelta(rc, q_val, q_val * 0.50, bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {
      active_best_quality = get_gf_active_quality(rc, q, bit_depth);
    }
  } else {
    if (oxcf->rc_mode == AOM_Q) {
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, bit_depth);
      // Per-frame rate multipliers cycled over the fixed GF interval.
      const double delta_rate[FIXED_GF_INTERVAL] = { 0.50, 1.0, 0.85, 1.0,
                                                     0.70, 1.0, 0.85, 1.0 };
      const int delta_qindex = av1_compute_qdelta(
          rc, q_val,
          q_val * delta_rate[cm->current_video_frame % FIXED_GF_INTERVAL],
          bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {
      // Use the lower of active_worst_quality and recent/average Q.
      active_best_quality = (cm->current_video_frame > 1)
                                ? inter_minq[rc->avg_frame_qindex[INTER_FRAME]]
                                : inter_minq[rc->avg_frame_qindex[KEY_FRAME]];
      // For the constrained quality mode we don't want
      // q to fall below the cq level.
      if ((oxcf->rc_mode == AOM_CQ) && (active_best_quality < cq_level)) {
        active_best_quality = cq_level;
      }
    }
  }

  // Clip the active best and worst quality values to limits
  active_best_quality =
      clamp(active_best_quality, rc->best_quality, rc->worst_quality);
  active_worst_quality =
      clamp(active_worst_quality, active_best_quality, rc->worst_quality);

  *top_index = active_worst_quality;
  *bottom_index = active_best_quality;

  // Limit Q range for the adaptive loop.
  {
    int qdelta = 0;
    aom_clear_system_state();
    if (cm->frame_type == KEY_FRAME && !rc->this_key_frame_forced &&
        !(cm->current_video_frame == 0)) {
      qdelta = av1_compute_qdelta_by_rate(&cpi->rc, cm->frame_type,
                                          active_worst_quality, 2.0, bit_depth);
    } else if (!rc->is_src_frame_alt_ref &&
               (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
      qdelta = av1_compute_qdelta_by_rate(
          &cpi->rc, cm->frame_type, active_worst_quality, 1.75, bit_depth);
    }
    *top_index = active_worst_quality + qdelta;
    *top_index = AOMMAX(*top_index, *bottom_index);
  }

  if (oxcf->rc_mode == AOM_Q) {
    q = active_best_quality;
    // Special case code to try and match quality with forced key frames
  } else if ((cm->frame_type == KEY_FRAME) && rc->this_key_frame_forced) {
    q = rc->last_boosted_qindex;
  } else {
    q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
                          active_worst_quality, width, height);
    if (q > *top_index) {
      // Special case when we are targeting the max allowed rate
      if (rc->this_frame_target >= rc->max_frame_bandwidth)
        *top_index = q;
      else
        q = *top_index;
    }
  }

  assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
  assert(*bottom_index <= rc->worst_quality &&
         *bottom_index >= rc->best_quality);
  assert(q <= rc->worst_quality && q >= rc->best_quality);
  return q;
}
914
Yaowu Xuf883b422016-08-30 14:01:10 -0700915int av1_frame_type_qdelta(const AV1_COMP *cpi, int rf_level, int q) {
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200916 static const FRAME_TYPE frame_type[RATE_FACTOR_LEVELS] = {
917 INTER_FRAME, INTER_FRAME, INTER_FRAME, INTER_FRAME, INTER_FRAME, KEY_FRAME
918 };
Yaowu Xuf883b422016-08-30 14:01:10 -0700919 const AV1_COMMON *const cm = &cpi->common;
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700920 int qdelta = av1_compute_qdelta_by_rate(&cpi->rc, frame_type[rf_level], q,
921 rate_factor_deltas[rf_level],
922 cm->seq_params.bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700923 return qdelta;
924}
925
926#define STATIC_MOTION_THRESH 95
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700927static int rc_pick_q_and_bounds_two_pass(const AV1_COMP *cpi, int width,
928 int height, int *bottom_index,
Wei-Ting Linebff3772018-07-24 11:59:40 -0700929 int *top_index, int *arf_q) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700930 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700931 const RATE_CONTROL *const rc = &cpi->rc;
Yaowu Xuf883b422016-08-30 14:01:10 -0700932 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700933 const GF_GROUP *gf_group = &cpi->twopass.gf_group;
934 const int cq_level = get_active_cq_level(rc, oxcf);
935 int active_best_quality;
936 int active_worst_quality = cpi->twopass.active_worst_quality;
937 int q;
938 int *inter_minq;
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700939 const int bit_depth = cm->seq_params.bit_depth;
940 ASSIGN_MINQ_TABLE(bit_depth, inter_minq);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700941
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700942#if CUSTOMIZED_GF
Wei-Ting Lin240d9b42018-07-12 11:48:02 -0700943 const int is_intrl_arf_boost =
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700944 gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE;
945#else
946 const int is_intrl_arf_boost = cpi->refresh_alt2_ref_frame;
Wei-Ting Lincffe49d2018-07-10 14:15:46 -0700947#endif // CUSTOMIZED_GF
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700948
Yaowu Xuc27fc142016-08-22 16:08:15 -0700949 if (frame_is_intra_only(cm)) {
950 // Handle the special case for key frames forced when we have reached
951 // the maximum key frame interval. Here force the Q to a range
952 // based on the ambient Q to reduce the risk of popping.
953 if (rc->this_key_frame_forced) {
954 double last_boosted_q;
955 int delta_qindex;
956 int qindex;
957
958 if (cpi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700959 qindex = AOMMIN(rc->last_kf_qindex, rc->last_boosted_qindex);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700960 active_best_quality = qindex;
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700961 last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
Yaowu Xuf883b422016-08-30 14:01:10 -0700962 delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700963 last_boosted_q * 1.25, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700964 active_worst_quality =
Yaowu Xuf883b422016-08-30 14:01:10 -0700965 AOMMIN(qindex + delta_qindex, active_worst_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700966 } else {
967 qindex = rc->last_boosted_qindex;
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700968 last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
Yaowu Xuf883b422016-08-30 14:01:10 -0700969 delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700970 last_boosted_q * 0.75, bit_depth);
Yaowu Xuf883b422016-08-30 14:01:10 -0700971 active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700972 }
973 } else {
974 // Not forced keyframe.
975 double q_adj_factor = 1.0;
976 double q_val;
977
978 // Baseline value derived from cpi->active_worst_quality and kf boost.
979 active_best_quality =
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700980 get_kf_active_quality(rc, active_worst_quality, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700981
982 // Allow somewhat lower kf minq with small image formats.
Debargha Mukherjee7166f222017-09-05 21:32:42 -0700983 if ((width * height) <= (352 * 288)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700984 q_adj_factor -= 0.25;
985 }
986
987 // Make a further adjustment based on the kf zero motion measure.
988 q_adj_factor += 0.05 - (0.001 * (double)cpi->twopass.kf_zeromotion_pct);
989
990 // Convert the adjustment factor to a qindex delta
991 // on active_best_quality.
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700992 q_val = av1_convert_qindex_to_q(active_best_quality, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700993 active_best_quality +=
Urvang Joshi20cf30e2018-07-19 02:33:58 -0700994 av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700995 }
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200996 } else if (!rc->is_src_frame_alt_ref &&
Wei-Ting Lin15a45882018-07-02 16:45:55 -0700997 (cpi->refresh_golden_frame || is_intrl_arf_boost ||
Sebastien Alaiwan365e6442017-10-16 11:35:00 +0200998 cpi->refresh_alt_ref_frame)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700999 // Use the lower of active_worst_quality and recent
1000 // average Q as basis for GF/ARF best Q limit unless last frame was
1001 // a key frame.
1002 if (rc->frames_since_key > 1 &&
1003 rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality) {
1004 q = rc->avg_frame_qindex[INTER_FRAME];
1005 } else {
1006 q = active_worst_quality;
1007 }
1008 // For constrained quality dont allow Q less than the cq level
Yaowu Xuf883b422016-08-30 14:01:10 -07001009 if (oxcf->rc_mode == AOM_CQ) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001010 if (q < cq_level) q = cq_level;
1011
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001012 active_best_quality = get_gf_active_quality(rc, q, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001013
1014 // Constrained quality use slightly lower active best.
1015 active_best_quality = active_best_quality * 15 / 16;
1016
Yaowu Xuf883b422016-08-30 14:01:10 -07001017 } else if (oxcf->rc_mode == AOM_Q) {
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001018 if (!cpi->refresh_alt_ref_frame && !is_intrl_arf_boost) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001019 active_best_quality = cq_level;
1020 } else {
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001021 if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
1022 active_best_quality = get_gf_active_quality(rc, q, bit_depth);
Wei-Ting Linebff3772018-07-24 11:59:40 -07001023 *arf_q = active_best_quality;
1024#if REDUCE_LAST_ALT_BOOST
1025 int min_boost =
1026 (get_gf_high_motion_quality(q, bit_depth) + active_best_quality) /
1027 2;
1028 int boost = min_boost - active_best_quality;
1029
1030 active_best_quality = min_boost - (int)(boost * rc->arf_boost_factor);
1031#endif
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001032 } else {
1033 active_best_quality = rc->arf_q;
1034 }
Wei-Ting Lin050fb022018-07-13 11:45:05 -07001035#if USE_SYMM_MULTI_LAYER
1036 if (cpi->new_bwdref_update_rule && is_intrl_arf_boost) {
1037 int this_height = gf_group->pyramid_level[gf_group->index];
1038 while (this_height < gf_group->pyramid_height) {
1039 active_best_quality = (active_best_quality + cq_level + 1) / 2;
1040 ++this_height;
1041 }
1042 } else {
1043#endif
1044 // Modify best quality for second level arfs. For mode AOM_Q this
1045 // becomes the baseline frame q.
1046 if (gf_group->rf_level[gf_group->index] == GF_ARF_LOW)
1047 active_best_quality = (active_best_quality + cq_level + 1) / 2;
1048#if USE_SYMM_MULTI_LAYER
1049 }
1050#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001051 }
1052 } else {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001053 active_best_quality = get_gf_active_quality(rc, q, bit_depth);
Wei-Ting Lin050fb022018-07-13 11:45:05 -07001054#if USE_SYMM_MULTI_LAYER
1055 if (cpi->new_bwdref_update_rule && is_intrl_arf_boost) {
1056 int this_height = gf_group->pyramid_level[gf_group->index];
1057 while (this_height < gf_group->pyramid_height) {
1058 active_best_quality =
1059 (active_best_quality + active_worst_quality + 1) / 2;
1060 ++this_height;
1061 }
1062 }
1063#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001064 }
1065 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001066 if (oxcf->rc_mode == AOM_Q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001067 active_best_quality = cq_level;
1068 } else {
1069 active_best_quality = inter_minq[active_worst_quality];
1070
1071 // For the constrained quality mode we don't want
1072 // q to fall below the cq level.
Yaowu Xuf883b422016-08-30 14:01:10 -07001073 if ((oxcf->rc_mode == AOM_CQ) && (active_best_quality < cq_level)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001074 active_best_quality = cq_level;
1075 }
1076 }
1077 }
1078
1079 // Extension to max or min Q if undershoot or overshoot is outside
1080 // the permitted range.
Debargha Mukherjeed0b9bf72018-05-14 17:45:27 +00001081 if ((cpi->oxcf.rc_mode != AOM_Q) &&
1082 (cpi->twopass.gf_zeromotion_pct < VLOW_MOTION_THRESHOLD)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001083 if (frame_is_intra_only(cm) ||
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02001084 (!rc->is_src_frame_alt_ref &&
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001085 (cpi->refresh_golden_frame || is_intrl_arf_boost ||
Sebastien Alaiwan365e6442017-10-16 11:35:00 +02001086 cpi->refresh_alt_ref_frame))) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001087 active_best_quality -=
1088 (cpi->twopass.extend_minq + cpi->twopass.extend_minq_fast);
1089 active_worst_quality += (cpi->twopass.extend_maxq / 2);
1090 } else {
1091 active_best_quality -=
1092 (cpi->twopass.extend_minq + cpi->twopass.extend_minq_fast) / 2;
1093 active_worst_quality += cpi->twopass.extend_maxq;
1094 }
1095 }
1096
Yaowu Xuf883b422016-08-30 14:01:10 -07001097 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07001098 // Static forced key frames Q restrictions dealt with elsewhere.
1099 if (!(frame_is_intra_only(cm)) || !rc->this_key_frame_forced ||
1100 (cpi->twopass.last_kfgroup_zeromotion_pct < STATIC_MOTION_THRESH)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001101 int qdelta = av1_frame_type_qdelta(cpi, gf_group->rf_level[gf_group->index],
1102 active_worst_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001103 active_worst_quality =
Yaowu Xuf883b422016-08-30 14:01:10 -07001104 AOMMAX(active_worst_quality + qdelta, active_best_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001105 }
1106
1107 // Modify active_best_quality for downscaled normal frames.
Cheng Chen09c83a52018-06-05 12:27:36 -07001108 if (av1_frame_scaled(cm) && !frame_is_kf_gf_arf(cpi)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001109 int qdelta = av1_compute_qdelta_by_rate(
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001110 rc, cm->frame_type, active_best_quality, 2.0, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001111 active_best_quality =
Yaowu Xuf883b422016-08-30 14:01:10 -07001112 AOMMAX(active_best_quality + qdelta, rc->best_quality);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001113 }
1114
1115 active_best_quality =
1116 clamp(active_best_quality, rc->best_quality, rc->worst_quality);
1117 active_worst_quality =
1118 clamp(active_worst_quality, active_best_quality, rc->worst_quality);
1119
Yaowu Xuf883b422016-08-30 14:01:10 -07001120 if (oxcf->rc_mode == AOM_Q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001121 q = active_best_quality;
1122 // Special case code to try and match quality with forced key frames.
1123 } else if (frame_is_intra_only(cm) && rc->this_key_frame_forced) {
1124 // If static since last kf use better of last boosted and last kf q.
1125 if (cpi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001126 q = AOMMIN(rc->last_kf_qindex, rc->last_boosted_qindex);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001127 } else {
1128 q = rc->last_boosted_qindex;
1129 }
1130 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07001131 q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001132 active_worst_quality, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001133 if (q > active_worst_quality) {
1134 // Special case when we are targeting the max allowed rate.
1135 if (rc->this_frame_target >= rc->max_frame_bandwidth)
1136 active_worst_quality = q;
1137 else
1138 q = active_worst_quality;
1139 }
1140 }
1141 clamp(q, active_best_quality, active_worst_quality);
1142
1143 *top_index = active_worst_quality;
1144 *bottom_index = active_best_quality;
1145
1146 assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
1147 assert(*bottom_index <= rc->worst_quality &&
1148 *bottom_index >= rc->best_quality);
1149 assert(q <= rc->worst_quality && q >= rc->best_quality);
1150 return q;
1151}
1152
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001153int av1_rc_pick_q_and_bounds(AV1_COMP *cpi, int width, int height,
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001154 int *bottom_index, int *top_index) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001155 int q;
1156 if (cpi->oxcf.pass == 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001157 if (cpi->oxcf.rc_mode == AOM_CBR)
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001158 q = rc_pick_q_and_bounds_one_pass_cbr(cpi, width, height, bottom_index,
1159 top_index);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001160 else
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001161 q = rc_pick_q_and_bounds_one_pass_vbr(cpi, width, height, bottom_index,
1162 top_index);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001163 } else {
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001164 assert(cpi->oxcf.pass == 2 && "invalid encode pass");
1165
1166 GF_GROUP *gf_group = &cpi->twopass.gf_group;
Wei-Ting Linebff3772018-07-24 11:59:40 -07001167 int arf_q = 0;
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001168
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001169 q = rc_pick_q_and_bounds_two_pass(cpi, width, height, bottom_index,
Wei-Ting Linebff3772018-07-24 11:59:40 -07001170 top_index, &arf_q);
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001171
1172 if (gf_group->update_type[gf_group->index] == ARF_UPDATE) {
Wei-Ting Linebff3772018-07-24 11:59:40 -07001173 cpi->rc.arf_q = arf_q;
Wei-Ting Lin422498b2018-07-20 17:00:09 -07001174 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001175 }
1176
1177 return q;
1178}
1179
Yaowu Xuf883b422016-08-30 14:01:10 -07001180void av1_rc_compute_frame_size_bounds(const AV1_COMP *cpi, int frame_target,
1181 int *frame_under_shoot_limit,
1182 int *frame_over_shoot_limit) {
1183 if (cpi->oxcf.rc_mode == AOM_Q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001184 *frame_under_shoot_limit = 0;
1185 *frame_over_shoot_limit = INT_MAX;
1186 } else {
1187 // For very small rate targets where the fractional adjustment
1188 // may be tiny make sure there is at least a minimum range.
1189 const int tolerance = (cpi->sf.recode_tolerance * frame_target) / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -07001190 *frame_under_shoot_limit = AOMMAX(frame_target - tolerance - 200, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001191 *frame_over_shoot_limit =
Yaowu Xuf883b422016-08-30 14:01:10 -07001192 AOMMIN(frame_target + tolerance + 200, cpi->rc.max_frame_bandwidth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001193 }
1194}
1195
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001196static void rc_set_frame_target(AV1_COMP *cpi, int target, int width,
1197 int height) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001198 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001199 RATE_CONTROL *const rc = &cpi->rc;
1200
1201 rc->this_frame_target = target;
1202
Fergus Simpsonfecb2ab2017-04-30 15:49:57 -07001203 // Modify frame size target when down-scaled.
Cheng Chen09c83a52018-06-05 12:27:36 -07001204 if (av1_frame_scaled(cm))
Fergus Simpsonddc846e2017-04-24 18:09:13 -07001205 rc->this_frame_target =
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001206 (int)(rc->this_frame_target * resize_rate_factor(cpi, width, height));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001207
1208 // Target rate per SB64 (including partial SB64s.
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001209 rc->sb64_target_rate =
1210 (int)((int64_t)rc->this_frame_target * 64 * 64) / (width * height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001211}
1212
Yaowu Xuf883b422016-08-30 14:01:10 -07001213static void update_alt_ref_frame_stats(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001214 // this frame refreshes means next frames don't unless specified by user
1215 RATE_CONTROL *const rc = &cpi->rc;
1216 rc->frames_since_golden = 0;
1217
1218 // Mark the alt ref as done (setting to 0 means no further alt refs pending).
1219 rc->source_alt_ref_pending = 0;
1220
1221 // Set the alternate reference frame active flag
1222 rc->source_alt_ref_active = 1;
1223}
1224
Yaowu Xuf883b422016-08-30 14:01:10 -07001225static void update_golden_frame_stats(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001226 RATE_CONTROL *const rc = &cpi->rc;
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001227#if CUSTOMIZED_GF
1228 const TWO_PASS *const twopass = &cpi->twopass;
1229 const GF_GROUP *const gf_group = &twopass->gf_group;
1230 const int is_intrnl_arf =
1231 cpi->oxcf.pass == 2
1232 ? gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE
1233 : cpi->refresh_alt2_ref_frame;
1234#else
1235 const int is_intnl_arf = cpi->refresh_alt2_ref_frame;
1236#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001237
Yaowu Xuc27fc142016-08-22 16:08:15 -07001238 // Update the Golden frame usage counts.
1239 // NOTE(weitinglin): If we use show_existing_frame for an OVERLAY frame,
1240 // only the virtual indices for the reference frame will be
1241 // updated and cpi->refresh_golden_frame will still be zero.
1242 if (cpi->refresh_golden_frame || rc->is_src_frame_alt_ref) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001243 // We will not use internal overlay frames to replace the golden frame
1244 if (!rc->is_src_frame_ext_arf)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001245 // this frame refreshes means next frames don't unless specified by user
1246 rc->frames_since_golden = 0;
1247
1248 // If we are not using alt ref in the up and coming group clear the arf
1249 // active flag. In multi arf group case, if the index is not 0 then
1250 // we are overlaying a mid group arf so should not reset the flag.
1251 if (cpi->oxcf.pass == 2) {
1252 if (!rc->source_alt_ref_pending && (cpi->twopass.gf_group.index == 0))
1253 rc->source_alt_ref_active = 0;
1254 } else if (!rc->source_alt_ref_pending) {
1255 rc->source_alt_ref_active = 0;
1256 }
Wei-Ting Lin15a45882018-07-02 16:45:55 -07001257 } else if (!cpi->refresh_alt_ref_frame && !is_intrnl_arf) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001258 rc->frames_since_golden++;
1259 }
1260}
1261
// Post-encode rate-control update. Records the actual size of the frame just
// coded, updates the rate-correction factors, Q history, buffer model, and
// rolling over/under-spend monitors, and advances the per-frame counters.
// 'bytes_used' is the compressed size of the frame in bytes.
void av1_rc_postencode_update(AV1_COMP *cpi, uint64_t bytes_used) {
  const AV1_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;
#if CUSTOMIZED_GF
  const TWO_PASS *const twopass = &cpi->twopass;
  const GF_GROUP *const gf_group = &twopass->gf_group;
  // Two-pass: the GF group structure decides whether this frame was an
  // internal ARF; one-pass falls back on the per-frame refresh flag.
  const int is_intrnl_arf =
      cpi->oxcf.pass == 2
          ? gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE
          : cpi->refresh_alt2_ref_frame;
#else
  const int is_intrnl_arf = cpi->refresh_alt2_ref_frame;
#endif

  const int qindex = cm->base_qindex;

  // Let cyclic refresh update its segment statistics first.
  if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cm->seg.enabled) {
    av1_cyclic_refresh_postencode(cpi);
  }

  // Update rate control heuristics
  rc->projected_frame_size = (int)(bytes_used << 3);  // bytes -> bits

  // Post encode loop adjustment of Q prediction.
  av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height);

  // Keep a record of last Q and ambient average Q.
  if (cm->frame_type == KEY_FRAME) {
    rc->last_q[KEY_FRAME] = qindex;
    // Exponential moving average with weight 1/4 on the new sample.
    rc->avg_frame_qindex[KEY_FRAME] =
        ROUND_POWER_OF_TWO(3 * rc->avg_frame_qindex[KEY_FRAME] + qindex, 2);
  } else {
    // Only plain inter frames (not overlays, GF, ARF or internal-ARF
    // refreshes) contribute to the "normal inter frame" Q statistics.
    if (!rc->is_src_frame_alt_ref &&
        !(cpi->refresh_golden_frame || is_intrnl_arf ||
          cpi->refresh_alt_ref_frame)) {
      rc->last_q[INTER_FRAME] = qindex;
      rc->avg_frame_qindex[INTER_FRAME] =
          ROUND_POWER_OF_TWO(3 * rc->avg_frame_qindex[INTER_FRAME] + qindex, 2);
      rc->ni_frames++;
      rc->tot_q += av1_convert_qindex_to_q(qindex, cm->seq_params.bit_depth);
      rc->avg_q = rc->tot_q / rc->ni_frames;
      // Calculate the average Q for normal inter frames (not key or GFU
      // frames).
      rc->ni_tot_qi += qindex;
      rc->ni_av_qi = rc->ni_tot_qi / rc->ni_frames;
    }
  }

  // Keep record of last boosted (KF/GF/ARF) Q value.
  // If the current frame is coded at a lower Q then we also update it.
  // If all mbs in this group are skipped only update if the Q value is
  // better than that already stored.
  // This is used to help set quality in forced key frames to reduce popping
  if ((qindex < rc->last_boosted_qindex) || (cm->frame_type == KEY_FRAME) ||
      (!rc->constrained_gf_group &&
       (cpi->refresh_alt_ref_frame || is_intrnl_arf ||
        (cpi->refresh_golden_frame && !rc->is_src_frame_alt_ref)))) {
    rc->last_boosted_qindex = qindex;
  }
  if (cm->frame_type == KEY_FRAME) rc->last_kf_qindex = qindex;

  update_buffer_level(cpi, rc->projected_frame_size);

  // Rolling monitors of whether we are over or underspending used to help
  // regulate min and Max Q in two pass.
  // Undo the scaling applied at target-setting time so the rolling averages
  // track the unscaled target.
  if (av1_frame_scaled(cm))
    rc->this_frame_target =
        (int)(rc->this_frame_target /
              resize_rate_factor(cpi, cm->width, cm->height));
  if (cm->frame_type != KEY_FRAME) {
    // Short (weight 3/4) and long (weight 31/32) rolling averages of target
    // and actual bits.
    rc->rolling_target_bits = ROUND_POWER_OF_TWO(
        rc->rolling_target_bits * 3 + rc->this_frame_target, 2);
    rc->rolling_actual_bits = ROUND_POWER_OF_TWO(
        rc->rolling_actual_bits * 3 + rc->projected_frame_size, 2);
    rc->long_rolling_target_bits = ROUND_POWER_OF_TWO(
        rc->long_rolling_target_bits * 31 + rc->this_frame_target, 5);
    rc->long_rolling_actual_bits = ROUND_POWER_OF_TWO(
        rc->long_rolling_actual_bits * 31 + rc->projected_frame_size, 5);
  }

  // Actual bits spent
  rc->total_actual_bits += rc->projected_frame_size;
  // TODO(zoeliu): To investigate whether we should treat BWDREF_FRAME
  //               differently here for rc->avg_frame_bandwidth.
  rc->total_target_bits +=
      (cm->show_frame || rc->is_bwd_ref_frame) ? rc->avg_frame_bandwidth : 0;

  rc->total_target_vs_actual = rc->total_actual_bits - rc->total_target_bits;

  if (is_altref_enabled(cpi) && cpi->refresh_alt_ref_frame &&
      (cm->frame_type != KEY_FRAME))
    // Update the alternate reference frame stats as appropriate.
    update_alt_ref_frame_stats(cpi);
  else
    // Update the Golden frame stats as appropriate.
    update_golden_frame_stats(cpi);

  if (cm->frame_type == KEY_FRAME) rc->frames_since_key = 0;

  // TODO(zoeliu): To investigate whether we should treat BWDREF_FRAME
  //               differently here for rc->avg_frame_bandwidth.
  if (cm->show_frame || rc->is_bwd_ref_frame) {
    rc->frames_since_key++;
    rc->frames_to_key--;
  }
  // NOTE(review): dead, commented-out rescaling experiment left in place —
  // candidate for deletion in a follow-up.
  // if (cm->current_video_frame == 1 && cm->show_frame)
  /*
  rc->this_frame_target =
      (int)(rc->this_frame_target / resize_rate_factor(cpi, cm->width,
  cm->height));
  */
}
1374
Yaowu Xuf883b422016-08-30 14:01:10 -07001375void av1_rc_postencode_update_drop_frame(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001376 // Update buffer level with zero size, update frame counters, and return.
1377 update_buffer_level(cpi, 0);
1378 cpi->rc.frames_since_key++;
1379 cpi->rc.frames_to_key--;
1380 cpi->rc.rc_2_frame = 0;
1381 cpi->rc.rc_1_frame = 0;
1382}
1383
1384// Use this macro to turn on/off use of alt-refs in one-pass mode.
1385#define USE_ALTREF_FOR_ONE_PASS 1
1386
Yaowu Xuf883b422016-08-30 14:01:10 -07001387static int calc_pframe_target_size_one_pass_vbr(const AV1_COMP *const cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001388 static const int af_ratio = 10;
1389 const RATE_CONTROL *const rc = &cpi->rc;
1390 int target;
1391#if USE_ALTREF_FOR_ONE_PASS
1392 target =
1393 (!rc->is_src_frame_alt_ref &&
1394 (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame))
1395 ? (rc->avg_frame_bandwidth * rc->baseline_gf_interval * af_ratio) /
1396 (rc->baseline_gf_interval + af_ratio - 1)
1397 : (rc->avg_frame_bandwidth * rc->baseline_gf_interval) /
1398 (rc->baseline_gf_interval + af_ratio - 1);
1399#else
1400 target = rc->avg_frame_bandwidth;
1401#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001402 return av1_rc_clamp_pframe_target_size(cpi, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001403}
1404
Yaowu Xuf883b422016-08-30 14:01:10 -07001405static int calc_iframe_target_size_one_pass_vbr(const AV1_COMP *const cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001406 static const int kf_ratio = 25;
1407 const RATE_CONTROL *rc = &cpi->rc;
1408 const int target = rc->avg_frame_bandwidth * kf_ratio;
Yaowu Xuf883b422016-08-30 14:01:10 -07001409 return av1_rc_clamp_iframe_target_size(cpi, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001410}
1411
// One-pass VBR: decide the frame type (KEY/INTER/S_FRAME) for the upcoming
// frame, refresh GF-group state when a new group starts, and set the frame's
// bit target.
void av1_rc_get_one_pass_vbr_params(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;
  int target;
  int altref_enabled = is_altref_enabled(cpi);
  int sframe_dist = cpi->oxcf.sframe_dist;
  int sframe_mode = cpi->oxcf.sframe_mode;
  int sframe_enabled = cpi->oxcf.sframe_enabled;
  // TODO(yaowu): replace the "auto_key && 0" below with proper decision logic.
  if (!cpi->refresh_alt_ref_frame &&
      (cm->current_video_frame == 0 || (cpi->frame_flags & FRAMEFLAGS_KEY) ||
       rc->frames_to_key == 0 || (cpi->oxcf.auto_key && 0))) {
    // Key frame: first frame, externally forced, or key interval expired.
    cm->frame_type = KEY_FRAME;
    rc->this_key_frame_forced =
        cm->current_video_frame != 0 && rc->frames_to_key == 0;
    rc->frames_to_key = cpi->oxcf.key_freq;
    rc->kf_boost = DEFAULT_KF_BOOST;
    rc->source_alt_ref_active = 0;
  } else {
    cm->frame_type = INTER_FRAME;
    // Optionally promote this inter frame to an S-frame, depending on the
    // configured distance/mode and whether alt-refs are in use.
    if (sframe_enabled) {
      if (altref_enabled) {
        if (sframe_mode == 1) {
          // sframe_mode == 1: insert sframe if it matches altref frame.

          if (cm->current_video_frame % sframe_dist == 0 &&
              cm->frame_type != KEY_FRAME && cm->current_video_frame != 0 &&
              cpi->refresh_alt_ref_frame) {
            cm->frame_type = S_FRAME;
          }
        } else {
          // sframe_mode != 1: if sframe will be inserted at the next available
          // altref frame

          if (cm->current_video_frame % sframe_dist == 0 &&
              cm->frame_type != KEY_FRAME && cm->current_video_frame != 0) {
            rc->sframe_due = 1;
          }

          // Deferred insertion: fire on the next alt-ref refresh.
          if (rc->sframe_due && cpi->refresh_alt_ref_frame) {
            cm->frame_type = S_FRAME;
            rc->sframe_due = 0;
          }
        }
      } else {
        // No alt-refs: insert S-frames strictly on the distance grid.
        if (cm->current_video_frame % sframe_dist == 0 &&
            cm->frame_type != KEY_FRAME && cm->current_video_frame != 0) {
          cm->frame_type = S_FRAME;
        }
      }
    }
  }
  // Start of a new golden-frame group: pick a new interval and refresh flags.
  if (rc->frames_till_gf_update_due == 0) {
    rc->baseline_gf_interval = (rc->min_gf_interval + rc->max_gf_interval) / 2;
    rc->frames_till_gf_update_due = rc->baseline_gf_interval;
    // NOTE: frames_till_gf_update_due must be <= frames_to_key.
    if (rc->frames_till_gf_update_due > rc->frames_to_key) {
      rc->frames_till_gf_update_due = rc->frames_to_key;
      rc->constrained_gf_group = 1;
    } else {
      rc->constrained_gf_group = 0;
    }
    cpi->refresh_golden_frame = 1;
    rc->source_alt_ref_pending = USE_ALTREF_FOR_ONE_PASS;
    rc->gfu_boost = DEFAULT_GF_BOOST;
  }

  if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
    av1_cyclic_refresh_update_parameters(cpi);

  if (cm->frame_type == KEY_FRAME)
    target = calc_iframe_target_size_one_pass_vbr(cpi);
  else
    target = calc_pframe_target_size_one_pass_vbr(cpi);
  rc_set_frame_target(cpi, target, cm->width, cm->height);
}
1488
Yaowu Xuf883b422016-08-30 14:01:10 -07001489static int calc_pframe_target_size_one_pass_cbr(const AV1_COMP *cpi) {
1490 const AV1EncoderConfig *oxcf = &cpi->oxcf;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001491 const RATE_CONTROL *rc = &cpi->rc;
1492 const int64_t diff = rc->optimal_buffer_level - rc->buffer_level;
1493 const int64_t one_pct_bits = 1 + rc->optimal_buffer_level / 100;
1494 int min_frame_target =
Yaowu Xuf883b422016-08-30 14:01:10 -07001495 AOMMAX(rc->avg_frame_bandwidth >> 4, FRAME_OVERHEAD_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001496 int target;
1497
1498 if (oxcf->gf_cbr_boost_pct) {
1499 const int af_ratio_pct = oxcf->gf_cbr_boost_pct + 100;
1500 target = cpi->refresh_golden_frame
1501 ? (rc->avg_frame_bandwidth * rc->baseline_gf_interval *
1502 af_ratio_pct) /
1503 (rc->baseline_gf_interval * 100 + af_ratio_pct - 100)
1504 : (rc->avg_frame_bandwidth * rc->baseline_gf_interval * 100) /
1505 (rc->baseline_gf_interval * 100 + af_ratio_pct - 100);
1506 } else {
1507 target = rc->avg_frame_bandwidth;
1508 }
1509
1510 if (diff > 0) {
1511 // Lower the target bandwidth for this frame.
Yaowu Xuf883b422016-08-30 14:01:10 -07001512 const int pct_low = (int)AOMMIN(diff / one_pct_bits, oxcf->under_shoot_pct);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001513 target -= (target * pct_low) / 200;
1514 } else if (diff < 0) {
1515 // Increase the target bandwidth for this frame.
1516 const int pct_high =
Yaowu Xuf883b422016-08-30 14:01:10 -07001517 (int)AOMMIN(-diff / one_pct_bits, oxcf->over_shoot_pct);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001518 target += (target * pct_high) / 200;
1519 }
1520 if (oxcf->rc_max_inter_bitrate_pct) {
1521 const int max_rate =
1522 rc->avg_frame_bandwidth * oxcf->rc_max_inter_bitrate_pct / 100;
Yaowu Xuf883b422016-08-30 14:01:10 -07001523 target = AOMMIN(target, max_rate);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001524 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001525 return AOMMAX(min_frame_target, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001526}
1527
Yaowu Xuf883b422016-08-30 14:01:10 -07001528static int calc_iframe_target_size_one_pass_cbr(const AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001529 const RATE_CONTROL *rc = &cpi->rc;
1530 int target;
1531 if (cpi->common.current_video_frame == 0) {
1532 target = ((rc->starting_buffer_level / 2) > INT_MAX)
1533 ? INT_MAX
1534 : (int)(rc->starting_buffer_level / 2);
1535 } else {
1536 int kf_boost = 32;
1537 double framerate = cpi->framerate;
1538
Yaowu Xuf883b422016-08-30 14:01:10 -07001539 kf_boost = AOMMAX(kf_boost, (int)(2 * framerate - 16));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001540 if (rc->frames_since_key < framerate / 2) {
1541 kf_boost = (int)(kf_boost * rc->frames_since_key / (framerate / 2));
1542 }
1543 target = ((16 + kf_boost) * rc->avg_frame_bandwidth) >> 4;
1544 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001545 return av1_rc_clamp_iframe_target_size(cpi, target);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001546}
1547
// One-pass CBR: decide the frame type for the upcoming frame, refresh
// GF-group state when a new group starts, update cyclic-refresh parameters,
// and set the frame's bit target.
void av1_rc_get_one_pass_cbr_params(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;
  int target;
  // TODO(yaowu): replace the "auto_key && 0" below with proper decision logic.
  if ((cm->current_video_frame == 0 || (cpi->frame_flags & FRAMEFLAGS_KEY) ||
       rc->frames_to_key == 0 || (cpi->oxcf.auto_key && 0))) {
    // Key frame: first frame, externally forced, or key interval expired.
    cm->frame_type = KEY_FRAME;
    rc->this_key_frame_forced =
        cm->current_video_frame != 0 && rc->frames_to_key == 0;
    rc->frames_to_key = cpi->oxcf.key_freq;
    rc->kf_boost = DEFAULT_KF_BOOST;
    rc->source_alt_ref_active = 0;
  } else {
    cm->frame_type = INTER_FRAME;
  }
  // Start of a new golden-frame group: pick the next interval.
  if (rc->frames_till_gf_update_due == 0) {
    if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
      av1_cyclic_refresh_set_golden_update(cpi);
    else
      rc->baseline_gf_interval =
          (rc->min_gf_interval + rc->max_gf_interval) / 2;
    rc->frames_till_gf_update_due = rc->baseline_gf_interval;
    // NOTE: frames_till_gf_update_due must be <= frames_to_key.
    if (rc->frames_till_gf_update_due > rc->frames_to_key)
      rc->frames_till_gf_update_due = rc->frames_to_key;
    cpi->refresh_golden_frame = 1;
    rc->gfu_boost = DEFAULT_GF_BOOST;
  }

  // Any update/change of global cyclic refresh parameters (amount/delta-qp)
  // should be done here, before the frame qp is selected.
  if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ)
    av1_cyclic_refresh_update_parameters(cpi);

  if (cm->frame_type == KEY_FRAME)
    target = calc_iframe_target_size_one_pass_cbr(cpi);
  else
    target = calc_pframe_target_size_one_pass_cbr(cpi);

  rc_set_frame_target(cpi, target, cm->width, cm->height);
  // TODO(afergs): Decide whether to scale up, down, or not at all
}
1591
Yaowu Xuf883b422016-08-30 14:01:10 -07001592int av1_compute_qdelta(const RATE_CONTROL *rc, double qstart, double qtarget,
1593 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001594 int start_index = rc->worst_quality;
1595 int target_index = rc->worst_quality;
1596 int i;
1597
1598 // Convert the average q value to an index.
1599 for (i = rc->best_quality; i < rc->worst_quality; ++i) {
1600 start_index = i;
Yaowu Xuf883b422016-08-30 14:01:10 -07001601 if (av1_convert_qindex_to_q(i, bit_depth) >= qstart) break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001602 }
1603
1604 // Convert the q target to an index
1605 for (i = rc->best_quality; i < rc->worst_quality; ++i) {
1606 target_index = i;
Yaowu Xuf883b422016-08-30 14:01:10 -07001607 if (av1_convert_qindex_to_q(i, bit_depth) >= qtarget) break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001608 }
1609
1610 return target_index - start_index;
1611}
1612
Yaowu Xuf883b422016-08-30 14:01:10 -07001613int av1_compute_qdelta_by_rate(const RATE_CONTROL *rc, FRAME_TYPE frame_type,
1614 int qindex, double rate_target_ratio,
1615 aom_bit_depth_t bit_depth) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001616 int target_index = rc->worst_quality;
1617 int i;
1618
1619 // Look up the current projected bits per block for the base index
1620 const int base_bits_per_mb =
Yaowu Xuf883b422016-08-30 14:01:10 -07001621 av1_rc_bits_per_mb(frame_type, qindex, 1.0, bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001622
1623 // Find the target bits per mb based on the base value and given ratio.
1624 const int target_bits_per_mb = (int)(rate_target_ratio * base_bits_per_mb);
1625
1626 // Convert the q target to an index
1627 for (i = rc->best_quality; i < rc->worst_quality; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001628 if (av1_rc_bits_per_mb(frame_type, i, 1.0, bit_depth) <=
Yaowu Xuc27fc142016-08-22 16:08:15 -07001629 target_bits_per_mb) {
1630 target_index = i;
1631 break;
1632 }
1633 }
1634 return target_index - qindex;
1635}
1636
// Derives the min/max golden-frame (GF/ARF) interval range from the encoder
// configuration, framerate, and lag settings, writing the result into 'rc'.
void av1_rc_set_gf_interval_range(const AV1_COMP *const cpi,
                                  RATE_CONTROL *const rc) {
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;

  // Special case code for 1 pass fixed Q mode tests
  if ((oxcf->pass == 0) && (oxcf->rc_mode == AOM_Q)) {
    rc->max_gf_interval = FIXED_GF_INTERVAL;
    rc->min_gf_interval = FIXED_GF_INTERVAL;
    rc->static_scene_max_gf_interval = FIXED_GF_INTERVAL;
  } else {
    // Set Maximum gf/arf interval
    rc->max_gf_interval = oxcf->max_gf_interval;
    rc->min_gf_interval = oxcf->min_gf_interval;
    // A configured value of 0 means "derive a default from the frame size
    // and framerate".
    if (rc->min_gf_interval == 0)
      rc->min_gf_interval = av1_rc_get_default_min_gf_interval(
          oxcf->width, oxcf->height, cpi->framerate);
    if (rc->max_gf_interval == 0)
      rc->max_gf_interval = av1_rc_get_default_max_gf_interval(
          cpi->framerate, rc->min_gf_interval);

    // Extended interval for genuinely static scenes
    rc->static_scene_max_gf_interval = MAX_LAG_BUFFERS * 2;

    // The static-scene interval cannot exceed the available lag buffers
    // when alt-refs are enabled.
    if (is_altref_enabled(cpi)) {
      if (rc->static_scene_max_gf_interval > oxcf->lag_in_frames - 1)
        rc->static_scene_max_gf_interval = oxcf->lag_in_frames - 1;
    }

    if (rc->max_gf_interval > rc->static_scene_max_gf_interval)
      rc->max_gf_interval = rc->static_scene_max_gf_interval;

#if FIX_GF_INTERVAL_LENGTH
    // Experiment: force a fixed GF group length, overriding the above.
    rc->max_gf_interval = FIXED_GF_LENGTH + 1;
#endif

    // Clamp min to max
    rc->min_gf_interval = AOMMIN(rc->min_gf_interval, rc->max_gf_interval);
  }
}
1676
// Recomputes the per-frame bandwidth figures (average, minimum, maximum)
// whenever the framerate or frame size changes, then refreshes the GF
// interval range.
void av1_rc_update_framerate(AV1_COMP *cpi, int width, int height) {
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  RATE_CONTROL *const rc = &cpi->rc;
  int vbr_max_bits;
  const int MBs = av1_get_MBs(width, height);

  rc->avg_frame_bandwidth = (int)(oxcf->target_bandwidth / cpi->framerate);
  rc->min_frame_bandwidth =
      (int)(rc->avg_frame_bandwidth * oxcf->two_pass_vbrmin_section / 100);

  // Never let the per-frame minimum fall below the fixed overhead estimate.
  rc->min_frame_bandwidth =
      AOMMAX(rc->min_frame_bandwidth, FRAME_OVERHEAD_BITS);

  // A maximum bitrate for a frame is defined.
  // The baseline for this aligns with HW implementations that
  // can support decode of 1080P content up to a bitrate of MAX_MB_RATE bits
  // per 16x16 MB (averaged over a frame). However this limit is extended if
  // a very high rate is given on the command line or the rate cannot
  // be achieved because of a user specified max q (e.g. when the user
  // specifies lossless encode).
  vbr_max_bits =
      (int)(((int64_t)rc->avg_frame_bandwidth * oxcf->two_pass_vbrmax_section) /
            100);
  rc->max_frame_bandwidth =
      AOMMAX(AOMMAX((MBs * MAX_MB_RATE), MAXRATE_1080P), vbr_max_bits);

  av1_rc_set_gf_interval_range(cpi, rc);
}
1705
// Largest per-frame correction, as a percentage of the frame target.
#define VBR_PCT_ADJUSTMENT_LIMIT 50
// For VBR...adjustment to the frame target based on error from previous frames
// Redistributes accumulated over/under-spend (rc->vbr_bits_off_target) into
// the current frame's target, damped by clip position, plus a faster path
// for bits freed by large local undershoot.
static void vbr_rate_correction(AV1_COMP *cpi, int *this_frame_target) {
  RATE_CONTROL *const rc = &cpi->rc;
  int64_t vbr_bits_off_target = rc->vbr_bits_off_target;
  int max_delta;
  double position_factor = 1.0;

  // How far through the clip are we.
  // This number is used to damp the per frame rate correction.
  // Range 0 - 1.0
  // NOTE(review): total_stats.count is the two-pass frame count — this damping
  // presumably stays at 1.0 in one-pass mode; confirm against callers.
  if (cpi->twopass.total_stats.count != 0.) {
    position_factor = sqrt((double)cpi->common.current_video_frame /
                           cpi->twopass.total_stats.count);
  }
  max_delta = (int)(position_factor *
                    ((*this_frame_target * VBR_PCT_ADJUSTMENT_LIMIT) / 100));

  // vbr_bits_off_target > 0 means we have extra bits to spend
  if (vbr_bits_off_target > 0) {
    *this_frame_target += (vbr_bits_off_target > max_delta)
                              ? max_delta
                              : (int)vbr_bits_off_target;
  } else {
    // Deficit: reduce the target, capped at max_delta.
    *this_frame_target -= (vbr_bits_off_target < -max_delta)
                              ? max_delta
                              : (int)-vbr_bits_off_target;
  }

  // Fast redistribution of bits arising from massive local undershoot.
  // Dont do it for kf,arf,gf or overlay frames.
  if (!frame_is_kf_gf_arf(cpi) && !rc->is_src_frame_alt_ref &&
      rc->vbr_bits_off_target_fast) {
    int one_frame_bits = AOMMAX(rc->avg_frame_bandwidth, *this_frame_target);
    int fast_extra_bits;
    // At most one frame's worth of extra bits...
    fast_extra_bits = (int)AOMMIN(rc->vbr_bits_off_target_fast, one_frame_bits);
    // ...and at most 1/8 of the frame or of the remaining fast pool.
    fast_extra_bits = (int)AOMMIN(
        fast_extra_bits,
        AOMMAX(one_frame_bits / 8, rc->vbr_bits_off_target_fast / 8));
    *this_frame_target += (int)fast_extra_bits;
    rc->vbr_bits_off_target_fast -= fast_extra_bits;
  }
}
1749
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001750void av1_set_target_rate(AV1_COMP *cpi, int width, int height) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001751 RATE_CONTROL *const rc = &cpi->rc;
1752 int target_rate = rc->base_frame_target;
1753
1754 // Correction to rate target based on prior over or under shoot.
Yaowu Xuf883b422016-08-30 14:01:10 -07001755 if (cpi->oxcf.rc_mode == AOM_VBR || cpi->oxcf.rc_mode == AOM_CQ)
Yaowu Xuc27fc142016-08-22 16:08:15 -07001756 vbr_rate_correction(cpi, &target_rate);
Debargha Mukherjee7166f222017-09-05 21:32:42 -07001757 rc_set_frame_target(cpi, target_rate, width, height);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001758}