/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <limits.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "aom_dsp/aom_dsp_common.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"
#include "aom_ports/system_state.h"

#include "av1/common/alloccommon.h"
#include "av1/common/common.h"
#include "av1/common/entropymode.h"
#include "av1/common/quant_common.h"
#include "av1/common/seg_common.h"

#include "av1/encoder/aq_cyclicrefresh.h"
#include "av1/encoder/encodemv.h"
#include "av1/encoder/random.h"
#include "av1/encoder/ratectrl.h"

// Max rate target for 1080P and below encodes under normal circumstances
// (1920 * 1080 / (16 * 16)) * MAX_MB_RATE bits per MB
#define MAX_MB_RATE 250
#define MAXRATE_1080P 2025000
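// For example: a 1920x1080 frame contains (1920 * 1080) / (16 * 16) = 8100
// macroblocks, and 8100 * MAX_MB_RATE = 8100 * 250 = 2025000 bits.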

#define DEFAULT_KF_BOOST 2000
#define DEFAULT_GF_BOOST 2000

#define MIN_BPB_FACTOR 0.005
#define MAX_BPB_FACTOR 50

#define FRAME_OVERHEAD_BITS 200

#define ASSIGN_MINQ_TABLE(bit_depth, name)                   \
  do {                                                       \
    switch (bit_depth) {                                     \
      case AOM_BITS_8: name = name##_8; break;               \
      case AOM_BITS_10: name = name##_10; break;             \
      case AOM_BITS_12: name = name##_12; break;             \
      default:                                               \
        assert(0 &&                                          \
               "bit_depth should be AOM_BITS_8, AOM_BITS_10" \
               " or AOM_BITS_12");                           \
        name = NULL;                                         \
    }                                                        \
  } while (0)

// Tables relating active max Q to active min Q
static int kf_low_motion_minq_8[QINDEX_RANGE];
static int kf_high_motion_minq_8[QINDEX_RANGE];
static int arfgf_low_motion_minq_8[QINDEX_RANGE];
static int arfgf_high_motion_minq_8[QINDEX_RANGE];
static int inter_minq_8[QINDEX_RANGE];
static int rtc_minq_8[QINDEX_RANGE];

static int kf_low_motion_minq_10[QINDEX_RANGE];
static int kf_high_motion_minq_10[QINDEX_RANGE];
static int arfgf_low_motion_minq_10[QINDEX_RANGE];
static int arfgf_high_motion_minq_10[QINDEX_RANGE];
static int inter_minq_10[QINDEX_RANGE];
static int rtc_minq_10[QINDEX_RANGE];

static int kf_low_motion_minq_12[QINDEX_RANGE];
static int kf_high_motion_minq_12[QINDEX_RANGE];
static int arfgf_low_motion_minq_12[QINDEX_RANGE];
static int arfgf_high_motion_minq_12[QINDEX_RANGE];
static int inter_minq_12[QINDEX_RANGE];
static int rtc_minq_12[QINDEX_RANGE];

static int gf_high = 2000;
static int gf_low = 400;
static int kf_high = 5000;
static int kf_low = 400;

// Returns how many times fewer pixels there are to encode given the current
// scaling, i.e. the ratio of the configured frame size to the coded size.
// Temporary replacement for rcf_mult and rate_thresh_mult.
static double resize_rate_factor(const AV1_COMP *cpi, int width, int height) {
  return (double)(cpi->oxcf.width * cpi->oxcf.height) / (width * height);
}
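// For example, a 1280x720 source coded at 640x360 gives a factor of
// (1280 * 720) / (640 * 360) = 4.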

// Functions to compute the active minq lookup table entries based on a
// formulaic approach to facilitate easier adjustment of the Q tables.
// The formulae were derived from computing a 3rd order polynomial best
// fit to the original data (after plotting real maxq vs minq (not q index)).
static int get_minq_index(double maxq, double x3, double x2, double x1,
                          aom_bit_depth_t bit_depth) {
  int i;
  const double minqtarget = AOMMIN(((x3 * maxq + x2) * maxq + x1) * maxq, maxq);

  // Special case handling to deal with the step from q2.0
  // down to lossless mode represented by q 1.0.
  if (minqtarget <= 2.0) return 0;

  for (i = 0; i < QINDEX_RANGE; i++) {
    if (minqtarget <= av1_convert_qindex_to_q(i, bit_depth)) return i;
  }

  return QINDEX_RANGE - 1;
}

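// For instance, with the inter-frame coefficients used below, the target is
// roughly minq = 0.00000271 * maxq^3 - 0.00113 * maxq^2 + 0.90 * maxq
// (capped at maxq), which get_minq_index() maps back to a q index.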
static void init_minq_luts(int *kf_low_m, int *kf_high_m, int *arfgf_low,
                           int *arfgf_high, int *inter, int *rtc,
                           aom_bit_depth_t bit_depth) {
  int i;
  for (i = 0; i < QINDEX_RANGE; i++) {
    const double maxq = av1_convert_qindex_to_q(i, bit_depth);
    kf_low_m[i] = get_minq_index(maxq, 0.000001, -0.0004, 0.150, bit_depth);
    kf_high_m[i] = get_minq_index(maxq, 0.0000021, -0.00125, 0.45, bit_depth);
    arfgf_low[i] = get_minq_index(maxq, 0.0000015, -0.0009, 0.30, bit_depth);
    arfgf_high[i] = get_minq_index(maxq, 0.0000021, -0.00125, 0.55, bit_depth);
    inter[i] = get_minq_index(maxq, 0.00000271, -0.00113, 0.90, bit_depth);
    rtc[i] = get_minq_index(maxq, 0.00000271, -0.00113, 0.70, bit_depth);
  }
}

void av1_rc_init_minq_luts(void) {
  init_minq_luts(kf_low_motion_minq_8, kf_high_motion_minq_8,
                 arfgf_low_motion_minq_8, arfgf_high_motion_minq_8,
                 inter_minq_8, rtc_minq_8, AOM_BITS_8);
  init_minq_luts(kf_low_motion_minq_10, kf_high_motion_minq_10,
                 arfgf_low_motion_minq_10, arfgf_high_motion_minq_10,
                 inter_minq_10, rtc_minq_10, AOM_BITS_10);
  init_minq_luts(kf_low_motion_minq_12, kf_high_motion_minq_12,
                 arfgf_low_motion_minq_12, arfgf_high_motion_minq_12,
                 inter_minq_12, rtc_minq_12, AOM_BITS_12);
}

// These functions use formulaic calculations to make playing with the
// quantizer tables easier. If necessary they can be replaced by lookup
// tables if and when things settle down in the experimental bitstream.
double av1_convert_qindex_to_q(int qindex, aom_bit_depth_t bit_depth) {
  // Convert the index to a real Q value (scaled down to match old Q values).
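  // Note: the growing divisors (4, 16, 64) compensate for the larger
  // quantizer scale used at the higher bit depths, so that all bit depths
  // map onto a comparable real Q range.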
  switch (bit_depth) {
    case AOM_BITS_8: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 4.0;
    case AOM_BITS_10: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 16.0;
    case AOM_BITS_12: return av1_ac_quant_Q3(qindex, 0, bit_depth) / 64.0;
    default:
      assert(0 && "bit_depth should be AOM_BITS_8, AOM_BITS_10 or AOM_BITS_12");
      return -1.0;
  }
}

int av1_rc_bits_per_mb(FRAME_TYPE frame_type, int qindex,
                       double correction_factor, aom_bit_depth_t bit_depth) {
  const double q = av1_convert_qindex_to_q(qindex, bit_depth);
  int enumerator = frame_type == KEY_FRAME ? 2700000 : 1800000;

  assert(correction_factor <= MAX_BPB_FACTOR &&
         correction_factor >= MIN_BPB_FACTOR);

  // q based adjustment to baseline enumerator
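  // (i.e. enumerator += enumerator * q / 4096, via the shift below)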
  enumerator += (int)(enumerator * q) >> 12;
  return (int)(enumerator * correction_factor / q);
}

int av1_estimate_bits_at_q(FRAME_TYPE frame_type, int q, int mbs,
                           double correction_factor,
                           aom_bit_depth_t bit_depth) {
  const int bpm =
      (int)(av1_rc_bits_per_mb(frame_type, q, correction_factor, bit_depth));
  return AOMMAX(FRAME_OVERHEAD_BITS,
                (int)((uint64_t)bpm * mbs) >> BPER_MB_NORMBITS);
}

int av1_rc_clamp_pframe_target_size(const AV1_COMP *const cpi, int target) {
  const RATE_CONTROL *rc = &cpi->rc;
  const AV1EncoderConfig *oxcf = &cpi->oxcf;
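  // The frame target may not fall below the configured minimum bandwidth, or
  // 1/32 (>> 5) of the average frame bandwidth, whichever is larger.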
  const int min_frame_target =
      AOMMAX(rc->min_frame_bandwidth, rc->avg_frame_bandwidth >> 5);
  // Clip the frame target to the minimum setup value.
  if (cpi->rc.is_src_frame_alt_ref) {
    // If there is an active ARF at this location use the minimum
    // bits on this frame even if it is a constructed arf.
    // The active maximum quantizer ensures that an appropriate
    // number of bits will be spent if needed for constructed ARFs.
    target = min_frame_target;
  } else if (target < min_frame_target) {
    target = min_frame_target;
  }

  // Clip the frame target to the maximum allowed value.
  if (target > rc->max_frame_bandwidth) target = rc->max_frame_bandwidth;
  if (oxcf->rc_max_inter_bitrate_pct) {
    const int max_rate =
        rc->avg_frame_bandwidth * oxcf->rc_max_inter_bitrate_pct / 100;
    target = AOMMIN(target, max_rate);
  }

  return target;
}

int av1_rc_clamp_iframe_target_size(const AV1_COMP *const cpi, int target) {
  const RATE_CONTROL *rc = &cpi->rc;
  const AV1EncoderConfig *oxcf = &cpi->oxcf;
  if (oxcf->rc_max_intra_bitrate_pct) {
    const int max_rate =
        rc->avg_frame_bandwidth * oxcf->rc_max_intra_bitrate_pct / 100;
    target = AOMMIN(target, max_rate);
  }
  if (target > rc->max_frame_bandwidth) target = rc->max_frame_bandwidth;
  return target;
}

// Update the buffer level: leaky bucket model.
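// For a shown frame, the level gains the average per-frame budget and loses
// the bits actually spent: e.g. a 55000-bit frame against a 40000-bit
// average budget lowers bits_off_target by 15000 bits.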
static void update_buffer_level(AV1_COMP *cpi, int encoded_frame_size) {
  const AV1_COMMON *const cm = &cpi->common;
  RATE_CONTROL *const rc = &cpi->rc;

  // Non-viewable frames are a special case and are treated as pure overhead.
  // TODO(zoeliu): To further explore whether we should treat BWDREF_FRAME
  // differently, since it is a no-show frame.
  if (!cm->show_frame && !rc->is_bwd_ref_frame)
    rc->bits_off_target -= encoded_frame_size;
  else
    rc->bits_off_target += rc->avg_frame_bandwidth - encoded_frame_size;

  // Clip the buffer level to the maximum specified buffer size.
  rc->bits_off_target = AOMMIN(rc->bits_off_target, rc->maximum_buffer_size);
  rc->buffer_level = rc->bits_off_target;
}

int av1_rc_get_default_min_gf_interval(int width, int height,
                                       double framerate) {
  // Assume we do not need any constraint lower than 4K 20 fps
  static const double factor_safe = 3840 * 2160 * 20.0;
  const double factor = width * height * framerate;
  const int default_interval =
      clamp((int)(framerate * 0.125), MIN_GF_INTERVAL, MAX_GF_INTERVAL);

  if (factor <= factor_safe)
    return default_interval;
  else
    return AOMMAX(default_interval,
                  (int)(MIN_GF_INTERVAL * factor / factor_safe + 0.5));
  // Note this logic makes:
  // 4K24: 5
  // 4K30: 6
  // 4K60: 12
}

int av1_rc_get_default_max_gf_interval(double framerate, int min_gf_interval) {
  int interval = AOMMIN(MAX_GF_INTERVAL, (int)(framerate * 0.75));
  interval += (interval & 0x01);  // Round to even value
#if CONFIG_FIX_GF_LENGTH
  interval = AOMMAX(FIXED_GF_LENGTH, interval);
#endif
  return AOMMAX(interval, min_gf_interval);
}

void av1_rc_init(const AV1EncoderConfig *oxcf, int pass, RATE_CONTROL *rc) {
  int i;

  if (pass == 0 && oxcf->rc_mode == AOM_CBR) {
    rc->avg_frame_qindex[KEY_FRAME] = oxcf->worst_allowed_q;
    rc->avg_frame_qindex[INTER_FRAME] = oxcf->worst_allowed_q;
  } else {
    rc->avg_frame_qindex[KEY_FRAME] =
        (oxcf->worst_allowed_q + oxcf->best_allowed_q) / 2;
    rc->avg_frame_qindex[INTER_FRAME] =
        (oxcf->worst_allowed_q + oxcf->best_allowed_q) / 2;
  }

  rc->last_q[KEY_FRAME] = oxcf->best_allowed_q;
  rc->last_q[INTER_FRAME] = oxcf->worst_allowed_q;

  rc->buffer_level = rc->starting_buffer_level;
  rc->bits_off_target = rc->starting_buffer_level;

  rc->rolling_target_bits = rc->avg_frame_bandwidth;
  rc->rolling_actual_bits = rc->avg_frame_bandwidth;
  rc->long_rolling_target_bits = rc->avg_frame_bandwidth;
  rc->long_rolling_actual_bits = rc->avg_frame_bandwidth;

  rc->total_actual_bits = 0;
  rc->total_target_bits = 0;
  rc->total_target_vs_actual = 0;

  rc->frames_since_key = 8;  // Sensible default for first frame.
  rc->this_key_frame_forced = 0;
  rc->next_key_frame_forced = 0;
  rc->source_alt_ref_pending = 0;
  rc->source_alt_ref_active = 0;

  rc->frames_till_gf_update_due = 0;
  rc->ni_av_qi = oxcf->worst_allowed_q;
  rc->ni_tot_qi = 0;
  rc->ni_frames = 0;

  rc->tot_q = 0.0;
  rc->avg_q = av1_convert_qindex_to_q(oxcf->worst_allowed_q, oxcf->bit_depth);

  for (i = 0; i < RATE_FACTOR_LEVELS; ++i) {
    rc->rate_correction_factors[i] = 0.7;
  }
  rc->rate_correction_factors[KF_STD] = 1.0;
  rc->min_gf_interval = oxcf->min_gf_interval;
  rc->max_gf_interval = oxcf->max_gf_interval;
  if (rc->min_gf_interval == 0)
    rc->min_gf_interval = av1_rc_get_default_min_gf_interval(
        oxcf->width, oxcf->height, oxcf->init_framerate);
  if (rc->max_gf_interval == 0)
    rc->max_gf_interval = av1_rc_get_default_max_gf_interval(
        oxcf->init_framerate, rc->min_gf_interval);
  rc->baseline_gf_interval = (rc->min_gf_interval + rc->max_gf_interval) / 2;
}

int av1_rc_drop_frame(AV1_COMP *cpi) {
  const AV1EncoderConfig *oxcf = &cpi->oxcf;
  RATE_CONTROL *const rc = &cpi->rc;

  if (!oxcf->drop_frames_water_mark) {
    return 0;
  } else {
    if (rc->buffer_level < 0) {
      // Always drop if buffer is below 0.
      return 1;
    } else {
      // If buffer is below drop_mark, for now just drop every other frame
      // (starting with the next frame) until it increases back over drop_mark.
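      // With decimation_factor == 1 this alternates between dropping and
      // encoding frames until the buffer recovers above drop_mark.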
      int drop_mark =
          (int)(oxcf->drop_frames_water_mark * rc->optimal_buffer_level / 100);
      if ((rc->buffer_level > drop_mark) && (rc->decimation_factor > 0)) {
        --rc->decimation_factor;
      } else if (rc->buffer_level <= drop_mark && rc->decimation_factor == 0) {
        rc->decimation_factor = 1;
      }
      if (rc->decimation_factor > 0) {
        if (rc->decimation_count > 0) {
          --rc->decimation_count;
          return 1;
        } else {
          rc->decimation_count = rc->decimation_factor;
          return 0;
        }
      } else {
        rc->decimation_count = 0;
        return 0;
      }
    }
  }
}

static double get_rate_correction_factor(const AV1_COMP *cpi, int width,
                                         int height) {
  const RATE_CONTROL *const rc = &cpi->rc;
  double rcf;

  if (cpi->common.frame_type == KEY_FRAME) {
    rcf = rc->rate_correction_factors[KF_STD];
  } else if (cpi->oxcf.pass == 2) {
    RATE_FACTOR_LEVEL rf_lvl =
        cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
    rcf = rc->rate_correction_factors[rf_lvl];
  } else {
    if ((cpi->refresh_alt_ref_frame || cpi->refresh_golden_frame) &&
        !rc->is_src_frame_alt_ref &&
        (cpi->oxcf.rc_mode != AOM_CBR || cpi->oxcf.gf_cbr_boost_pct > 20))
      rcf = rc->rate_correction_factors[GF_ARF_STD];
    else
      rcf = rc->rate_correction_factors[INTER_NORMAL];
  }
  rcf *= resize_rate_factor(cpi, width, height);
  return fclamp(rcf, MIN_BPB_FACTOR, MAX_BPB_FACTOR);
}

static void set_rate_correction_factor(AV1_COMP *cpi, double factor, int width,
                                       int height) {
  RATE_CONTROL *const rc = &cpi->rc;

  // Normalize RCF to account for the size-dependent scaling factor.
  factor /= resize_rate_factor(cpi, width, height);

  factor = fclamp(factor, MIN_BPB_FACTOR, MAX_BPB_FACTOR);

  if (cpi->common.frame_type == KEY_FRAME) {
    rc->rate_correction_factors[KF_STD] = factor;
  } else if (cpi->oxcf.pass == 2) {
    RATE_FACTOR_LEVEL rf_lvl =
        cpi->twopass.gf_group.rf_level[cpi->twopass.gf_group.index];
    rc->rate_correction_factors[rf_lvl] = factor;
  } else {
    if ((cpi->refresh_alt_ref_frame || cpi->refresh_golden_frame) &&
        !rc->is_src_frame_alt_ref &&
        (cpi->oxcf.rc_mode != AOM_CBR || cpi->oxcf.gf_cbr_boost_pct > 20))
      rc->rate_correction_factors[GF_ARF_STD] = factor;
    else
      rc->rate_correction_factors[INTER_NORMAL] = factor;
  }
}

void av1_rc_update_rate_correction_factors(AV1_COMP *cpi, int width,
                                           int height) {
  const AV1_COMMON *const cm = &cpi->common;
  int correction_factor = 100;
  double rate_correction_factor =
      get_rate_correction_factor(cpi, width, height);
  double adjustment_limit;
  const int MBs = av1_get_MBs(width, height);

  int projected_size_based_on_q = 0;

  // Do not update the rate factors for arf overlay frames.
  if (cpi->rc.is_src_frame_alt_ref) return;

  // Clear down mmx registers to allow floating point in what follows
  aom_clear_system_state();

  // Work out how big we would have expected the frame to be at this Q given
  // the current correction factor.
  // Stay in double to avoid int overflow when values are large
  if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cpi->common.seg.enabled) {
    projected_size_based_on_q =
        av1_cyclic_refresh_estimate_bits_at_q(cpi, rate_correction_factor);
  } else {
    projected_size_based_on_q = av1_estimate_bits_at_q(
        cpi->common.frame_type, cm->base_qindex, MBs, rate_correction_factor,
        cm->seq_params.bit_depth);
  }
  // Work out a size correction factor.
  if (projected_size_based_on_q > FRAME_OVERHEAD_BITS)
    correction_factor = (int)((100 * (int64_t)cpi->rc.projected_frame_size) /
                              projected_size_based_on_q);

  // Use a more heavily damped adjustment if we have been oscillating on
  // either side of the target.
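  // For example, a frame twice its projected size gives correction_factor =
  // 200 and adjustment_limit = 0.25 + 0.5 * log10(2.0) ~= 0.40, so only
  // about 40% of the indicated correction is applied on this frame.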
  if (correction_factor > 0) {
    adjustment_limit =
        0.25 + 0.5 * AOMMIN(1, fabs(log10(0.01 * correction_factor)));
  } else {
    adjustment_limit = 0.75;
  }

  cpi->rc.q_2_frame = cpi->rc.q_1_frame;
  cpi->rc.q_1_frame = cm->base_qindex;
  cpi->rc.rc_2_frame = cpi->rc.rc_1_frame;
  if (correction_factor > 110)
    cpi->rc.rc_1_frame = -1;
  else if (correction_factor < 90)
    cpi->rc.rc_1_frame = 1;
  else
    cpi->rc.rc_1_frame = 0;

  if (correction_factor > 102) {
    // We are not already at the worst allowable quality
    correction_factor =
        (int)(100 + ((correction_factor - 100) * adjustment_limit));
    rate_correction_factor = (rate_correction_factor * correction_factor) / 100;
    // Keep rate_correction_factor within limits
    if (rate_correction_factor > MAX_BPB_FACTOR)
      rate_correction_factor = MAX_BPB_FACTOR;
  } else if (correction_factor < 99) {
    // We are not already at the best allowable quality
    correction_factor =
        (int)(100 - ((100 - correction_factor) * adjustment_limit));
    rate_correction_factor = (rate_correction_factor * correction_factor) / 100;

    // Keep rate_correction_factor within limits
    if (rate_correction_factor < MIN_BPB_FACTOR)
      rate_correction_factor = MIN_BPB_FACTOR;
  }

  set_rate_correction_factor(cpi, rate_correction_factor, width, height);
}

int av1_rc_regulate_q(const AV1_COMP *cpi, int target_bits_per_frame,
                      int active_best_quality, int active_worst_quality,
                      int width, int height) {
  const AV1_COMMON *const cm = &cpi->common;
  int q = active_worst_quality;
  int last_error = INT_MAX;
  int i, target_bits_per_mb, bits_per_mb_at_this_q;
  const int MBs = av1_get_MBs(width, height);
  const double correction_factor =
      get_rate_correction_factor(cpi, width, height);

  // Calculate required scaling factor based on target frame size and size of
  // frame produced using previous Q.
  target_bits_per_mb =
      (int)((uint64_t)(target_bits_per_frame) << BPER_MB_NORMBITS) / MBs;

  i = active_best_quality;

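  // Scan upward from the best-quality end and stop at the first q whose
  // predicted bits/MB no longer exceeds the target, then pick whichever of
  // that q and its predecessor is closer to the target.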
  do {
    if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cm->seg.enabled) {
      bits_per_mb_at_this_q =
          (int)av1_cyclic_refresh_rc_bits_per_mb(cpi, i, correction_factor);
    } else {
      bits_per_mb_at_this_q = (int)av1_rc_bits_per_mb(
          cm->frame_type, i, correction_factor, cm->seq_params.bit_depth);
    }

    if (bits_per_mb_at_this_q <= target_bits_per_mb) {
      if ((target_bits_per_mb - bits_per_mb_at_this_q) <= last_error)
        q = i;
      else
        q = i - 1;

      break;
    } else {
      last_error = bits_per_mb_at_this_q - target_bits_per_mb;
    }
  } while (++i <= active_worst_quality);

  // In CBR mode, this makes sure q is between oscillating Qs to prevent
  // resonance.
  if (cpi->oxcf.rc_mode == AOM_CBR &&
      (cpi->rc.rc_1_frame * cpi->rc.rc_2_frame == -1) &&
      cpi->rc.q_1_frame != cpi->rc.q_2_frame) {
    q = clamp(q, AOMMIN(cpi->rc.q_1_frame, cpi->rc.q_2_frame),
              AOMMAX(cpi->rc.q_1_frame, cpi->rc.q_2_frame));
  }
  return q;
}

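// Interpolate between the low- and high-motion minq tables: boosts above
// `high` use the low-motion table, boosts below `low` use the high-motion
// table, and values in between blend the two linearly on gfu_boost.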
static int get_active_quality(int q, int gfu_boost, int low, int high,
                              int *low_motion_minq, int *high_motion_minq) {
  if (gfu_boost > high) {
    return low_motion_minq[q];
  } else if (gfu_boost < low) {
    return high_motion_minq[q];
  } else {
    const int gap = high - low;
    const int offset = high - gfu_boost;
    const int qdiff = high_motion_minq[q] - low_motion_minq[q];
    const int adjustment = ((offset * qdiff) + (gap >> 1)) / gap;
    return low_motion_minq[q] + adjustment;
  }
}

static int get_kf_active_quality(const RATE_CONTROL *const rc, int q,
                                 aom_bit_depth_t bit_depth) {
  int *kf_low_motion_minq;
  int *kf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, kf_low_motion_minq);
  ASSIGN_MINQ_TABLE(bit_depth, kf_high_motion_minq);
  return get_active_quality(q, rc->kf_boost, kf_low, kf_high,
                            kf_low_motion_minq, kf_high_motion_minq);
}

static int get_gf_active_quality(const RATE_CONTROL *const rc, int q,
                                 aom_bit_depth_t bit_depth) {
  int *arfgf_low_motion_minq;
  int *arfgf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_low_motion_minq);
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_high_motion_minq);
  return get_active_quality(q, rc->gfu_boost, gf_low, gf_high,
                            arfgf_low_motion_minq, arfgf_high_motion_minq);
}

#if REDUCE_LAST_ALT_BOOST
static int get_gf_high_motion_quality(int q, aom_bit_depth_t bit_depth) {
  int *arfgf_high_motion_minq;
  ASSIGN_MINQ_TABLE(bit_depth, arfgf_high_motion_minq);
  return arfgf_high_motion_minq[q];
}
#endif

static int calc_active_worst_quality_one_pass_vbr(const AV1_COMP *cpi) {
  const RATE_CONTROL *const rc = &cpi->rc;
  const unsigned int curr_frame = cpi->common.current_video_frame;
  int active_worst_quality;

  if (cpi->common.frame_type == KEY_FRAME) {
    active_worst_quality =
        curr_frame == 0 ? rc->worst_quality : rc->last_q[KEY_FRAME] * 2;
  } else {
    if (!rc->is_src_frame_alt_ref &&
        (cpi->refresh_golden_frame || cpi->refresh_alt2_ref_frame ||
         cpi->refresh_alt_ref_frame)) {
      active_worst_quality = curr_frame == 1 ? rc->last_q[KEY_FRAME] * 5 / 4
                                             : rc->last_q[INTER_FRAME];
    } else {
      active_worst_quality = curr_frame == 1 ? rc->last_q[KEY_FRAME] * 2
                                             : rc->last_q[INTER_FRAME] * 2;
    }
  }
  return AOMMIN(active_worst_quality, rc->worst_quality);
}

// Adjust active_worst_quality level based on buffer level.
static int calc_active_worst_quality_one_pass_cbr(const AV1_COMP *cpi) {
  // Adjust active_worst_quality: If buffer is above the optimal/target level,
  // bring active_worst_quality down depending on fullness of buffer.
  // If buffer is below the optimal level, let the active_worst_quality go from
  // ambient Q (at buffer = optimal level) to worst_quality level
  // (at buffer = critical level).
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *rc = &cpi->rc;
  // Buffer level below which we push active_worst to worst_quality.
  int64_t critical_level = rc->optimal_buffer_level >> 3;
  int64_t buff_lvl_step = 0;
  int adjustment = 0;
  int active_worst_quality;
  int ambient_qp;
  if (cm->frame_type == KEY_FRAME) return rc->worst_quality;
  // For ambient_qp we use minimum of avg_frame_qindex[KEY_FRAME/INTER_FRAME]
  // for the first few frames following key frame. These are both initialized
  // to worst_quality and updated with (3/4, 1/4) average in postencode_update.
  // So for first few frames following key, the qp of that key frame is
  // weighted into the active_worst_quality setting.
  ambient_qp = (cm->current_video_frame < 5)
                   ? AOMMIN(rc->avg_frame_qindex[INTER_FRAME],
                            rc->avg_frame_qindex[KEY_FRAME])
                   : rc->avg_frame_qindex[INTER_FRAME];
  active_worst_quality = AOMMIN(rc->worst_quality, ambient_qp * 5 / 4);
  if (rc->buffer_level > rc->optimal_buffer_level) {
    // Adjust down.
    // Maximum limit for down adjustment, ~30%.
    int max_adjustment_down = active_worst_quality / 3;
    if (max_adjustment_down) {
      buff_lvl_step = ((rc->maximum_buffer_size - rc->optimal_buffer_level) /
                       max_adjustment_down);
      if (buff_lvl_step)
        adjustment = (int)((rc->buffer_level - rc->optimal_buffer_level) /
                           buff_lvl_step);
      active_worst_quality -= adjustment;
    }
  } else if (rc->buffer_level > critical_level) {
    // Adjust up from ambient Q.
    if (critical_level) {
      buff_lvl_step = (rc->optimal_buffer_level - critical_level);
      if (buff_lvl_step) {
        adjustment = (int)((rc->worst_quality - ambient_qp) *
                           (rc->optimal_buffer_level - rc->buffer_level) /
                           buff_lvl_step);
      }
      active_worst_quality = ambient_qp + adjustment;
    }
  } else {
    // Set to worst_quality if buffer is below critical level.
    active_worst_quality = rc->worst_quality;
  }
  return active_worst_quality;
}

static int rc_pick_q_and_bounds_one_pass_cbr(const AV1_COMP *cpi, int width,
                                             int height, int *bottom_index,
                                             int *top_index) {
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  int active_best_quality;
  int active_worst_quality = calc_active_worst_quality_one_pass_cbr(cpi);
  int q;
  int *rtc_minq;
  const int bit_depth = cm->seq_params.bit_depth;
  ASSIGN_MINQ_TABLE(bit_depth, rtc_minq);

  if (frame_is_intra_only(cm)) {
    active_best_quality = rc->best_quality;
    // Handle the special case for key frames forced when we have reached
    // the maximum key frame interval. Here force the Q to a range
    // based on the ambient Q to reduce the risk of popping.
    if (rc->this_key_frame_forced) {
      int qindex = rc->last_boosted_qindex;
      double last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
      int delta_qindex = av1_compute_qdelta(rc, last_boosted_q,
                                            (last_boosted_q * 0.75), bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else if (cm->current_video_frame > 0) {
      // not first frame of one pass and kf_boost is set
      double q_adj_factor = 1.0;
      double q_val;

      active_best_quality =
          get_kf_active_quality(rc, rc->avg_frame_qindex[KEY_FRAME], bit_depth);

      // Allow somewhat lower kf minq with small image formats.
      if ((width * height) <= (352 * 288)) {
        q_adj_factor -= 0.25;
      }

      // Convert the adjustment factor to a qindex delta
      // on active_best_quality.
      q_val = av1_convert_qindex_to_q(active_best_quality, bit_depth);
      active_best_quality +=
          av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth);
    }
  } else if (!rc->is_src_frame_alt_ref &&
             (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
    // Use the lower of active_worst_quality and recent
    // average Q as basis for GF/ARF best Q limit unless last frame was
    // a key frame.
    if (rc->frames_since_key > 1 &&
        rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality) {
      q = rc->avg_frame_qindex[INTER_FRAME];
    } else {
      q = active_worst_quality;
    }
    active_best_quality = get_gf_active_quality(rc, q, bit_depth);
  } else {
    // Use the lower of active_worst_quality and recent/average Q.
    if (cm->current_video_frame > 1) {
      if (rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality)
        active_best_quality = rtc_minq[rc->avg_frame_qindex[INTER_FRAME]];
      else
        active_best_quality = rtc_minq[active_worst_quality];
    } else {
      if (rc->avg_frame_qindex[KEY_FRAME] < active_worst_quality)
        active_best_quality = rtc_minq[rc->avg_frame_qindex[KEY_FRAME]];
      else
        active_best_quality = rtc_minq[active_worst_quality];
    }
  }

  // Clip the active best and worst quality values to limits
  active_best_quality =
      clamp(active_best_quality, rc->best_quality, rc->worst_quality);
  active_worst_quality =
      clamp(active_worst_quality, active_best_quality, rc->worst_quality);

  *top_index = active_worst_quality;
  *bottom_index = active_best_quality;

  // Limit Q range for the adaptive loop.
  if (cm->frame_type == KEY_FRAME && !rc->this_key_frame_forced &&
      !(cm->current_video_frame == 0)) {
    int qdelta = 0;
    aom_clear_system_state();
    qdelta = av1_compute_qdelta_by_rate(&cpi->rc, cm->frame_type,
                                        active_worst_quality, 2.0, bit_depth);
    *top_index = active_worst_quality + qdelta;
    *top_index = AOMMAX(*top_index, *bottom_index);
  }

  // Special case code to try and match quality with forced key frames
  if (cm->frame_type == KEY_FRAME && rc->this_key_frame_forced) {
    q = rc->last_boosted_qindex;
  } else {
    q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
                          active_worst_quality, width, height);
    if (q > *top_index) {
      // Special case when we are targeting the max allowed rate
      if (rc->this_frame_target >= rc->max_frame_bandwidth)
        *top_index = q;
      else
        q = *top_index;
    }
  }

  assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
  assert(*bottom_index <= rc->worst_quality &&
         *bottom_index >= rc->best_quality);
  assert(q <= rc->worst_quality && q >= rc->best_quality);
  return q;
}

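// In AOM_CQ mode, scale the configured cq_level down while the stream is
// running well under its target rate: e.g. at total_actual_bits /
// total_target_bits = 0.05 (below the 0.1 threshold) the level is halved.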
static int get_active_cq_level(const RATE_CONTROL *rc,
                               const AV1EncoderConfig *const oxcf) {
  static const double cq_adjust_threshold = 0.1;
  int active_cq_level = oxcf->cq_level;
  if (oxcf->rc_mode == AOM_CQ && rc->total_target_bits > 0) {
    const double x = (double)rc->total_actual_bits / rc->total_target_bits;
    if (x < cq_adjust_threshold) {
      active_cq_level = (int)(active_cq_level * x / cq_adjust_threshold);
    }
  }
  return active_cq_level;
}

static int rc_pick_q_and_bounds_one_pass_vbr(const AV1_COMP *cpi, int width,
                                             int height, int *bottom_index,
                                             int *top_index) {
  const AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  const int cq_level = get_active_cq_level(rc, oxcf);
  int active_best_quality;
  int active_worst_quality = calc_active_worst_quality_one_pass_vbr(cpi);
  int q;
  int *inter_minq;
  const int bit_depth = cm->seq_params.bit_depth;
  ASSIGN_MINQ_TABLE(bit_depth, inter_minq);

  if (frame_is_intra_only(cm)) {
    if (oxcf->rc_mode == AOM_Q) {
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, bit_depth);
      const int delta_qindex =
          av1_compute_qdelta(rc, q_val, q_val * 0.25, bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else if (rc->this_key_frame_forced) {
      const int qindex = rc->last_boosted_qindex;
      const double last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth);
      const int delta_qindex = av1_compute_qdelta(
          rc, last_boosted_q, last_boosted_q * 0.75, bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {  // not first frame of one pass and kf_boost is set
      double q_adj_factor = 1.0;

      active_best_quality =
          get_kf_active_quality(rc, rc->avg_frame_qindex[KEY_FRAME], bit_depth);

      // Allow somewhat lower kf minq with small image formats.
      if ((width * height) <= (352 * 288)) {
        q_adj_factor -= 0.25;
      }

      // Convert the adjustment factor to a qindex delta on active_best_quality.
      {
        const double q_val =
            av1_convert_qindex_to_q(active_best_quality, bit_depth);
        active_best_quality +=
            av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth);
      }
    }
  } else if (!rc->is_src_frame_alt_ref &&
             (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
    // Use the lower of active_worst_quality and recent
    // average Q as basis for GF/ARF best Q limit unless last frame was
    // a key frame.
    q = (rc->frames_since_key > 1 &&
         rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality)
            ? rc->avg_frame_qindex[INTER_FRAME]
            : rc->avg_frame_qindex[KEY_FRAME];
    // For constrained quality don't allow Q less than the cq level
    if (oxcf->rc_mode == AOM_CQ) {
      if (q < cq_level) q = cq_level;
      active_best_quality = get_gf_active_quality(rc, q, bit_depth);
      // Constrained quality uses a slightly lower active best.
      active_best_quality = active_best_quality * 15 / 16;
    } else if (oxcf->rc_mode == AOM_Q) {
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, bit_depth);
      const int delta_qindex =
          (cpi->refresh_alt_ref_frame)
              ? av1_compute_qdelta(rc, q_val, q_val * 0.40, bit_depth)
              : av1_compute_qdelta(rc, q_val, q_val * 0.50, bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {
      active_best_quality = get_gf_active_quality(rc, q, bit_depth);
    }
  } else {
    if (oxcf->rc_mode == AOM_Q) {
      const int qindex = cq_level;
      const double q_val = av1_convert_qindex_to_q(qindex, bit_depth);
      const double delta_rate[FIXED_GF_INTERVAL] = { 0.50, 1.0, 0.85, 1.0,
                                                     0.70, 1.0, 0.85, 1.0 };
      const int delta_qindex = av1_compute_qdelta(
          rc, q_val,
          q_val * delta_rate[cm->current_video_frame % FIXED_GF_INTERVAL],
          bit_depth);
      active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality);
    } else {
      // Use the lower of active_worst_quality and recent/average Q.
      active_best_quality = (cm->current_video_frame > 1)
                                ? inter_minq[rc->avg_frame_qindex[INTER_FRAME]]
                                : inter_minq[rc->avg_frame_qindex[KEY_FRAME]];
      // For the constrained quality mode we don't want
      // q to fall below the cq level.
      if ((oxcf->rc_mode == AOM_CQ) && (active_best_quality < cq_level)) {
        active_best_quality = cq_level;
      }
    }
  }

  // Clip the active best and worst quality values to limits
  active_best_quality =
      clamp(active_best_quality, rc->best_quality, rc->worst_quality);
  active_worst_quality =
      clamp(active_worst_quality, active_best_quality, rc->worst_quality);

  *top_index = active_worst_quality;
  *bottom_index = active_best_quality;

  // Limit Q range for the adaptive loop.
  {
    int qdelta = 0;
    aom_clear_system_state();
    if (cm->frame_type == KEY_FRAME && !rc->this_key_frame_forced &&
        !(cm->current_video_frame == 0)) {
      qdelta = av1_compute_qdelta_by_rate(&cpi->rc, cm->frame_type,
                                          active_worst_quality, 2.0, bit_depth);
    } else if (!rc->is_src_frame_alt_ref &&
               (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) {
      qdelta = av1_compute_qdelta_by_rate(
          &cpi->rc, cm->frame_type, active_worst_quality, 1.75, bit_depth);
    }
    *top_index = active_worst_quality + qdelta;
    *top_index = AOMMAX(*top_index, *bottom_index);
  }

  if (oxcf->rc_mode == AOM_Q) {
    q = active_best_quality;
    // Special case code to try and match quality with forced key frames
  } else if ((cm->frame_type == KEY_FRAME) && rc->this_key_frame_forced) {
    q = rc->last_boosted_qindex;
  } else {
    q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality,
                          active_worst_quality, width, height);
    if (q > *top_index) {
      // Special case when we are targeting the max allowed rate
      if (rc->this_frame_target >= rc->max_frame_bandwidth)
        *top_index = q;
      else
        q = *top_index;
    }
  }

  assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality);
  assert(*bottom_index <= rc->worst_quality &&
         *bottom_index >= rc->best_quality);
  assert(q <= rc->worst_quality && q >= rc->best_quality);
  return q;
}
| 917 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 918 | int av1_frame_type_qdelta(const AV1_COMP *cpi, int rf_level, int q) { |
Sebastien Alaiwan | 365e644 | 2017-10-16 11:35:00 +0200 | [diff] [blame] | 919 | static const FRAME_TYPE frame_type[RATE_FACTOR_LEVELS] = { |
| 920 | INTER_FRAME, INTER_FRAME, INTER_FRAME, INTER_FRAME, INTER_FRAME, KEY_FRAME |
| 921 | }; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 922 | const AV1_COMMON *const cm = &cpi->common; |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 923 | int qdelta = av1_compute_qdelta_by_rate(&cpi->rc, frame_type[rf_level], q, |
| 924 | rate_factor_deltas[rf_level], |
| 925 | cm->seq_params.bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 926 | return qdelta; |
| 927 | } |
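 | |
 | | // Sign convention sketch (follows from av1_compute_qdelta_by_rate() below):
 | | // a rate_factor_deltas[] entry above 1.0 asks for more bits per MB than the
 | | // base qindex provides, so the search lands on a lower qindex and the
 | | // returned qdelta is negative; entries below 1.0 yield positive deltas.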
| 928 | |
| 929 | #define STATIC_MOTION_THRESH 95 |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 930 | static int rc_pick_q_and_bounds_two_pass(const AV1_COMP *cpi, int width, |
| 931 | int height, int *bottom_index, |
Wei-Ting Lin | ebff377 | 2018-07-24 11:59:40 -0700 | [diff] [blame] | 932 | int *top_index, int *arf_q) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 933 | const AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 934 | const RATE_CONTROL *const rc = &cpi->rc; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 935 | const AV1EncoderConfig *const oxcf = &cpi->oxcf; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 936 | const GF_GROUP *gf_group = &cpi->twopass.gf_group; |
| 937 | const int cq_level = get_active_cq_level(rc, oxcf); |
| 938 | int active_best_quality; |
| 939 | int active_worst_quality = cpi->twopass.active_worst_quality; |
| 940 | int q; |
| 941 | int *inter_minq; |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 942 | const int bit_depth = cm->seq_params.bit_depth; |
| 943 | ASSIGN_MINQ_TABLE(bit_depth, inter_minq); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 944 | |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 945 | #if CUSTOMIZED_GF |
Wei-Ting Lin | 240d9b4 | 2018-07-12 11:48:02 -0700 | [diff] [blame] | 946 | const int is_intrl_arf_boost = |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 947 | gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE; |
| 948 | #else |
| 949 | const int is_intrl_arf_boost = cpi->refresh_alt2_ref_frame; |
Wei-Ting Lin | cffe49d | 2018-07-10 14:15:46 -0700 | [diff] [blame] | 950 | #endif // CUSTOMIZED_GF |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 951 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 952 | if (frame_is_intra_only(cm)) { |
| 953 | // Handle the special case for key frames forced when we have reached |
| 954 | // the maximum key frame interval. Here force the Q to a range |
| 955 | // based on the ambient Q to reduce the risk of popping. |
| 956 | if (rc->this_key_frame_forced) { |
| 957 | double last_boosted_q; |
| 958 | int delta_qindex; |
| 959 | int qindex; |
| 960 | |
| 961 | if (cpi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 962 | qindex = AOMMIN(rc->last_kf_qindex, rc->last_boosted_qindex); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 963 | active_best_quality = qindex; |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 964 | last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 965 | delta_qindex = av1_compute_qdelta(rc, last_boosted_q, |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 966 | last_boosted_q * 1.25, bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 967 | active_worst_quality = |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 968 | AOMMIN(qindex + delta_qindex, active_worst_quality); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 969 | } else { |
| 970 | qindex = rc->last_boosted_qindex; |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 971 | last_boosted_q = av1_convert_qindex_to_q(qindex, bit_depth); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 972 | delta_qindex = av1_compute_qdelta(rc, last_boosted_q, |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 973 | last_boosted_q * 0.75, bit_depth); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 974 | active_best_quality = AOMMAX(qindex + delta_qindex, rc->best_quality); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 975 | } |
| 976 | } else { |
| 977 | // Not forced keyframe. |
| 978 | double q_adj_factor = 1.0; |
| 979 | double q_val; |
| 980 | |
| 981 | // Baseline value derived from cpi->active_worst_quality and kf boost. |
| 982 | active_best_quality = |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 983 | get_kf_active_quality(rc, active_worst_quality, bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 984 | |
| 985 | // Allow somewhat lower kf minq with small image formats. |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 986 | if ((width * height) <= (352 * 288)) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 987 | q_adj_factor -= 0.25; |
| 988 | } |
| 989 | |
| 990 | // Make a further adjustment based on the kf zero motion measure. |
| 991 | q_adj_factor += 0.05 - (0.001 * (double)cpi->twopass.kf_zeromotion_pct); |
| 992 | |
| 993 | // Convert the adjustment factor to a qindex delta |
| 994 | // on active_best_quality. |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 995 | q_val = av1_convert_qindex_to_q(active_best_quality, bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 996 | active_best_quality += |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 997 | av1_compute_qdelta(rc, q_val, q_val * q_adj_factor, bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 998 | } |
Sebastien Alaiwan | 365e644 | 2017-10-16 11:35:00 +0200 | [diff] [blame] | 999 | } else if (!rc->is_src_frame_alt_ref && |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 1000 | (cpi->refresh_golden_frame || is_intrl_arf_boost || |
Sebastien Alaiwan | 365e644 | 2017-10-16 11:35:00 +0200 | [diff] [blame] | 1001 | cpi->refresh_alt_ref_frame)) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1002 | // Use the lower of active_worst_quality and recent |
| 1003 | // average Q as basis for GF/ARF best Q limit unless last frame was |
| 1004 | // a key frame. |
| 1005 | if (rc->frames_since_key > 1 && |
| 1006 | rc->avg_frame_qindex[INTER_FRAME] < active_worst_quality) { |
| 1007 | q = rc->avg_frame_qindex[INTER_FRAME]; |
| 1008 | } else { |
| 1009 | q = active_worst_quality; |
| 1010 | } |
 | 1011 | // For constrained quality don't allow Q less than the cq level.
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1012 | if (oxcf->rc_mode == AOM_CQ) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1013 | if (q < cq_level) q = cq_level; |
| 1014 | |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 1015 | active_best_quality = get_gf_active_quality(rc, q, bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1016 | |
 | 1017 | // Constrained quality uses a slightly lower active best.
| 1018 | active_best_quality = active_best_quality * 15 / 16; |
| 1019 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1020 | } else if (oxcf->rc_mode == AOM_Q) { |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 1021 | if (!cpi->refresh_alt_ref_frame && !is_intrl_arf_boost) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1022 | active_best_quality = cq_level; |
| 1023 | } else { |
Wei-Ting Lin | 422498b | 2018-07-20 17:00:09 -0700 | [diff] [blame] | 1024 | if (gf_group->update_type[gf_group->index] == ARF_UPDATE) { |
| 1025 | active_best_quality = get_gf_active_quality(rc, q, bit_depth); |
Wei-Ting Lin | ebff377 | 2018-07-24 11:59:40 -0700 | [diff] [blame] | 1026 | *arf_q = active_best_quality; |
| 1027 | #if REDUCE_LAST_ALT_BOOST |
Wei-Ting Lin | 993e24f | 2018-08-13 10:34:49 -0700 | [diff] [blame] | 1028 | const int min_boost = get_gf_high_motion_quality(q, bit_depth); |
| 1029 | const int boost = min_boost - active_best_quality; |
Wei-Ting Lin | ebff377 | 2018-07-24 11:59:40 -0700 | [diff] [blame] | 1030 | |
| 1031 | active_best_quality = min_boost - (int)(boost * rc->arf_boost_factor); |
| 1032 | #endif |
Wei-Ting Lin | 422498b | 2018-07-20 17:00:09 -0700 | [diff] [blame] | 1033 | } else { |
| 1034 | active_best_quality = rc->arf_q; |
| 1035 | } |
Wei-Ting Lin | 050fb02 | 2018-07-13 11:45:05 -0700 | [diff] [blame] | 1036 | #if USE_SYMM_MULTI_LAYER |
| 1037 | if (cpi->new_bwdref_update_rule && is_intrl_arf_boost) { |
| 1038 | int this_height = gf_group->pyramid_level[gf_group->index]; |
| 1039 | while (this_height < gf_group->pyramid_height) { |
| 1040 | active_best_quality = (active_best_quality + cq_level + 1) / 2; |
| 1041 | ++this_height; |
| 1042 | } |
| 1043 | } else { |
| 1044 | #endif |
| 1045 | // Modify best quality for second level arfs. For mode AOM_Q this |
| 1046 | // becomes the baseline frame q. |
| 1047 | if (gf_group->rf_level[gf_group->index] == GF_ARF_LOW) |
| 1048 | active_best_quality = (active_best_quality + cq_level + 1) / 2; |
| 1049 | #if USE_SYMM_MULTI_LAYER |
| 1050 | } |
| 1051 | #endif |
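 | | // Worked example (illustrative): each halving step above moves the best
 | | // quality halfway toward cq_level. With active_best_quality = 60 and
 | | // cq_level = 120, one level gives (60 + 120 + 1) / 2 = 90 and a second
 | | // level gives (90 + 120 + 1) / 2 = 105.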
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1052 | } |
| 1053 | } else { |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 1054 | active_best_quality = get_gf_active_quality(rc, q, bit_depth); |
Wei-Ting Lin | 050fb02 | 2018-07-13 11:45:05 -0700 | [diff] [blame] | 1055 | #if USE_SYMM_MULTI_LAYER |
| 1056 | if (cpi->new_bwdref_update_rule && is_intrl_arf_boost) { |
| 1057 | int this_height = gf_group->pyramid_level[gf_group->index]; |
| 1058 | while (this_height < gf_group->pyramid_height) { |
| 1059 | active_best_quality = |
| 1060 | (active_best_quality + active_worst_quality + 1) / 2; |
| 1061 | ++this_height; |
| 1062 | } |
| 1063 | } |
| 1064 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1065 | } |
| 1066 | } else { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1067 | if (oxcf->rc_mode == AOM_Q) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1068 | active_best_quality = cq_level; |
| 1069 | } else { |
| 1070 | active_best_quality = inter_minq[active_worst_quality]; |
| 1071 | |
| 1072 | // For the constrained quality mode we don't want |
| 1073 | // q to fall below the cq level. |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1074 | if ((oxcf->rc_mode == AOM_CQ) && (active_best_quality < cq_level)) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1075 | active_best_quality = cq_level; |
| 1076 | } |
| 1077 | } |
| 1078 | } |
| 1079 | |
| 1080 | // Extension to max or min Q if undershoot or overshoot is outside |
| 1081 | // the permitted range. |
Debargha Mukherjee | d0b9bf7 | 2018-05-14 17:45:27 +0000 | [diff] [blame] | 1082 | if ((cpi->oxcf.rc_mode != AOM_Q) && |
| 1083 | (cpi->twopass.gf_zeromotion_pct < VLOW_MOTION_THRESHOLD)) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1084 | if (frame_is_intra_only(cm) || |
Sebastien Alaiwan | 365e644 | 2017-10-16 11:35:00 +0200 | [diff] [blame] | 1085 | (!rc->is_src_frame_alt_ref && |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 1086 | (cpi->refresh_golden_frame || is_intrl_arf_boost || |
Sebastien Alaiwan | 365e644 | 2017-10-16 11:35:00 +0200 | [diff] [blame] | 1087 | cpi->refresh_alt_ref_frame))) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1088 | active_best_quality -= |
| 1089 | (cpi->twopass.extend_minq + cpi->twopass.extend_minq_fast); |
| 1090 | active_worst_quality += (cpi->twopass.extend_maxq / 2); |
| 1091 | } else { |
| 1092 | active_best_quality -= |
| 1093 | (cpi->twopass.extend_minq + cpi->twopass.extend_minq_fast) / 2; |
| 1094 | active_worst_quality += cpi->twopass.extend_maxq; |
| 1095 | } |
| 1096 | } |
| 1097 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1098 | aom_clear_system_state(); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1099 | // Static forced key frames Q restrictions dealt with elsewhere. |
 | 1100 | if (!frame_is_intra_only(cm) || !rc->this_key_frame_forced ||
| 1101 | (cpi->twopass.last_kfgroup_zeromotion_pct < STATIC_MOTION_THRESH)) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1102 | int qdelta = av1_frame_type_qdelta(cpi, gf_group->rf_level[gf_group->index], |
| 1103 | active_worst_quality); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1104 | active_worst_quality = |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1105 | AOMMAX(active_worst_quality + qdelta, active_best_quality); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1106 | } |
| 1107 | |
| 1108 | // Modify active_best_quality for downscaled normal frames. |
Cheng Chen | 09c83a5 | 2018-06-05 12:27:36 -0700 | [diff] [blame] | 1109 | if (av1_frame_scaled(cm) && !frame_is_kf_gf_arf(cpi)) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1110 | int qdelta = av1_compute_qdelta_by_rate( |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 1111 | rc, cm->frame_type, active_best_quality, 2.0, bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1112 | active_best_quality = |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1113 | AOMMAX(active_best_quality + qdelta, rc->best_quality); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1114 | } |
| 1115 | |
| 1116 | active_best_quality = |
| 1117 | clamp(active_best_quality, rc->best_quality, rc->worst_quality); |
| 1118 | active_worst_quality = |
| 1119 | clamp(active_worst_quality, active_best_quality, rc->worst_quality); |
| 1120 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1121 | if (oxcf->rc_mode == AOM_Q) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1122 | q = active_best_quality; |
| 1123 | // Special case code to try and match quality with forced key frames. |
| 1124 | } else if (frame_is_intra_only(cm) && rc->this_key_frame_forced) { |
| 1125 | // If static since last kf use better of last boosted and last kf q. |
| 1126 | if (cpi->twopass.last_kfgroup_zeromotion_pct >= STATIC_MOTION_THRESH) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1127 | q = AOMMIN(rc->last_kf_qindex, rc->last_boosted_qindex); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1128 | } else { |
| 1129 | q = rc->last_boosted_qindex; |
| 1130 | } |
| 1131 | } else { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1132 | q = av1_rc_regulate_q(cpi, rc->this_frame_target, active_best_quality, |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1133 | active_worst_quality, width, height); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1134 | if (q > active_worst_quality) { |
| 1135 | // Special case when we are targeting the max allowed rate. |
| 1136 | if (rc->this_frame_target >= rc->max_frame_bandwidth) |
| 1137 | active_worst_quality = q; |
| 1138 | else |
| 1139 | q = active_worst_quality; |
| 1140 | } |
| 1141 | } |
 | 1142 | q = clamp(q, active_best_quality, active_worst_quality);
| 1143 | |
| 1144 | *top_index = active_worst_quality; |
| 1145 | *bottom_index = active_best_quality; |
| 1146 | |
| 1147 | assert(*top_index <= rc->worst_quality && *top_index >= rc->best_quality); |
| 1148 | assert(*bottom_index <= rc->worst_quality && |
| 1149 | *bottom_index >= rc->best_quality); |
| 1150 | assert(q <= rc->worst_quality && q >= rc->best_quality); |
| 1151 | return q; |
| 1152 | } |
| 1153 | |
Wei-Ting Lin | 422498b | 2018-07-20 17:00:09 -0700 | [diff] [blame] | 1154 | int av1_rc_pick_q_and_bounds(AV1_COMP *cpi, int width, int height, |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1155 | int *bottom_index, int *top_index) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1156 | int q; |
| 1157 | if (cpi->oxcf.pass == 0) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1158 | if (cpi->oxcf.rc_mode == AOM_CBR) |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1159 | q = rc_pick_q_and_bounds_one_pass_cbr(cpi, width, height, bottom_index, |
| 1160 | top_index); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1161 | else |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1162 | q = rc_pick_q_and_bounds_one_pass_vbr(cpi, width, height, bottom_index, |
| 1163 | top_index); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1164 | } else { |
Wei-Ting Lin | 422498b | 2018-07-20 17:00:09 -0700 | [diff] [blame] | 1165 | assert(cpi->oxcf.pass == 2 && "invalid encode pass"); |
| 1166 | |
| 1167 | GF_GROUP *gf_group = &cpi->twopass.gf_group; |
Wei-Ting Lin | ebff377 | 2018-07-24 11:59:40 -0700 | [diff] [blame] | 1168 | int arf_q = 0; |
Wei-Ting Lin | 422498b | 2018-07-20 17:00:09 -0700 | [diff] [blame] | 1169 | |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1170 | q = rc_pick_q_and_bounds_two_pass(cpi, width, height, bottom_index, |
Wei-Ting Lin | ebff377 | 2018-07-24 11:59:40 -0700 | [diff] [blame] | 1171 | top_index, &arf_q); |
Wei-Ting Lin | 422498b | 2018-07-20 17:00:09 -0700 | [diff] [blame] | 1172 | |
| 1173 | if (gf_group->update_type[gf_group->index] == ARF_UPDATE) { |
Wei-Ting Lin | ebff377 | 2018-07-24 11:59:40 -0700 | [diff] [blame] | 1174 | cpi->rc.arf_q = arf_q; |
Wei-Ting Lin | 422498b | 2018-07-20 17:00:09 -0700 | [diff] [blame] | 1175 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1176 | } |
| 1177 | |
| 1178 | return q; |
| 1179 | } |
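 | |
 | | // Usage sketch for av1_rc_pick_q_and_bounds() (hypothetical caller, for
 | | // illustration only):
 | | //   int bottom_index, top_index;
 | | //   const int q = av1_rc_pick_q_and_bounds(cpi, cm->width, cm->height,
 | | //                                          &bottom_index, &top_index);
 | | // The caller can then recode against [bottom_index, top_index] when the
 | | // projected frame size misses the limits computed by
 | | // av1_rc_compute_frame_size_bounds() below.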
| 1180 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1181 | void av1_rc_compute_frame_size_bounds(const AV1_COMP *cpi, int frame_target, |
| 1182 | int *frame_under_shoot_limit, |
| 1183 | int *frame_over_shoot_limit) { |
| 1184 | if (cpi->oxcf.rc_mode == AOM_Q) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1185 | *frame_under_shoot_limit = 0; |
| 1186 | *frame_over_shoot_limit = INT_MAX; |
| 1187 | } else { |
 | 1188 | // For very small rate targets where the fractional adjustment
 | 1189 | // may be tiny, make sure there is at least a minimum range.
| 1190 | const int tolerance = (cpi->sf.recode_tolerance * frame_target) / 100; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1191 | *frame_under_shoot_limit = AOMMAX(frame_target - tolerance - 200, 0); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1192 | *frame_over_shoot_limit = |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1193 | AOMMIN(frame_target + tolerance + 200, cpi->rc.max_frame_bandwidth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1194 | } |
| 1195 | } |
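 | |
 | | // Worked example for av1_rc_compute_frame_size_bounds() (illustrative
 | | // values): with frame_target = 80000 bits and sf.recode_tolerance = 25
 | | // (percent), tolerance = 20000, so the undershoot limit is
 | | // max(80000 - 20000 - 200, 0) = 59800 and the overshoot limit is
 | | // min(80000 + 20000 + 200, max_frame_bandwidth) = 100200 when
 | | // max_frame_bandwidth is large enough.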
| 1196 | |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1197 | static void rc_set_frame_target(AV1_COMP *cpi, int target, int width, |
| 1198 | int height) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1199 | const AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1200 | RATE_CONTROL *const rc = &cpi->rc; |
| 1201 | |
| 1202 | rc->this_frame_target = target; |
| 1203 | |
Fergus Simpson | fecb2ab | 2017-04-30 15:49:57 -0700 | [diff] [blame] | 1204 | // Modify frame size target when down-scaled. |
Cheng Chen | 09c83a5 | 2018-06-05 12:27:36 -0700 | [diff] [blame] | 1205 | if (av1_frame_scaled(cm)) |
Fergus Simpson | ddc846e | 2017-04-24 18:09:13 -0700 | [diff] [blame] | 1206 | rc->this_frame_target = |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1207 | (int)(rc->this_frame_target * resize_rate_factor(cpi, width, height)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1208 | |
 | 1209 | // Target rate per SB64 (including partial SB64s).
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1210 | rc->sb64_target_rate =
 | 1211 | (int)(((int64_t)rc->this_frame_target * 64 * 64) / (width * height));
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1212 | } |
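 | |
 | | // Worked example for rc_set_frame_target() (illustrative): at 1280x720
 | | // with this_frame_target = 96000 bits, the per-superblock target is
 | | // 96000 * 64 * 64 / (1280 * 720) = 393216000 / 921600 = 426 bits.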
| 1213 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1214 | static void update_alt_ref_frame_stats(AV1_COMP *cpi) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1215 | // This frame's refresh means the next frames don't refresh unless the user specifies otherwise.
| 1216 | RATE_CONTROL *const rc = &cpi->rc; |
| 1217 | rc->frames_since_golden = 0; |
| 1218 | |
| 1219 | // Mark the alt ref as done (setting to 0 means no further alt refs pending). |
| 1220 | rc->source_alt_ref_pending = 0; |
| 1221 | |
| 1222 | // Set the alternate reference frame active flag |
| 1223 | rc->source_alt_ref_active = 1; |
| 1224 | } |
| 1225 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1226 | static void update_golden_frame_stats(AV1_COMP *cpi) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1227 | RATE_CONTROL *const rc = &cpi->rc; |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 1228 | #if CUSTOMIZED_GF |
| 1229 | const TWO_PASS *const twopass = &cpi->twopass; |
| 1230 | const GF_GROUP *const gf_group = &twopass->gf_group; |
| 1231 | const int is_intrnl_arf = |
| 1232 | cpi->oxcf.pass == 2 |
| 1233 | ? gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE |
| 1234 | : cpi->refresh_alt2_ref_frame; |
| 1235 | #else |
 | 1236 | const int is_intrnl_arf = cpi->refresh_alt2_ref_frame;
| 1237 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1238 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1239 | // Update the Golden frame usage counts. |
| 1240 | // NOTE(weitinglin): If we use show_existing_frame for an OVERLAY frame, |
| 1241 | // only the virtual indices for the reference frame will be |
| 1242 | // updated and cpi->refresh_golden_frame will still be zero. |
| 1243 | if (cpi->refresh_golden_frame || rc->is_src_frame_alt_ref) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1244 | // We will not use internal overlay frames to replace the golden frame |
| 1245 | if (!rc->is_src_frame_ext_arf) |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1246 | // This frame's refresh means the next frames don't refresh unless the user specifies otherwise.
| 1247 | rc->frames_since_golden = 0; |
| 1248 | |
 | 1249 | // If we are not using alt ref in the upcoming group, clear the arf
 | 1250 | // active flag. In the multi-arf group case, if the index is not 0 then
 | 1251 | // we are overlaying a mid-group arf, so we should not reset the flag.
| 1252 | if (cpi->oxcf.pass == 2) { |
| 1253 | if (!rc->source_alt_ref_pending && (cpi->twopass.gf_group.index == 0)) |
| 1254 | rc->source_alt_ref_active = 0; |
| 1255 | } else if (!rc->source_alt_ref_pending) { |
| 1256 | rc->source_alt_ref_active = 0; |
| 1257 | } |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 1258 | } else if (!cpi->refresh_alt_ref_frame && !is_intrnl_arf) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1259 | rc->frames_since_golden++; |
| 1260 | } |
| 1261 | } |
| 1262 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1263 | void av1_rc_postencode_update(AV1_COMP *cpi, uint64_t bytes_used) { |
| 1264 | const AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1265 | RATE_CONTROL *const rc = &cpi->rc; |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 1266 | #if CUSTOMIZED_GF |
| 1267 | const TWO_PASS *const twopass = &cpi->twopass; |
| 1268 | const GF_GROUP *const gf_group = &twopass->gf_group; |
| 1269 | const int is_intrnl_arf = |
| 1270 | cpi->oxcf.pass == 2 |
| 1271 | ? gf_group->update_type[gf_group->index] == INTNL_ARF_UPDATE |
| 1272 | : cpi->refresh_alt2_ref_frame; |
| 1273 | #else |
| 1274 | const int is_intrnl_arf = cpi->refresh_alt2_ref_frame; |
| 1275 | #endif |
| 1276 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1277 | const int qindex = cm->base_qindex; |
| 1278 | |
| 1279 | if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ && cm->seg.enabled) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1280 | av1_cyclic_refresh_postencode(cpi); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1281 | } |
| 1282 | |
| 1283 | // Update rate control heuristics |
| 1284 | rc->projected_frame_size = (int)(bytes_used << 3); |
| 1285 | |
| 1286 | // Post encode loop adjustment of Q prediction. |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1287 | av1_rc_update_rate_correction_factors(cpi, cm->width, cm->height); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1288 | |
| 1289 | // Keep a record of last Q and ambient average Q. |
| 1290 | if (cm->frame_type == KEY_FRAME) { |
| 1291 | rc->last_q[KEY_FRAME] = qindex; |
| 1292 | rc->avg_frame_qindex[KEY_FRAME] = |
| 1293 | ROUND_POWER_OF_TWO(3 * rc->avg_frame_qindex[KEY_FRAME] + qindex, 2); |
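 | | // ROUND_POWER_OF_TWO(3 * avg + qindex, 2), used here and in the
 | | // INTER_FRAME branch below, is a fixed-point EMA giving the new sample a
 | | // weight of 1/4. Illustrative: avg = 100, qindex = 60 moves the average a
 | | // quarter of the way to the new value, i.e. to 90.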
| 1294 | } else { |
| 1295 | if (!rc->is_src_frame_alt_ref && |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 1296 | !(cpi->refresh_golden_frame || is_intrnl_arf || |
Zoe Liu | e9b15e2 | 2017-07-19 15:53:01 -0700 | [diff] [blame] | 1297 | cpi->refresh_alt_ref_frame)) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1298 | rc->last_q[INTER_FRAME] = qindex; |
| 1299 | rc->avg_frame_qindex[INTER_FRAME] = |
| 1300 | ROUND_POWER_OF_TWO(3 * rc->avg_frame_qindex[INTER_FRAME] + qindex, 2); |
| 1301 | rc->ni_frames++; |
Urvang Joshi | 20cf30e | 2018-07-19 02:33:58 -0700 | [diff] [blame] | 1302 | rc->tot_q += av1_convert_qindex_to_q(qindex, cm->seq_params.bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1303 | rc->avg_q = rc->tot_q / rc->ni_frames; |
| 1304 | // Calculate the average Q for normal inter frames (not key or GFU |
| 1305 | // frames). |
| 1306 | rc->ni_tot_qi += qindex; |
| 1307 | rc->ni_av_qi = rc->ni_tot_qi / rc->ni_frames; |
| 1308 | } |
| 1309 | } |
| 1310 | |
| 1311 | // Keep record of last boosted (KF/GF/ARF) Q value. |
| 1312 | // If the current frame is coded at a lower Q then we also update it. |
| 1313 | // If all mbs in this group are skipped only update if the Q value is |
| 1314 | // better than that already stored. |
| 1315 | // This is used to help set quality in forced key frames to reduce popping |
| 1316 | if ((qindex < rc->last_boosted_qindex) || (cm->frame_type == KEY_FRAME) || |
| 1317 | (!rc->constrained_gf_group && |
Wei-Ting Lin | 15a4588 | 2018-07-02 16:45:55 -0700 | [diff] [blame] | 1318 | (cpi->refresh_alt_ref_frame || is_intrnl_arf || |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1319 | (cpi->refresh_golden_frame && !rc->is_src_frame_alt_ref)))) { |
| 1320 | rc->last_boosted_qindex = qindex; |
| 1321 | } |
| 1322 | if (cm->frame_type == KEY_FRAME) rc->last_kf_qindex = qindex; |
| 1323 | |
| 1324 | update_buffer_level(cpi, rc->projected_frame_size); |
| 1325 | |
 | 1326 | // Rolling monitors of whether we are overspending or underspending, used
 | 1327 | // to help regulate min and max Q in two-pass mode.
Cheng Chen | 09c83a5 | 2018-06-05 12:27:36 -0700 | [diff] [blame] | 1328 | if (av1_frame_scaled(cm)) |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1329 | rc->this_frame_target = |
| 1330 | (int)(rc->this_frame_target / |
| 1331 | resize_rate_factor(cpi, cm->width, cm->height)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1332 | if (cm->frame_type != KEY_FRAME) { |
| 1333 | rc->rolling_target_bits = ROUND_POWER_OF_TWO( |
| 1334 | rc->rolling_target_bits * 3 + rc->this_frame_target, 2); |
| 1335 | rc->rolling_actual_bits = ROUND_POWER_OF_TWO( |
| 1336 | rc->rolling_actual_bits * 3 + rc->projected_frame_size, 2); |
| 1337 | rc->long_rolling_target_bits = ROUND_POWER_OF_TWO( |
| 1338 | rc->long_rolling_target_bits * 31 + rc->this_frame_target, 5); |
| 1339 | rc->long_rolling_actual_bits = ROUND_POWER_OF_TWO( |
| 1340 | rc->long_rolling_actual_bits * 31 + rc->projected_frame_size, 5); |
| 1341 | } |
| 1342 | |
| 1343 | // Actual bits spent |
| 1344 | rc->total_actual_bits += rc->projected_frame_size; |
Zoe Liu | e9b15e2 | 2017-07-19 15:53:01 -0700 | [diff] [blame] | 1345 | // TODO(zoeliu): To investigate whether we should treat BWDREF_FRAME |
| 1346 | // differently here for rc->avg_frame_bandwidth. |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1347 | rc->total_target_bits += |
| 1348 | (cm->show_frame || rc->is_bwd_ref_frame) ? rc->avg_frame_bandwidth : 0; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1349 | |
| 1350 | rc->total_target_vs_actual = rc->total_actual_bits - rc->total_target_bits; |
| 1351 | |
| 1352 | if (is_altref_enabled(cpi) && cpi->refresh_alt_ref_frame && |
| 1353 | (cm->frame_type != KEY_FRAME)) |
| 1354 | // Update the alternate reference frame stats as appropriate. |
| 1355 | update_alt_ref_frame_stats(cpi); |
| 1356 | else |
| 1357 | // Update the Golden frame stats as appropriate. |
| 1358 | update_golden_frame_stats(cpi); |
| 1359 | |
| 1360 | if (cm->frame_type == KEY_FRAME) rc->frames_since_key = 0; |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1361 | // if (cm->current_video_frame == 1 && cm->show_frame) |
| 1362 | /* |
| 1363 | rc->this_frame_target = |
| 1364 | (int)(rc->this_frame_target / resize_rate_factor(cpi, cm->width, |
| 1365 | cm->height)); |
| 1366 | */ |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1367 | } |
| 1368 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1369 | void av1_rc_postencode_update_drop_frame(AV1_COMP *cpi) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1370 | // Update buffer level with zero size, update frame counters, and return. |
| 1371 | update_buffer_level(cpi, 0); |
| 1372 | cpi->rc.frames_since_key++; |
| 1373 | cpi->rc.frames_to_key--; |
| 1374 | cpi->rc.rc_2_frame = 0; |
| 1375 | cpi->rc.rc_1_frame = 0; |
| 1376 | } |
| 1377 | |
| 1378 | // Use this macro to turn on/off use of alt-refs in one-pass mode. |
| 1379 | #define USE_ALTREF_FOR_ONE_PASS 1 |
| 1380 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1381 | static int calc_pframe_target_size_one_pass_vbr(const AV1_COMP *const cpi) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1382 | static const int af_ratio = 10; |
| 1383 | const RATE_CONTROL *const rc = &cpi->rc; |
| 1384 | int target; |
| 1385 | #if USE_ALTREF_FOR_ONE_PASS |
| 1386 | target = |
| 1387 | (!rc->is_src_frame_alt_ref && |
| 1388 | (cpi->refresh_golden_frame || cpi->refresh_alt_ref_frame)) |
| 1389 | ? (rc->avg_frame_bandwidth * rc->baseline_gf_interval * af_ratio) / |
| 1390 | (rc->baseline_gf_interval + af_ratio - 1) |
| 1391 | : (rc->avg_frame_bandwidth * rc->baseline_gf_interval) / |
| 1392 | (rc->baseline_gf_interval + af_ratio - 1); |
| 1393 | #else |
| 1394 | target = rc->avg_frame_bandwidth; |
| 1395 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1396 | return av1_rc_clamp_pframe_target_size(cpi, target); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1397 | } |
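 | |
 | | // Worked example for the split above (illustrative values): with
 | | // avg_frame_bandwidth = 100000, baseline_gf_interval = 8 and af_ratio = 10,
 | | // a boosted (golden/alt-ref) frame targets 100000 * 8 * 10 / 17 = 470588
 | | // bits and the other frames 100000 * 8 / 17 = 47058 bits each; one boosted
 | | // frame plus seven regular frames spend ~799994 bits, roughly the 8-frame
 | | // budget, so the boost is budget-neutral across the group.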
| 1398 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1399 | static int calc_iframe_target_size_one_pass_vbr(const AV1_COMP *const cpi) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1400 | static const int kf_ratio = 25; |
| 1401 | const RATE_CONTROL *rc = &cpi->rc; |
| 1402 | const int target = rc->avg_frame_bandwidth * kf_ratio; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1403 | return av1_rc_clamp_iframe_target_size(cpi, target); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1404 | } |
| 1405 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1406 | void av1_rc_get_one_pass_vbr_params(AV1_COMP *cpi) { |
| 1407 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1408 | RATE_CONTROL *const rc = &cpi->rc; |
| 1409 | int target; |
Tarek AMARA | c981385 | 2018-03-05 18:40:18 -0500 | [diff] [blame] | 1410 | int altref_enabled = is_altref_enabled(cpi); |
| 1411 | int sframe_dist = cpi->oxcf.sframe_dist; |
| 1412 | int sframe_mode = cpi->oxcf.sframe_mode; |
| 1413 | int sframe_enabled = cpi->oxcf.sframe_enabled; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1414 | // TODO(yaowu): replace the "auto_key && 0" below with proper decision logic. |
| 1415 | if (!cpi->refresh_alt_ref_frame && |
| 1416 | (cm->current_video_frame == 0 || (cpi->frame_flags & FRAMEFLAGS_KEY) || |
| 1417 | rc->frames_to_key == 0 || (cpi->oxcf.auto_key && 0))) { |
| 1418 | cm->frame_type = KEY_FRAME; |
| 1419 | rc->this_key_frame_forced = |
| 1420 | cm->current_video_frame != 0 && rc->frames_to_key == 0; |
| 1421 | rc->frames_to_key = cpi->oxcf.key_freq; |
| 1422 | rc->kf_boost = DEFAULT_KF_BOOST; |
| 1423 | rc->source_alt_ref_active = 0; |
| 1424 | } else { |
| 1425 | cm->frame_type = INTER_FRAME; |
Tarek AMARA | c981385 | 2018-03-05 18:40:18 -0500 | [diff] [blame] | 1426 | if (sframe_enabled) { |
| 1427 | if (altref_enabled) { |
| 1428 | if (sframe_mode == 1) { |
| 1429 | // sframe_mode == 1: insert sframe if it matches altref frame. |
| 1430 | |
| 1431 | if (cm->current_video_frame % sframe_dist == 0 && |
| 1432 | cm->frame_type != KEY_FRAME && cm->current_video_frame != 0 && |
| 1433 | cpi->refresh_alt_ref_frame) { |
| 1434 | cm->frame_type = S_FRAME; |
| 1435 | } |
| 1436 | } else { |
 | 1437 | // sframe_mode != 1: the sframe will be inserted at the next available
 | 1438 | // altref frame.
| 1439 | |
| 1440 | if (cm->current_video_frame % sframe_dist == 0 && |
| 1441 | cm->frame_type != KEY_FRAME && cm->current_video_frame != 0) { |
| 1442 | rc->sframe_due = 1; |
| 1443 | } |
| 1444 | |
| 1445 | if (rc->sframe_due && cpi->refresh_alt_ref_frame) { |
| 1446 | cm->frame_type = S_FRAME; |
| 1447 | rc->sframe_due = 0; |
| 1448 | } |
| 1449 | } |
| 1450 | } else { |
| 1451 | if (cm->current_video_frame % sframe_dist == 0 && |
| 1452 | cm->frame_type != KEY_FRAME && cm->current_video_frame != 0) { |
| 1453 | cm->frame_type = S_FRAME; |
| 1454 | } |
| 1455 | } |
| 1456 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1457 | } |
| 1458 | if (rc->frames_till_gf_update_due == 0) { |
| 1459 | rc->baseline_gf_interval = (rc->min_gf_interval + rc->max_gf_interval) / 2; |
| 1460 | rc->frames_till_gf_update_due = rc->baseline_gf_interval; |
| 1461 | // NOTE: frames_till_gf_update_due must be <= frames_to_key. |
| 1462 | if (rc->frames_till_gf_update_due > rc->frames_to_key) { |
| 1463 | rc->frames_till_gf_update_due = rc->frames_to_key; |
| 1464 | rc->constrained_gf_group = 1; |
| 1465 | } else { |
| 1466 | rc->constrained_gf_group = 0; |
| 1467 | } |
| 1468 | cpi->refresh_golden_frame = 1; |
| 1469 | rc->source_alt_ref_pending = USE_ALTREF_FOR_ONE_PASS; |
| 1470 | rc->gfu_boost = DEFAULT_GF_BOOST; |
| 1471 | } |
Yushin Cho | f16b1ad | 2018-01-29 17:19:58 -0800 | [diff] [blame] | 1472 | |
| 1473 | if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) |
| 1474 | av1_cyclic_refresh_update_parameters(cpi); |
| 1475 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1476 | if (cm->frame_type == KEY_FRAME) |
| 1477 | target = calc_iframe_target_size_one_pass_vbr(cpi); |
| 1478 | else |
| 1479 | target = calc_pframe_target_size_one_pass_vbr(cpi); |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1480 | rc_set_frame_target(cpi, target, cm->width, cm->height); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1481 | } |
| 1482 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1483 | static int calc_pframe_target_size_one_pass_cbr(const AV1_COMP *cpi) { |
| 1484 | const AV1EncoderConfig *oxcf = &cpi->oxcf; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1485 | const RATE_CONTROL *rc = &cpi->rc; |
| 1486 | const int64_t diff = rc->optimal_buffer_level - rc->buffer_level; |
| 1487 | const int64_t one_pct_bits = 1 + rc->optimal_buffer_level / 100; |
| 1488 | int min_frame_target = |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1489 | AOMMAX(rc->avg_frame_bandwidth >> 4, FRAME_OVERHEAD_BITS); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1490 | int target; |
| 1491 | |
| 1492 | if (oxcf->gf_cbr_boost_pct) { |
| 1493 | const int af_ratio_pct = oxcf->gf_cbr_boost_pct + 100; |
| 1494 | target = cpi->refresh_golden_frame |
| 1495 | ? (rc->avg_frame_bandwidth * rc->baseline_gf_interval * |
| 1496 | af_ratio_pct) / |
| 1497 | (rc->baseline_gf_interval * 100 + af_ratio_pct - 100) |
| 1498 | : (rc->avg_frame_bandwidth * rc->baseline_gf_interval * 100) / |
| 1499 | (rc->baseline_gf_interval * 100 + af_ratio_pct - 100); |
| 1500 | } else { |
| 1501 | target = rc->avg_frame_bandwidth; |
| 1502 | } |
| 1503 | |
| 1504 | if (diff > 0) { |
| 1505 | // Lower the target bandwidth for this frame. |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1506 | const int pct_low = (int)AOMMIN(diff / one_pct_bits, oxcf->under_shoot_pct); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1507 | target -= (target * pct_low) / 200; |
| 1508 | } else if (diff < 0) { |
| 1509 | // Increase the target bandwidth for this frame. |
| 1510 | const int pct_high = |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1511 | (int)AOMMIN(-diff / one_pct_bits, oxcf->over_shoot_pct); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1512 | target += (target * pct_high) / 200; |
| 1513 | } |
| 1514 | if (oxcf->rc_max_inter_bitrate_pct) { |
| 1515 | const int max_rate = |
| 1516 | rc->avg_frame_bandwidth * oxcf->rc_max_inter_bitrate_pct / 100; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1517 | target = AOMMIN(target, max_rate); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1518 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1519 | return AOMMAX(min_frame_target, target); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1520 | } |
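 | |
 | | // Worked example for calc_pframe_target_size_one_pass_cbr() (illustrative
 | | // values): with optimal_buffer_level = 6000000 and buffer_level = 4500000,
 | | // diff = 1500000 and one_pct_bits = 60001, so pct_low =
 | | // min(1500000 / 60001, under_shoot_pct) = min(24, ...); for
 | | // under_shoot_pct >= 24 the frame target drops by 24/200, i.e. 12 percent.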
| 1521 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1522 | static int calc_iframe_target_size_one_pass_cbr(const AV1_COMP *cpi) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1523 | const RATE_CONTROL *rc = &cpi->rc; |
| 1524 | int target; |
| 1525 | if (cpi->common.current_video_frame == 0) { |
| 1526 | target = ((rc->starting_buffer_level / 2) > INT_MAX) |
| 1527 | ? INT_MAX |
| 1528 | : (int)(rc->starting_buffer_level / 2); |
| 1529 | } else { |
| 1530 | int kf_boost = 32; |
| 1531 | double framerate = cpi->framerate; |
| 1532 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1533 | kf_boost = AOMMAX(kf_boost, (int)(2 * framerate - 16)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1534 | if (rc->frames_since_key < framerate / 2) { |
| 1535 | kf_boost = (int)(kf_boost * rc->frames_since_key / (framerate / 2)); |
| 1536 | } |
| 1537 | target = ((16 + kf_boost) * rc->avg_frame_bandwidth) >> 4; |
| 1538 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1539 | return av1_rc_clamp_iframe_target_size(cpi, target); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1540 | } |
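 | |
 | | // Worked example for calc_iframe_target_size_one_pass_cbr() (illustrative):
 | | // at framerate = 30, kf_boost = max(32, 2 * 30 - 16) = 44. If the key frame
 | | // arrives only 10 frames after the previous one (10 < 15), the boost scales
 | | // to 44 * 10 / 15 = 29, so target = (16 + 29) * avg_frame_bandwidth / 16,
 | | // about 2.8x the average frame budget before the iframe clamp.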
| 1541 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1542 | void av1_rc_get_one_pass_cbr_params(AV1_COMP *cpi) { |
| 1543 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1544 | RATE_CONTROL *const rc = &cpi->rc; |
| 1545 | int target; |
| 1546 | // TODO(yaowu): replace the "auto_key && 0" below with proper decision logic. |
| 1547 | if ((cm->current_video_frame == 0 || (cpi->frame_flags & FRAMEFLAGS_KEY) || |
| 1548 | rc->frames_to_key == 0 || (cpi->oxcf.auto_key && 0))) { |
| 1549 | cm->frame_type = KEY_FRAME; |
| 1550 | rc->this_key_frame_forced = |
| 1551 | cm->current_video_frame != 0 && rc->frames_to_key == 0; |
| 1552 | rc->frames_to_key = cpi->oxcf.key_freq; |
| 1553 | rc->kf_boost = DEFAULT_KF_BOOST; |
| 1554 | rc->source_alt_ref_active = 0; |
| 1555 | } else { |
| 1556 | cm->frame_type = INTER_FRAME; |
| 1557 | } |
| 1558 | if (rc->frames_till_gf_update_due == 0) { |
| 1559 | if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1560 | av1_cyclic_refresh_set_golden_update(cpi); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1561 | else |
| 1562 | rc->baseline_gf_interval = |
| 1563 | (rc->min_gf_interval + rc->max_gf_interval) / 2; |
| 1564 | rc->frames_till_gf_update_due = rc->baseline_gf_interval; |
| 1565 | // NOTE: frames_till_gf_update_due must be <= frames_to_key. |
| 1566 | if (rc->frames_till_gf_update_due > rc->frames_to_key) |
| 1567 | rc->frames_till_gf_update_due = rc->frames_to_key; |
| 1568 | cpi->refresh_golden_frame = 1; |
| 1569 | rc->gfu_boost = DEFAULT_GF_BOOST; |
| 1570 | } |
| 1571 | |
| 1572 | // Any update/change of global cyclic refresh parameters (amount/delta-qp) |
| 1573 | // should be done here, before the frame qp is selected. |
| 1574 | if (cpi->oxcf.aq_mode == CYCLIC_REFRESH_AQ) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1575 | av1_cyclic_refresh_update_parameters(cpi); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1576 | |
| 1577 | if (cm->frame_type == KEY_FRAME) |
| 1578 | target = calc_iframe_target_size_one_pass_cbr(cpi); |
| 1579 | else |
| 1580 | target = calc_pframe_target_size_one_pass_cbr(cpi); |
| 1581 | |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1582 | rc_set_frame_target(cpi, target, cm->width, cm->height); |
Fergus Simpson | 0757fd8 | 2017-04-28 20:14:27 -0700 | [diff] [blame] | 1583 | // TODO(afergs): Decide whether to scale up, down, or not at all |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1584 | } |
| 1585 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1586 | int av1_compute_qdelta(const RATE_CONTROL *rc, double qstart, double qtarget, |
| 1587 | aom_bit_depth_t bit_depth) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1588 | int start_index = rc->worst_quality; |
| 1589 | int target_index = rc->worst_quality; |
| 1590 | int i; |
| 1591 | |
| 1592 | // Convert the average q value to an index. |
| 1593 | for (i = rc->best_quality; i < rc->worst_quality; ++i) { |
| 1594 | start_index = i; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1595 | if (av1_convert_qindex_to_q(i, bit_depth) >= qstart) break; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1596 | } |
| 1597 | |
| 1598 | // Convert the q target to an index |
| 1599 | for (i = rc->best_quality; i < rc->worst_quality; ++i) { |
| 1600 | target_index = i; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1601 | if (av1_convert_qindex_to_q(i, bit_depth) >= qtarget) break; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1602 | } |
| 1603 | |
| 1604 | return target_index - start_index; |
| 1605 | } |
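 | |
 | | // Usage sketch: callers pass real q values rather than indices, as in the
 | | // pickers above, e.g.
 | | //   delta = av1_compute_qdelta(rc, q_val, q_val * 0.5, bit_depth);
 | | // Halving the target q walks to the first qindex whose q reaches
 | | // q_val * 0.5, so the returned delta is negative (a higher-quality index).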
| 1606 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1607 | int av1_compute_qdelta_by_rate(const RATE_CONTROL *rc, FRAME_TYPE frame_type, |
| 1608 | int qindex, double rate_target_ratio, |
| 1609 | aom_bit_depth_t bit_depth) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1610 | int target_index = rc->worst_quality; |
| 1611 | int i; |
| 1612 | |
| 1613 | // Look up the current projected bits per block for the base index |
| 1614 | const int base_bits_per_mb = |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1615 | av1_rc_bits_per_mb(frame_type, qindex, 1.0, bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1616 | |
| 1617 | // Find the target bits per mb based on the base value and given ratio. |
| 1618 | const int target_bits_per_mb = (int)(rate_target_ratio * base_bits_per_mb); |
| 1619 | |
| 1620 | // Convert the q target to an index |
| 1621 | for (i = rc->best_quality; i < rc->worst_quality; ++i) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1622 | if (av1_rc_bits_per_mb(frame_type, i, 1.0, bit_depth) <= |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1623 | target_bits_per_mb) { |
| 1624 | target_index = i; |
| 1625 | break; |
| 1626 | } |
| 1627 | } |
| 1628 | return target_index - qindex; |
| 1629 | } |
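 | |
 | | // Same sign convention as av1_compute_qdelta(): rate_target_ratio > 1.0
 | | // requests more bits per MB, which is satisfied at a lower qindex, so the
 | | // return value is negative; ratios below 1.0 produce positive deltas. The
 | | // key-frame path above uses a ratio of 2.0 to pull *top_index below
 | | // active_worst_quality.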
| 1630 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1631 | void av1_rc_set_gf_interval_range(const AV1_COMP *const cpi, |
| 1632 | RATE_CONTROL *const rc) { |
| 1633 | const AV1EncoderConfig *const oxcf = &cpi->oxcf; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1634 | |
| 1635 | // Special case code for 1 pass fixed Q mode tests |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1636 | if ((oxcf->pass == 0) && (oxcf->rc_mode == AOM_Q)) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1637 | rc->max_gf_interval = FIXED_GF_INTERVAL; |
| 1638 | rc->min_gf_interval = FIXED_GF_INTERVAL; |
| 1639 | rc->static_scene_max_gf_interval = FIXED_GF_INTERVAL; |
| 1640 | } else { |
| 1641 | // Set Maximum gf/arf interval |
| 1642 | rc->max_gf_interval = oxcf->max_gf_interval; |
| 1643 | rc->min_gf_interval = oxcf->min_gf_interval; |
| 1644 | if (rc->min_gf_interval == 0) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1645 | rc->min_gf_interval = av1_rc_get_default_min_gf_interval( |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1646 | oxcf->width, oxcf->height, cpi->framerate); |
| 1647 | if (rc->max_gf_interval == 0) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1648 | rc->max_gf_interval = av1_rc_get_default_max_gf_interval( |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1649 | cpi->framerate, rc->min_gf_interval); |
| 1650 | |
| 1651 | // Extended interval for genuinely static scenes |
| 1652 | rc->static_scene_max_gf_interval = MAX_LAG_BUFFERS * 2; |
| 1653 | |
| 1654 | if (is_altref_enabled(cpi)) { |
| 1655 | if (rc->static_scene_max_gf_interval > oxcf->lag_in_frames - 1) |
| 1656 | rc->static_scene_max_gf_interval = oxcf->lag_in_frames - 1; |
| 1657 | } |
| 1658 | |
| 1659 | if (rc->max_gf_interval > rc->static_scene_max_gf_interval) |
| 1660 | rc->max_gf_interval = rc->static_scene_max_gf_interval; |
| 1661 | |
| 1662 | // Clamp min to max |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1663 | rc->min_gf_interval = AOMMIN(rc->min_gf_interval, rc->max_gf_interval); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1664 | } |
| 1665 | } |
| 1666 | |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1667 | void av1_rc_update_framerate(AV1_COMP *cpi, int width, int height) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1668 | const AV1EncoderConfig *const oxcf = &cpi->oxcf; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1669 | RATE_CONTROL *const rc = &cpi->rc; |
| 1670 | int vbr_max_bits; |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1671 | const int MBs = av1_get_MBs(width, height); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1672 | |
| 1673 | rc->avg_frame_bandwidth = (int)(oxcf->target_bandwidth / cpi->framerate); |
| 1674 | rc->min_frame_bandwidth = |
| 1675 | (int)(rc->avg_frame_bandwidth * oxcf->two_pass_vbrmin_section / 100); |
| 1676 | |
| 1677 | rc->min_frame_bandwidth = |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1678 | AOMMAX(rc->min_frame_bandwidth, FRAME_OVERHEAD_BITS); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1679 | |
| 1680 | // A maximum bitrate for a frame is defined. |
| 1681 | // The baseline for this aligns with HW implementations that |
| 1682 | // can support decode of 1080P content up to a bitrate of MAX_MB_RATE bits |
 | 1683 | // per 16x16 MB (averaged over a frame). However, this limit is extended if
 | 1684 | // a very high rate is given on the command line or the rate cannot
 | 1685 | // be achieved because of a user-specified max q (e.g. when the user
 | 1686 | // specifies a lossless encode).
| 1687 | vbr_max_bits = |
| 1688 | (int)(((int64_t)rc->avg_frame_bandwidth * oxcf->two_pass_vbrmax_section) / |
| 1689 | 100); |
| 1690 | rc->max_frame_bandwidth = |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1691 | AOMMAX(AOMMAX((MBs * MAX_MB_RATE), MAXRATE_1080P), vbr_max_bits); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1692 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1693 | av1_rc_set_gf_interval_range(cpi, rc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1694 | } |
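 | |
 | | // Worked example for av1_rc_update_framerate() (illustrative values): at
 | | // 1920x1080, MBs works out to about 120 * 68 = 8160, so the
 | | // hardware-derived floor is MBs * MAX_MB_RATE. With avg_frame_bandwidth =
 | | // 133333 (4 Mbps at 30 fps) and a hypothetical two_pass_vbrmax_section of
 | | // 2000 (percent), vbr_max_bits = 133333 * 2000 / 100 = 2666660, which then
 | | // becomes max_frame_bandwidth.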
| 1695 | |
| 1696 | #define VBR_PCT_ADJUSTMENT_LIMIT 50 |
 | 1697 | // For VBR: adjustment to the frame target based on error from previous frames.
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1698 | static void vbr_rate_correction(AV1_COMP *cpi, int *this_frame_target) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1699 | RATE_CONTROL *const rc = &cpi->rc; |
| 1700 | int64_t vbr_bits_off_target = rc->vbr_bits_off_target; |
| 1701 | int max_delta; |
| 1702 | double position_factor = 1.0; |
| 1703 | |
 | 1704 | // How far through the clip we are.
 | 1705 | // This number is used to damp the per-frame rate correction.
 | 1706 | // Range: 0.0 - 1.0.
| 1707 | if (cpi->twopass.total_stats.count != 0.) { |
| 1708 | position_factor = sqrt((double)cpi->common.current_video_frame / |
| 1709 | cpi->twopass.total_stats.count); |
| 1710 | } |
| 1711 | max_delta = (int)(position_factor * |
| 1712 | ((*this_frame_target * VBR_PCT_ADJUSTMENT_LIMIT) / 100)); |
| 1713 | |
| 1714 | // vbr_bits_off_target > 0 means we have extra bits to spend |
| 1715 | if (vbr_bits_off_target > 0) { |
| 1716 | *this_frame_target += (vbr_bits_off_target > max_delta) |
| 1717 | ? max_delta |
| 1718 | : (int)vbr_bits_off_target; |
| 1719 | } else { |
| 1720 | *this_frame_target -= (vbr_bits_off_target < -max_delta) |
| 1721 | ? max_delta |
| 1722 | : (int)-vbr_bits_off_target; |
| 1723 | } |
| 1724 | |
| 1725 | // Fast redistribution of bits arising from massive local undershoot. |
 | 1726 | // Don't do it for kf, arf, gf or overlay frames.
| 1727 | if (!frame_is_kf_gf_arf(cpi) && !rc->is_src_frame_alt_ref && |
| 1728 | rc->vbr_bits_off_target_fast) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1729 | int one_frame_bits = AOMMAX(rc->avg_frame_bandwidth, *this_frame_target); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1730 | int fast_extra_bits; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1731 | fast_extra_bits = (int)AOMMIN(rc->vbr_bits_off_target_fast, one_frame_bits); |
| 1732 | fast_extra_bits = (int)AOMMIN( |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1733 | fast_extra_bits, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1734 | AOMMAX(one_frame_bits / 8, rc->vbr_bits_off_target_fast / 8)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1735 | *this_frame_target += (int)fast_extra_bits; |
| 1736 | rc->vbr_bits_off_target_fast -= fast_extra_bits; |
| 1737 | } |
| 1738 | } |
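 | |
 | | // Worked example for vbr_rate_correction() (illustrative): halfway through
 | | // the clip, position_factor = sqrt(0.5) ~= 0.71. With *this_frame_target =
 | | // 100000 and VBR_PCT_ADJUSTMENT_LIMIT = 50, max_delta ~= 35355 bits, so a
 | | // surplus or deficit adjusts this frame's target by at most ~35 kbit.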
| 1739 | |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1740 | void av1_set_target_rate(AV1_COMP *cpi, int width, int height) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1741 | RATE_CONTROL *const rc = &cpi->rc; |
| 1742 | int target_rate = rc->base_frame_target; |
| 1743 | |
| 1744 | // Correction to rate target based on prior over or under shoot. |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1745 | if (cpi->oxcf.rc_mode == AOM_VBR || cpi->oxcf.rc_mode == AOM_CQ) |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1746 | vbr_rate_correction(cpi, &target_rate); |
Debargha Mukherjee | 7166f22 | 2017-09-05 21:32:42 -0700 | [diff] [blame] | 1747 | rc_set_frame_target(cpi, target_rate, width, height); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1748 | } |