/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <limits.h>
#include <math.h>

#include "av1/common/seg_common.h"
#include "av1/encoder/aq_cyclicrefresh.h"
#include "av1/encoder/ratectrl.h"
#include "av1/encoder/segmentation.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_ports/system_state.h"

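// Rough per-frame call sequence, as suggested by the comments on the
// functions below (the surrounding encoder stages are paraphrased, not
// verbatim):
//   av1_cyclic_refresh_update_parameters(cpi);  // choose refresh parameters
//   av1_cyclic_refresh_setup(cpi);              // set delta-q + segment map
//   ... encode superblocks, calling av1_cyclic_refresh_update_segment()
//       for each coded prediction block ...
//   av1_cyclic_refresh_postencode(cpi);         // count refreshed blocks
// av1_cyclic_refresh_rc_bits_per_mb() and
// av1_cyclic_refresh_estimate_bits_at_q() are the rate-control hooks, used
// in rc_regulate_q() and in the postencode rate-correction update.
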
struct CYCLIC_REFRESH {
  // Percentage of blocks per frame that are targeted as candidates
  // for cyclic refresh.
  int percent_refresh;
  // Maximum q-delta as percentage of base q.
  int max_qdelta_perc;
  // Superblock starting index for cycling through the frame.
  int sb_index;
  // Controls how long a block will need to wait to be refreshed again, in
  // excess of the cycle time, i.e., in the case of all zero motion, a block
  // will be refreshed every (100/percent_refresh + time_for_refresh) frames.
  int time_for_refresh;
  // Target number of (8x8) blocks that are set for delta-q.
  int target_num_seg_blocks;
  // Actual number of (8x8) blocks that delta-q was applied to.
  int actual_num_seg1_blocks;
  int actual_num_seg2_blocks;
  // RD mult. parameters for segment 1.
  int rdmult;
  // Cyclic refresh map.
  signed char *map;
  // Map of the last q a block was coded at.
  uint8_t *last_coded_q_map;
  // Thresholds applied to the projected rate/distortion of the coding block,
  // when deciding whether block should be refreshed.
  int64_t thresh_rate_sb;
  int64_t thresh_dist_sb;
  // Threshold applied to the motion vector (in units of 1/8 pel) of the
  // coding block, when deciding whether block should be refreshed.
  int16_t motion_thresh;
  // Rate target ratio to set q delta.
  double rate_ratio_qdelta;
  // Boost factor for rate target ratio, for segment CR_SEGMENT_ID_BOOST2.
  int rate_boost_fac;
  double low_content_avg;
  int qindex_delta[3];
};

CYCLIC_REFRESH *av1_cyclic_refresh_alloc(int mi_rows, int mi_cols) {
  size_t last_coded_q_map_size;
  CYCLIC_REFRESH *const cr = aom_calloc(1, sizeof(*cr));
  if (cr == NULL) return NULL;

  cr->map = aom_calloc(mi_rows * mi_cols, sizeof(*cr->map));
  if (cr->map == NULL) {
    av1_cyclic_refresh_free(cr);
    return NULL;
  }
  last_coded_q_map_size = mi_rows * mi_cols * sizeof(*cr->last_coded_q_map);
  cr->last_coded_q_map = aom_malloc(last_coded_q_map_size);
  if (cr->last_coded_q_map == NULL) {
    av1_cyclic_refresh_free(cr);
    return NULL;
  }
  assert(MAXQ <= 255);
  memset(cr->last_coded_q_map, MAXQ, last_coded_q_map_size);

  return cr;
}

void av1_cyclic_refresh_free(CYCLIC_REFRESH *cr) {
  aom_free(cr->map);
  aom_free(cr->last_coded_q_map);
  aom_free(cr);
}

// Check if we should turn off cyclic refresh based on bitrate condition.
static int apply_cyclic_refresh_bitrate(const AV1_COMMON *cm,
                                        const RATE_CONTROL *rc) {
  // Turn off cyclic refresh if bits available per frame is not sufficiently
  // larger than bit cost of segmentation. Segment map bit cost should scale
  // with number of seg blocks, so compare available bits to number of blocks.
  // Average bits available per frame = avg_frame_bandwidth
  // Number of (8x8) blocks in frame = mi_rows * mi_cols;
  const float factor = 0.25;
  const int number_blocks = cm->mi_rows * cm->mi_cols;
  // The condition below corresponds to turning off at target bitrates
  // (at 30fps) of ~12kbps for CIF, 36kbps for VGA, and 100kbps for HD/720p.
  // Also turn off at very small frame sizes, to avoid too large a fraction of
  // superblocks being refreshed per frame. Threshold below is less than QCIF.
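  // For example, at VGA (640x480) there are 80 * 60 = 4800 8x8 blocks, so the
  // cutoff below is 0.25 * 4800 = 1200 bits per frame, i.e. ~36kbps at 30fps.
  // The second condition (fewer than five 64x64 superblocks' worth of 8x8
  // blocks) only triggers for frames smaller than QCIF.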
  if (rc->avg_frame_bandwidth < factor * number_blocks ||
      number_blocks / 64 < 5)
    return 0;
  else
    return 1;
}

// Check if this coding block, of size bsize, should be considered for refresh
// (lower-qp coding). Decision can be based on various factors, such as
// size of the coding block (i.e., below min_block size rejected), coding
// mode, and rate/distortion.
static int candidate_refresh_aq(const CYCLIC_REFRESH *cr,
                                const MB_MODE_INFO *mbmi, int64_t rate,
                                int64_t dist, int bsize) {
  MV mv = mbmi->mv[0].as_mv;
  // Reject the block for lower-qp coding if projected distortion
  // is above the threshold, and any of the following is true:
  // 1) mode uses large mv
  // 2) mode is an intra-mode
  // Otherwise accept for refresh.
  if (dist > cr->thresh_dist_sb &&
      (mv.row > cr->motion_thresh || mv.row < -cr->motion_thresh ||
       mv.col > cr->motion_thresh || mv.col < -cr->motion_thresh ||
       !is_inter_block(mbmi)))
    return CR_SEGMENT_ID_BASE;
  else if (bsize >= BLOCK_16X16 && rate < cr->thresh_rate_sb &&
           is_inter_block(mbmi) && mbmi->mv[0].as_int == 0 &&
           cr->rate_boost_fac > 10)
    // More aggressive delta-q for bigger blocks with zero motion.
    return CR_SEGMENT_ID_BOOST2;
  else
    return CR_SEGMENT_ID_BOOST1;
}

// Compute delta-q for the segment.
static int compute_deltaq(const AV1_COMP *cpi, int q, double rate_factor) {
  const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const RATE_CONTROL *const rc = &cpi->rc;
  int deltaq = av1_compute_qdelta_by_rate(rc, cpi->common.frame_type, q,
                                          rate_factor, cpi->common.bit_depth);
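  // Limit the (negative) delta so its magnitude never exceeds
  // max_qdelta_perc percent of the current q-index; e.g. with
  // max_qdelta_perc == 50 and q == 120, the delta is capped at -60.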
  if ((-deltaq) > cr->max_qdelta_perc * q / 100) {
    deltaq = -cr->max_qdelta_perc * q / 100;
  }
  return deltaq;
}

// For the just encoded frame, estimate the bits, incorporating the delta-q
// from non-base segment. For now ignore effect of multiple segments
// (with different delta-q). Note this function is called in the postencode
// (called from rc_update_rate_correction_factors()).
int av1_cyclic_refresh_estimate_bits_at_q(const AV1_COMP *cpi,
                                          double correction_factor) {
  const AV1_COMMON *const cm = &cpi->common;
  const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int estimated_bits;
  int mbs = cm->MBs;
  int num8x8bl = mbs << 2;
  // Weight for non-base segments: use actual number of blocks refreshed in
  // previous/just encoded frame. Note number of blocks here is in 8x8 units.
  double weight_segment1 = (double)cr->actual_num_seg1_blocks / num8x8bl;
  double weight_segment2 = (double)cr->actual_num_seg2_blocks / num8x8bl;
  // Take segment weighted average for estimated bits.
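  // That is, roughly:
  //   bits ~= (1 - w1 - w2) * B(q_base) + w1 * B(q_base + delta1)
  //                                     + w2 * B(q_base + delta2)
  // where B() is av1_estimate_bits_at_q(), w1/w2 are the segment weights
  // above, and delta1/delta2 are cr->qindex_delta[1]/[2].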
  estimated_bits =
      (int)((1.0 - weight_segment1 - weight_segment2) *
                av1_estimate_bits_at_q(cm->frame_type, cm->base_qindex, mbs,
                                       correction_factor, cm->bit_depth) +
            weight_segment1 *
                av1_estimate_bits_at_q(cm->frame_type,
                                       cm->base_qindex + cr->qindex_delta[1],
                                       mbs, correction_factor, cm->bit_depth) +
            weight_segment2 *
                av1_estimate_bits_at_q(cm->frame_type,
                                       cm->base_qindex + cr->qindex_delta[2],
                                       mbs, correction_factor, cm->bit_depth));
  return estimated_bits;
}

// Prior to encoding the frame, estimate the bits per mb, for a given q = i and
// a corresponding delta-q (for segment 1). This function is called in
// rc_regulate_q() to set the base qp index.
// Note: the segment map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or
// to 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock, prior to encoding.
int av1_cyclic_refresh_rc_bits_per_mb(const AV1_COMP *cpi, int i,
                                      double correction_factor) {
  const AV1_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int bits_per_mb;
  int num8x8bl = cm->MBs << 2;
  // Weight for segment prior to encoding: take the average of the target
  // number for the frame to be encoded and the actual from the previous frame.
  double weight_segment =
      (double)((cr->target_num_seg_blocks + cr->actual_num_seg1_blocks +
                cr->actual_num_seg2_blocks) >>
               1) /
      num8x8bl;
  // Compute delta-q corresponding to qindex i.
  int deltaq = compute_deltaq(cpi, i, cr->rate_ratio_qdelta);
  // Take segment weighted average for bits per mb.
  bits_per_mb = (int)((1.0 - weight_segment) *
                          av1_rc_bits_per_mb(cm->frame_type, i,
                                             correction_factor, cm->bit_depth) +
                      weight_segment *
                          av1_rc_bits_per_mb(cm->frame_type, i + deltaq,
                                             correction_factor, cm->bit_depth));
  return bits_per_mb;
}

// Prior to coding a given prediction block, of size bsize at (mi_row, mi_col),
// check if we should reset the segment_id, and update the cyclic_refresh map
// and segmentation map.
void av1_cyclic_refresh_update_segment(const AV1_COMP *cpi,
                                       MB_MODE_INFO *const mbmi, int mi_row,
                                       int mi_col, BLOCK_SIZE bsize,
                                       int64_t rate, int64_t dist, int skip) {
  const AV1_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int xmis = AOMMIN(cm->mi_cols - mi_col, bw);
  const int ymis = AOMMIN(cm->mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_cols + mi_col;
  const int refresh_this_block =
      candidate_refresh_aq(cr, mbmi, rate, dist, bsize);
  // Default is to not update the refresh map.
  int new_map_value = cr->map[block_index];
  int x = 0;
  int y = 0;

  // If this block is labeled for refresh, check if we should reset the
  // segment_id.
  if (cyclic_refresh_segment_id_boosted(mbmi->segment_id)) {
    mbmi->segment_id = refresh_this_block;
    // Reset segment_id if the block will be skipped.
    if (skip) mbmi->segment_id = CR_SEGMENT_ID_BASE;
  }

  // Update the cyclic refresh map, to be used for setting the segmentation map
  // for the next frame. If the block will be refreshed this frame, mark it
  // as clean. The magnitude of the negative value influences how long before
  // we consider it for refresh again.
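  // Map value convention (as used here and in cyclic_refresh_update_map()):
  // negative = recently refreshed (counted back up towards 0 as the cyclic
  // scan revisits it), 0 = candidate for refresh/cleanup, 1 = not currently
  // a refresh candidate.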
  if (cyclic_refresh_segment_id_boosted(mbmi->segment_id)) {
    new_map_value = -cr->time_for_refresh;
  } else if (refresh_this_block) {
    // Else if it is accepted as candidate for refresh, and has not already
    // been refreshed (marked as 1) then mark it as a candidate for cleanup
    // for future time (marked as 0), otherwise don't update it.
    if (cr->map[block_index] == 1) new_map_value = 0;
  } else {
    // Leave it marked as block that is not candidate for refresh.
    new_map_value = 1;
  }

  // Update entries in the cyclic refresh map with new_map_value, and
  // copy mbmi->segment_id into global segmentation map.
  for (y = 0; y < ymis; y++)
    for (x = 0; x < xmis; x++) {
      int map_offset = block_index + y * cm->mi_cols + x;
      cr->map[map_offset] = new_map_value;
      cpi->segmentation_map[map_offset] = mbmi->segment_id;
      // Inter skip blocks were clearly not coded at the current qindex, so
      // don't update the map for them. For cases where motion is non-zero or
      // the reference frame isn't the previous frame, the previous value in
      // the map for this spatial location is not entirely correct.
      if ((!is_inter_block(mbmi) || !skip) &&
          mbmi->segment_id <= CR_SEGMENT_ID_BOOST2) {
        cr->last_coded_q_map[map_offset] = clamp(
            cm->base_qindex + cr->qindex_delta[mbmi->segment_id], 0, MAXQ);
      } else if (is_inter_block(mbmi) && skip &&
                 mbmi->segment_id <= CR_SEGMENT_ID_BOOST2) {
        cr->last_coded_q_map[map_offset] =
            AOMMIN(clamp(cm->base_qindex + cr->qindex_delta[mbmi->segment_id],
                         0, MAXQ),
                   cr->last_coded_q_map[map_offset]);
      }
    }
}

// Update the actual number of blocks that the segment delta-q was applied to.
void av1_cyclic_refresh_postencode(AV1_COMP *const cpi) {
  AV1_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  unsigned char *const seg_map = cpi->segmentation_map;
  int mi_row, mi_col;
  cr->actual_num_seg1_blocks = 0;
  cr->actual_num_seg2_blocks = 0;
  for (mi_row = 0; mi_row < cm->mi_rows; mi_row++)
    for (mi_col = 0; mi_col < cm->mi_cols; mi_col++) {
      if (cyclic_refresh_segment_id(seg_map[mi_row * cm->mi_cols + mi_col]) ==
          CR_SEGMENT_ID_BOOST1)
        cr->actual_num_seg1_blocks++;
      else if (cyclic_refresh_segment_id(
                   seg_map[mi_row * cm->mi_cols + mi_col]) ==
               CR_SEGMENT_ID_BOOST2)
        cr->actual_num_seg2_blocks++;
    }
}

// Set golden frame update interval, for 1 pass CBR mode.
void av1_cyclic_refresh_set_golden_update(AV1_COMP *const cpi) {
  RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  // Set minimum gf_interval for GF update to a multiple (currently 4) of the
  // refresh period. Depending on past encoding stats, GF flag may be reset
  // and update may not occur until next baseline_gf_interval.
  if (cr->percent_refresh > 0)
    rc->baseline_gf_interval = 4 * (100 / cr->percent_refresh);
  else
    rc->baseline_gf_interval = 40;
}

// Update some encoding stats (from the just encoded frame). If this frame's
// background has high motion, refresh the golden frame. Otherwise, if the
// golden reference is to be updated, check if we should NOT update the golden
// ref.
void av1_cyclic_refresh_check_golden_update(AV1_COMP *const cpi) {
  AV1_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int mi_row, mi_col;
  double fraction_low = 0.0;
  int low_content_frame = 0;

  MODE_INFO **mi;
  RATE_CONTROL *const rc = &cpi->rc;
  const int rows = cm->mi_rows, cols = cm->mi_cols;
  int cnt1 = 0, cnt2 = 0;
  int force_gf_refresh = 0;

  for (mi_row = 0; mi_row < rows; mi_row++) {
    mi = cm->mi_grid_visible + mi_row * cm->mi_stride;

    for (mi_col = 0; mi_col < cols; mi_col++) {
      int16_t abs_mvr = mi[0]->mbmi.mv[0].as_mv.row >= 0
                            ? mi[0]->mbmi.mv[0].as_mv.row
                            : -1 * mi[0]->mbmi.mv[0].as_mv.row;
      int16_t abs_mvc = mi[0]->mbmi.mv[0].as_mv.col >= 0
                            ? mi[0]->mbmi.mv[0].as_mv.col
                            : -1 * mi[0]->mbmi.mv[0].as_mv.col;

      // Calculate the motion of the background.
      if (abs_mvr <= 16 && abs_mvc <= 16) {
        cnt1++;
        if (abs_mvr == 0 && abs_mvc == 0) cnt2++;
      }
      mi++;

      // Accumulate low_content_frame.
      if (cr->map[mi_row * cols + mi_col] < 1) low_content_frame++;
    }
  }

  // For video conference clips, if the background has high motion in the
  // current frame because of camera movement, set this frame as the golden
  // frame. Use 70% and 5% as the thresholds for golden frame refreshing.
  // Also, force this frame as a golden update frame if this frame will change
  // the resolution (resize_pending != 0).
  if (cpi->resize_pending != 0 ||
      (cnt1 * 10 > (70 * rows * cols) && cnt2 * 20 < cnt1)) {
    av1_cyclic_refresh_set_golden_update(cpi);
    rc->frames_till_gf_update_due = rc->baseline_gf_interval;

    if (rc->frames_till_gf_update_due > rc->frames_to_key)
      rc->frames_till_gf_update_due = rc->frames_to_key;
    cpi->refresh_golden_frame = 1;
    force_gf_refresh = 1;
  }

  fraction_low = (double)low_content_frame / (rows * cols);
  // Update average.
  cr->low_content_avg = (fraction_low + 3 * cr->low_content_avg) / 4;
  if (!force_gf_refresh && cpi->refresh_golden_frame == 1) {
    // Don't update the golden reference if the amount of low_content for the
    // current encoded frame is small, or if the recursive average of the
    // low_content over the update interval window falls below threshold.
    if (fraction_low < 0.8 || cr->low_content_avg < 0.7)
      cpi->refresh_golden_frame = 0;
    // Reset for the next interval.
    cr->low_content_avg = fraction_low;
  }
}

// Update the segmentation map, and related quantities: cyclic refresh map,
// refresh sb_index, and target number of blocks to be refreshed.
// The map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or to
// 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock.
// Blocks labeled as BOOST1 may later get set to BOOST2 (during the
// encoding of the superblock).
static void cyclic_refresh_update_map(AV1_COMP *const cpi) {
  AV1_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  unsigned char *const seg_map = cpi->segmentation_map;
  int i, block_count, bl_index, sb_rows, sb_cols, sbs_in_frame;
  int xmis, ymis, x, y;
  memset(seg_map, CR_SEGMENT_ID_BASE, cm->mi_rows * cm->mi_cols);
  sb_cols = (cm->mi_cols + cm->mib_size - 1) / cm->mib_size;
  sb_rows = (cm->mi_rows + cm->mib_size - 1) / cm->mib_size;
  sbs_in_frame = sb_cols * sb_rows;
  // Number of target blocks to get the q delta (segment 1).
  block_count = cr->percent_refresh * cm->mi_rows * cm->mi_cols / 100;
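  // With the percent_refresh of 10 set in
  // av1_cyclic_refresh_update_parameters(), block_count is ~10% of the 8x8
  // blocks in the frame, so a full pass over the frame takes roughly
  // 100 / percent_refresh (plus time_for_refresh) frames, matching the
  // comment on time_for_refresh in the struct above.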
  // Set the segmentation map: cycle through the superblocks, starting at
  // cr->sb_index, and stopping when either block_count blocks have been found
  // to be refreshed, or we have passed through the whole frame.
  assert(cr->sb_index < sbs_in_frame);
  i = cr->sb_index;
  cr->target_num_seg_blocks = 0;
  do {
    int sum_map = 0;
    // Get the mi_row/mi_col corresponding to superblock index i.
    int sb_row_index = (i / sb_cols);
    int sb_col_index = i - sb_row_index * sb_cols;
    int mi_row = sb_row_index * cm->mib_size;
    int mi_col = sb_col_index * cm->mib_size;
    int qindex_thresh =
        cpi->oxcf.content == AOM_CONTENT_SCREEN
            ? av1_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST2, cm->base_qindex)
            : 0;
    assert(mi_row >= 0 && mi_row < cm->mi_rows);
    assert(mi_col >= 0 && mi_col < cm->mi_cols);
    bl_index = mi_row * cm->mi_cols + mi_col;
    // Loop through all MI blocks in superblock and update map.
    xmis = AOMMIN(cm->mi_cols - mi_col, cm->mib_size);
    ymis = AOMMIN(cm->mi_rows - mi_row, cm->mib_size);
    for (y = 0; y < ymis; y++) {
      for (x = 0; x < xmis; x++) {
        const int bl_index2 = bl_index + y * cm->mi_cols + x;
        // If the block is a candidate for cleanup then mark it
        // for possible boost/refresh (segment 1). The segment id may get
        // reset to 0 later if the block gets coded with anything other than
        // ZEROMV.
        if (cr->map[bl_index2] == 0) {
          if (cr->last_coded_q_map[bl_index2] > qindex_thresh) sum_map++;
        } else if (cr->map[bl_index2] < 0) {
          cr->map[bl_index2]++;
        }
      }
    }
    // Enforce a constant segment over the superblock.
    // If the segment covers at least half of the superblock, set it to 1.
    if (sum_map >= xmis * ymis / 2) {
      for (y = 0; y < ymis; y++)
        for (x = 0; x < xmis; x++) {
          seg_map[bl_index + y * cm->mi_cols + x] = CR_SEGMENT_ID_BOOST1;
        }
      cr->target_num_seg_blocks += xmis * ymis;
    }
    i++;
    if (i == sbs_in_frame) {
      i = 0;
    }
  } while (cr->target_num_seg_blocks < block_count && i != cr->sb_index);
  cr->sb_index = i;
}

// Set cyclic refresh parameters.
void av1_cyclic_refresh_update_parameters(AV1_COMP *const cpi) {
  const RATE_CONTROL *const rc = &cpi->rc;
  const AV1_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  cr->percent_refresh = 10;
  cr->max_qdelta_perc = 50;
  cr->time_for_refresh = 0;
  // Use larger delta-qp (increase rate_ratio_qdelta) for the first few (~4)
  // periods of the refresh cycle, after a key frame.
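  // With percent_refresh == 10 as set above, this covers the first
  // 4 * 10 == 40 frames after the key frame, i.e. about four 10-frame
  // refresh periods.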
  if (rc->frames_since_key < 4 * cr->percent_refresh)
    cr->rate_ratio_qdelta = 3.0;
  else
    cr->rate_ratio_qdelta = 2.0;
  // Adjust some parameters for low resolutions at low bitrates.
  if (cm->width <= 352 && cm->height <= 288 && rc->avg_frame_bandwidth < 3400) {
    cr->motion_thresh = 4;
    cr->rate_boost_fac = 10;
  } else {
    cr->motion_thresh = 32;
    cr->rate_boost_fac = 17;
  }
}

// Setup cyclic background refresh: set delta q and segmentation map.
void av1_cyclic_refresh_setup(AV1_COMP *const cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  struct segmentation *const seg = &cm->seg;
  const int apply_cyclic_refresh = apply_cyclic_refresh_bitrate(cm, rc);
  if (cm->current_video_frame == 0) cr->low_content_avg = 0.0;
  // Don't apply refresh on key frames or enhancement layer frames.
  if (!apply_cyclic_refresh || cm->frame_type == KEY_FRAME) {
    // Set segmentation map to 0 and disable.
    unsigned char *const seg_map = cpi->segmentation_map;
    memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
    av1_disable_segmentation(&cm->seg);
    if (cm->frame_type == KEY_FRAME) {
      memset(cr->last_coded_q_map, MAXQ,
             cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
      cr->sb_index = 0;
    }
    return;
  } else {
    int qindex_delta = 0;
    int qindex2;
    const double q = av1_convert_qindex_to_q(cm->base_qindex, cm->bit_depth);
    aom_clear_system_state();
    // Set the rate threshold to some multiple (currently 4x, via the final
    // << 2) of the target rate (target is given by sb64_target_rate and
    // scaled by 256).
    cr->thresh_rate_sb = ((int64_t)(rc->sb64_target_rate) << 8) << 2;
    // Distortion threshold, quadratic in Q, scale factor to be adjusted.
    // q will not exceed 457, so (q * q) is within 32bit; see:
    // av1_convert_qindex_to_q(), av1_ac_quant(), ac_qlookup*[].
    cr->thresh_dist_sb = ((int64_t)(q * q)) << 2;

    // Set up segmentation.
    // Clear down the segment map.
    av1_enable_segmentation(&cm->seg);
    av1_clearall_segfeatures(seg);
    // Select delta coding method.
    seg->abs_delta = SEGMENT_DELTADATA;

    // Note: setting temporal_update has no effect, as the seg-map coding
    // method (temporal or spatial) is determined in
    // av1_choose_segmap_coding_method(), based on the coding cost of each
    // method. When error_resilient mode is on, the last_frame_seg_map is set
    // to 0, so if temporal coding is used, it is relative to a zero previous
    // map.
    // seg->temporal_update = 0;

    // Segment BASE "Q" feature is disabled so it defaults to the baseline Q.
    av1_disable_segfeature(seg, CR_SEGMENT_ID_BASE, SEG_LVL_ALT_Q);
    // Use segment BOOST1 for in-frame Q adjustment.
    av1_enable_segfeature(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q);
    // Use segment BOOST2 for more aggressive in-frame Q adjustment.
    av1_enable_segfeature(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q);

    // Set the q delta for segment BOOST1.
    qindex_delta = compute_deltaq(cpi, cm->base_qindex, cr->rate_ratio_qdelta);
    cr->qindex_delta[1] = qindex_delta;

    // Compute rd-mult for segment BOOST1.
    qindex2 = clamp(cm->base_qindex + cm->y_dc_delta_q + qindex_delta, 0, MAXQ);

    cr->rdmult = av1_compute_rd_mult(cpi, qindex2);

    av1_set_segdata(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q, qindex_delta);

    // Set a more aggressive (higher) q delta for segment BOOST2.
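    // E.g. with rate_boost_fac == 17 and rate_ratio_qdelta == 2.0 this
    // targets a rate ratio of 0.1 * 17 * 2.0 == 3.4, subject to the
    // CR_MAX_RATE_TARGET_RATIO cap.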
    qindex_delta = compute_deltaq(
        cpi, cm->base_qindex,
        AOMMIN(CR_MAX_RATE_TARGET_RATIO,
               0.1 * cr->rate_boost_fac * cr->rate_ratio_qdelta));
    cr->qindex_delta[2] = qindex_delta;
    av1_set_segdata(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q, qindex_delta);

    // Update the segmentation and refresh map.
    cyclic_refresh_update_map(cpi);
  }
}

int av1_cyclic_refresh_get_rdmult(const CYCLIC_REFRESH *cr) {
  return cr->rdmult;
}

void av1_cyclic_refresh_reset_resize(AV1_COMP *const cpi) {
  const AV1_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  memset(cr->map, 0, cm->mi_rows * cm->mi_cols);
  cr->sb_index = 0;
  cpi->refresh_golden_frame = 1;
}