/*
 * Copyright (c) 2019, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <stdint.h>

#include "av1/common/blockd.h"
#include "config/aom_config.h"
#include "config/aom_scale_rtcd.h"

#include "aom/aom_codec.h"
#include "aom/aom_encoder.h"

#if CONFIG_MISMATCH_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_MISMATCH_DEBUG

#include "av1/common/av1_common_int.h"
#include "av1/common/reconinter.h"

#include "av1/encoder/encoder.h"
#include "av1/encoder/encode_strategy.h"
#include "av1/encoder/encodeframe.h"
#include "av1/encoder/encoder_alloc.h"
#include "av1/encoder/firstpass.h"
#include "av1/encoder/gop_structure.h"
#include "av1/encoder/pass2_strategy.h"
#include "av1/encoder/temporal_filter.h"
#if CONFIG_THREE_PASS
#include "av1/encoder/thirdpass.h"
#endif  // CONFIG_THREE_PASS
#include "av1/encoder/tpl_model.h"

#if CONFIG_TUNE_VMAF
#include "av1/encoder/tune_vmaf.h"
#endif

#define TEMPORAL_FILTER_KEY_FRAME (CONFIG_REALTIME_ONLY ? 0 : 1)

Jayasanker J24cb9bc2020-04-15 13:43:10 +053047static INLINE void set_refresh_frame_flags(
Yunqing Wang2bfef9c2021-09-14 17:32:38 -070048 RefreshFrameInfo *const refresh_frame, bool refresh_gf, bool refresh_bwdref,
49 bool refresh_arf) {
50 refresh_frame->golden_frame = refresh_gf;
51 refresh_frame->bwd_ref_frame = refresh_bwdref;
52 refresh_frame->alt_ref_frame = refresh_arf;
Jayasanker J24cb9bc2020-04-15 13:43:10 +053053}
54
Yunqing Wang2bfef9c2021-09-14 17:32:38 -070055void av1_configure_buffer_updates(AV1_COMP *const cpi,
56 RefreshFrameInfo *const refresh_frame,
57 const FRAME_UPDATE_TYPE type,
58 const REFBUF_STATE refbuf_state,
59 int force_refresh_all) {
David Turnerce9b5902019-01-23 17:25:47 +000060 // NOTE(weitinglin): Should we define another function to take care of
61 // cpi->rc.is_$Source_Type to make this function as it is in the comment?
Vishesh38c05d72020-04-14 12:19:14 +053062 const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
63 &cpi->ext_flags.refresh_frame;
David Turner4f1f1812019-01-24 17:00:24 +000064 cpi->rc.is_src_frame_alt_ref = 0;
David Turnerce9b5902019-01-23 17:25:47 +000065
66 switch (type) {
67 case KF_UPDATE:
Yunqing Wang2bfef9c2021-09-14 17:32:38 -070068 set_refresh_frame_flags(refresh_frame, true, true, true);
David Turnerce9b5902019-01-23 17:25:47 +000069 break;
70
71 case LF_UPDATE:
Yunqing Wang2bfef9c2021-09-14 17:32:38 -070072 set_refresh_frame_flags(refresh_frame, false, false, false);
David Turnerce9b5902019-01-23 17:25:47 +000073 break;
74
75 case GF_UPDATE:
Yunqing Wang2bfef9c2021-09-14 17:32:38 -070076 set_refresh_frame_flags(refresh_frame, true, false, false);
David Turnerce9b5902019-01-23 17:25:47 +000077 break;
78
79 case OVERLAY_UPDATE:
Jingning Han880bb352021-06-18 11:57:26 -070080 if (refbuf_state == REFBUF_RESET)
Yunqing Wang2bfef9c2021-09-14 17:32:38 -070081 set_refresh_frame_flags(refresh_frame, true, true, true);
Jingning Han880bb352021-06-18 11:57:26 -070082 else
Yunqing Wang2bfef9c2021-09-14 17:32:38 -070083 set_refresh_frame_flags(refresh_frame, true, false, false);
Jingning Han880bb352021-06-18 11:57:26 -070084
David Turnerce9b5902019-01-23 17:25:47 +000085 cpi->rc.is_src_frame_alt_ref = 1;
86 break;
87
88 case ARF_UPDATE:
David Turnerce9b5902019-01-23 17:25:47 +000089 // NOTE: BWDREF does not get updated along with ALTREF_FRAME.
Jingning Han880bb352021-06-18 11:57:26 -070090 if (refbuf_state == REFBUF_RESET)
Yunqing Wang2bfef9c2021-09-14 17:32:38 -070091 set_refresh_frame_flags(refresh_frame, true, true, true);
Jingning Han880bb352021-06-18 11:57:26 -070092 else
Yunqing Wang2bfef9c2021-09-14 17:32:38 -070093 set_refresh_frame_flags(refresh_frame, false, false, true);
Jingning Han880bb352021-06-18 11:57:26 -070094
David Turnerce9b5902019-01-23 17:25:47 +000095 break;
96
David Turnerce9b5902019-01-23 17:25:47 +000097 case INTNL_OVERLAY_UPDATE:
Yunqing Wang2bfef9c2021-09-14 17:32:38 -070098 set_refresh_frame_flags(refresh_frame, false, false, false);
David Turnerce9b5902019-01-23 17:25:47 +000099 cpi->rc.is_src_frame_alt_ref = 1;
David Turnerce9b5902019-01-23 17:25:47 +0000100 break;
101
102 case INTNL_ARF_UPDATE:
Yunqing Wang2bfef9c2021-09-14 17:32:38 -0700103 set_refresh_frame_flags(refresh_frame, false, true, false);
David Turnerce9b5902019-01-23 17:25:47 +0000104 break;
105
106 default: assert(0); break;
107 }
David Turner4f1f1812019-01-24 17:00:24 +0000108
Vishesh38c05d72020-04-14 12:19:14 +0530109 if (ext_refresh_frame_flags->update_pending &&
Jingning Hana862e202021-05-14 10:18:50 -0700110 (!is_stat_generation_stage(cpi))) {
Yunqing Wang2bfef9c2021-09-14 17:32:38 -0700111 set_refresh_frame_flags(refresh_frame,
Jayasanker J24cb9bc2020-04-15 13:43:10 +0530112 ext_refresh_frame_flags->golden_frame,
113 ext_refresh_frame_flags->bwd_ref_frame,
114 ext_refresh_frame_flags->alt_ref_frame);
Jingning Hana862e202021-05-14 10:18:50 -0700115 GF_GROUP *gf_group = &cpi->ppi->gf_group;
116 if (ext_refresh_frame_flags->golden_frame)
117 gf_group->update_type[cpi->gf_frame_index] = GF_UPDATE;
118 if (ext_refresh_frame_flags->alt_ref_frame)
119 gf_group->update_type[cpi->gf_frame_index] = ARF_UPDATE;
120 if (ext_refresh_frame_flags->bwd_ref_frame)
121 gf_group->update_type[cpi->gf_frame_index] = INTNL_ARF_UPDATE;
122 }
David Turner4f1f1812019-01-24 17:00:24 +0000123
Jayasanker J24cb9bc2020-04-15 13:43:10 +0530124 if (force_refresh_all)
Yunqing Wang2bfef9c2021-09-14 17:32:38 -0700125 set_refresh_frame_flags(refresh_frame, true, true, true);
David Turnerce9b5902019-01-23 17:25:47 +0000126}
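
/* The switch above reduces to a small per-update-type truth table for the
 * golden/bwdref/altref refresh flags. The guarded sketch below is an
 * illustrative, standalone restatement of that table for the non-reset cases;
 * all names in it (example_refresh, KF, ...) are hypothetical and not part of
 * libaom. */
#if 0
#include <stdbool.h>
#include <stdio.h>

enum { KF, LF, GF, OVERLAY, ARF, INTNL_OVERLAY, INTNL_ARF };

static void example_refresh(int type, bool *gf, bool *bwd, bool *arf) {
  switch (type) {
    case KF: *gf = true; *bwd = true; *arf = true; break;
    case GF:
    case OVERLAY: *gf = true; *bwd = false; *arf = false; break;
    case ARF: *gf = false; *bwd = false; *arf = true; break;
    case INTNL_ARF: *gf = false; *bwd = true; *arf = false; break;
    default: *gf = false; *bwd = false; *arf = false; break; /* LF, INTNL_OVERLAY */
  }
}

int main(void) {
  bool gf, bwd, arf;
  example_refresh(ARF, &gf, &bwd, &arf);
  printf("ARF_UPDATE refreshes golden=%d bwdref=%d altref=%d\n", gf, bwd, arf);
  return 0;
}
#endif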
127
David Turner1539bb02019-01-24 15:28:13 +0000128static void set_additional_frame_flags(const AV1_COMMON *const cm,
129 unsigned int *const frame_flags) {
Urvang Joshib6409e92020-03-23 11:23:27 -0700130 if (frame_is_intra_only(cm)) {
131 *frame_flags |= FRAMEFLAGS_INTRAONLY;
132 }
133 if (frame_is_sframe(cm)) {
134 *frame_flags |= FRAMEFLAGS_SWITCH;
135 }
136 if (cm->features.error_resilient_mode) {
137 *frame_flags |= FRAMEFLAGS_ERROR_RESILIENT;
138 }
David Turner1539bb02019-01-24 15:28:13 +0000139}
140
Vishesha195ca32020-04-07 18:46:20 +0530141static void set_ext_overrides(AV1_COMMON *const cm,
142 EncodeFrameParams *const frame_params,
143 ExternalFlags *const ext_flags) {
  // Override the defaults with values supplied externally via the
  // av1_update_reference() and av1_update_entropy() calls.
  // Note: The overrides are valid only for the next frame passed
  // to av1_encode_lowlevel().
148
Vishesha195ca32020-04-07 18:46:20 +0530149 if (ext_flags->use_s_frame) {
David Turner475a3132019-01-18 15:17:17 +0000150 frame_params->frame_type = S_FRAME;
David Turner07dbd8e2019-01-08 17:16:25 +0000151 }
David Turner07dbd8e2019-01-08 17:16:25 +0000152
Vishesha195ca32020-04-07 18:46:20 +0530153 if (ext_flags->refresh_frame_context_pending) {
154 cm->features.refresh_frame_context = ext_flags->refresh_frame_context;
155 ext_flags->refresh_frame_context_pending = 0;
David Turner07dbd8e2019-01-08 17:16:25 +0000156 }
Vishesha195ca32020-04-07 18:46:20 +0530157 cm->features.allow_ref_frame_mvs = ext_flags->use_ref_frame_mvs;
David Turner07dbd8e2019-01-08 17:16:25 +0000158
Vishesha195ca32020-04-07 18:46:20 +0530159 frame_params->error_resilient_mode = ext_flags->use_error_resilient;
  // A keyframe is already error resilient, and keyframes with
  // error_resilient_mode interfere with the use of show_existing_frame
  // when forward reference keyframes are enabled.
David Turner475a3132019-01-18 15:17:17 +0000163 frame_params->error_resilient_mode &= frame_params->frame_type != KEY_FRAME;
David Turner07dbd8e2019-01-08 17:16:25 +0000164 // For bitstream conformance, s-frames must be error-resilient
David Turner475a3132019-01-18 15:17:17 +0000165 frame_params->error_resilient_mode |= frame_params->frame_type == S_FRAME;
David Turner07dbd8e2019-01-08 17:16:25 +0000166}
167
David Turnera7f133c2019-01-22 14:47:16 +0000168static int choose_primary_ref_frame(
Remya Prakasan6566bc82021-11-05 23:21:12 +0530169 AV1_COMP *const cpi, const EncodeFrameParams *const frame_params) {
David Turnera7f133c2019-01-22 14:47:16 +0000170 const AV1_COMMON *const cm = &cpi->common;
171
172 const int intra_only = frame_params->frame_type == KEY_FRAME ||
173 frame_params->frame_type == INTRA_ONLY_FRAME;
Marco Paniconiee968342020-06-08 11:21:48 -0700174 if (intra_only || frame_params->error_resilient_mode ||
Vishesha195ca32020-04-07 18:46:20 +0530175 cpi->ext_flags.use_primary_ref_none) {
David Turnera7f133c2019-01-22 14:47:16 +0000176 return PRIMARY_REF_NONE;
177 }
178
Jingning Han77890092022-08-09 12:07:14 -0700179#if !CONFIG_REALTIME_ONLY
180 if (cpi->use_ducky_encode) {
181 int wanted_fb = cpi->ppi->gf_group.primary_ref_idx[cpi->gf_frame_index];
182 for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
183 if (get_ref_frame_map_idx(cm, ref_frame) == wanted_fb)
184 return ref_frame - LAST_FRAME;
185 }
186
187 return PRIMARY_REF_NONE;
188 }
189#endif // !CONFIG_REALTIME_ONLY
190
Yunqing Wangd0f0d3b2019-12-23 12:15:44 -0800191 // In large scale case, always use Last frame's frame contexts.
192 // Note(yunqing): In other cases, primary_ref_frame is chosen based on
Mufaddal Chakera8ee04fa2021-03-17 13:33:18 +0530193 // cpi->ppi->gf_group.layer_depth[cpi->gf_frame_index], which also controls
Yunqing Wangd0f0d3b2019-12-23 12:15:44 -0800194 // frame bit allocation.
Urvang Joshi54ffae72020-03-23 13:37:10 -0700195 if (cm->tiles.large_scale) return (LAST_FRAME - LAST_FRAME);
Yunqing Wangd0f0d3b2019-12-23 12:15:44 -0800196
Marco Paniconi1c25d522022-10-11 10:22:51 -0700197 if (cpi->ppi->use_svc || cpi->ppi->rtc_ref.set_ref_frame_config)
198 return av1_svc_primary_ref_frame(cpi);
Marco Paniconi42e1bdd2020-06-10 16:47:39 -0700199
David Turnera7f133c2019-01-22 14:47:16 +0000200 // Find the most recent reference frame with the same reference type as the
201 // current frame
Tarundeep Singh0734c662021-06-04 14:52:52 +0530202 const int current_ref_type = get_current_frame_ref_type(cpi);
Tarundeep Singhfd11f822021-05-27 13:09:06 +0530203 int wanted_fb = cpi->ppi->fb_of_context_type[current_ref_type];
Remya Prakasanffeb4972022-06-21 20:00:28 +0530204#if CONFIG_FPMT_TEST
Remya Prakasan6566bc82021-11-05 23:21:12 +0530205 if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) {
206 GF_GROUP *const gf_group = &cpi->ppi->gf_group;
207 if (gf_group->update_type[cpi->gf_frame_index] == INTNL_ARF_UPDATE) {
208 int frame_level = gf_group->frame_parallel_level[cpi->gf_frame_index];
209 // Book keep wanted_fb of frame_parallel_level 1 frame in an FP2 set.
210 if (frame_level == 1) {
211 cpi->wanted_fb = wanted_fb;
212 }
213 // Use the wanted_fb of level 1 frame in an FP2 for a level 2 frame in the
214 // set.
215 if (frame_level == 2 &&
216 gf_group->update_type[cpi->gf_frame_index - 1] == INTNL_ARF_UPDATE) {
217 assert(gf_group->frame_parallel_level[cpi->gf_frame_index - 1] == 1);
218 wanted_fb = cpi->wanted_fb;
219 }
220 }
221 }
Remya Prakasanffeb4972022-06-21 20:00:28 +0530222#endif // CONFIG_FPMT_TEST
David Turnera7f133c2019-01-22 14:47:16 +0000223 int primary_ref_frame = PRIMARY_REF_NONE;
224 for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
225 if (get_ref_frame_map_idx(cm, ref_frame) == wanted_fb) {
226 primary_ref_frame = ref_frame - LAST_FRAME;
227 }
228 }
Jingning Han370d1162019-07-03 10:24:03 -0700229
David Turnera7f133c2019-01-22 14:47:16 +0000230 return primary_ref_frame;
231}
232
Deepa K Gfb89ce02020-04-06 13:34:42 +0530233static void adjust_frame_rate(AV1_COMP *cpi, int64_t ts_start, int64_t ts_end) {
234 TimeStamps *time_stamps = &cpi->time_stamps;
David Turnerdedd8ff2019-01-23 13:59:46 +0000235 int64_t this_duration;
236 int step = 0;
237
238 // Clear down mmx registers
David Turnerdedd8ff2019-01-23 13:59:46 +0000239
Tarundeep Singh15eb4de2021-04-21 15:53:10 +0530240 if (cpi->ppi->use_svc && cpi->svc.spatial_layer_id > 0) {
Marco Paniconi63971322019-08-15 21:32:05 -0700241 cpi->framerate = cpi->svc.base_framerate;
242 av1_rc_update_framerate(cpi, cpi->common.width, cpi->common.height);
243 return;
244 }
245
Yunqing Wang15ab03c2020-11-24 16:45:25 -0800246 if (ts_start == time_stamps->first_ts_start) {
Deepa K Gfb89ce02020-04-06 13:34:42 +0530247 this_duration = ts_end - ts_start;
David Turnerdedd8ff2019-01-23 13:59:46 +0000248 step = 1;
249 } else {
250 int64_t last_duration =
Yunqing Wang15ab03c2020-11-24 16:45:25 -0800251 time_stamps->prev_ts_end - time_stamps->prev_ts_start;
David Turnerdedd8ff2019-01-23 13:59:46 +0000252
Yunqing Wang15ab03c2020-11-24 16:45:25 -0800253 this_duration = ts_end - time_stamps->prev_ts_end;
David Turnerdedd8ff2019-01-23 13:59:46 +0000254
255 // do a step update if the duration changes by 10%
256 if (last_duration)
257 step = (int)((this_duration - last_duration) * 10 / last_duration);
258 }
259
260 if (this_duration) {
261 if (step) {
Aasaipriyaf0c28cb2021-09-15 18:17:48 +0530262 cpi->new_framerate = 10000000.0 / this_duration;
Remya Prakasandc320fa2022-05-17 19:21:58 +0530263 av1_new_framerate(cpi, cpi->new_framerate);
David Turnerdedd8ff2019-01-23 13:59:46 +0000264 } else {
265 // Average this frame's rate into the last second's average
266 // frame rate. If we haven't seen 1 second yet, then average
267 // over the whole interval seen.
Deepa K Gfb89ce02020-04-06 13:34:42 +0530268 const double interval =
Yunqing Wang15ab03c2020-11-24 16:45:25 -0800269 AOMMIN((double)(ts_end - time_stamps->first_ts_start), 10000000.0);
David Turnerdedd8ff2019-01-23 13:59:46 +0000270 double avg_duration = 10000000.0 / cpi->framerate;
271 avg_duration *= (interval - avg_duration + this_duration);
272 avg_duration /= interval;
Aasaipriyaf0c28cb2021-09-15 18:17:48 +0530273 cpi->new_framerate = (10000000.0 / avg_duration);
274 // For parallel frames update cpi->framerate with new_framerate
275 // during av1_post_encode_updates()
Remya Prakasandc320fa2022-05-17 19:21:58 +0530276 double framerate =
Aasaipriyaf0c28cb2021-09-15 18:17:48 +0530277 (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0)
278 ? cpi->framerate
279 : cpi->new_framerate;
Aasaipriyaf0c28cb2021-09-15 18:17:48 +0530280 av1_new_framerate(cpi, framerate);
David Turnerdedd8ff2019-01-23 13:59:46 +0000281 }
282 }
Aasaipriyaf0c28cb2021-09-15 18:17:48 +0530283
Yunqing Wang15ab03c2020-11-24 16:45:25 -0800284 time_stamps->prev_ts_start = ts_start;
285 time_stamps->prev_ts_end = ts_end;
David Turnerdedd8ff2019-01-23 13:59:46 +0000286}
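
/* The non-step branch of adjust_frame_rate() keeps a running average of the
 * frame duration over at most the last 10,000,000 timebase units. Below is a
 * standalone numeric sketch of that update, using made-up values rather than
 * encoder state: */
#if 0
#include <stdio.h>

int main(void) {
  const double interval = 10000000.0;    /* averaging window, timebase units */
  const double framerate = 30.0;         /* current estimate */
  const double this_duration = 400000.0; /* the new frame lasted 1/25 s */

  double avg_duration = 10000000.0 / framerate; /* ~333333.3 */
  avg_duration *= (interval - avg_duration + this_duration);
  avg_duration /= interval;                     /* ~335555.6 */
  printf("new framerate ~= %.2f fps\n", 10000000.0 / avg_duration); /* ~29.80 */
  return 0;
}
#endif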
287
// Determine whether there is a forced keyframe pending in the lookahead
// buffer. Returns the lookahead index of the forced keyframe, or -1 if there
// is none.
Aasaipriya9bc1dcb2020-03-13 17:46:07 +0530289int is_forced_keyframe_pending(struct lookahead_ctx *lookahead,
290 const int up_to_index,
291 const COMPRESSOR_STAGE compressor_stage) {
David Turnerdedd8ff2019-01-23 13:59:46 +0000292 for (int i = 0; i <= up_to_index; i++) {
Mufaddal Chakera73b7b702019-12-09 11:44:55 +0530293 const struct lookahead_entry *e =
Mufaddal Chakeraac828682019-12-13 16:31:42 +0530294 av1_lookahead_peek(lookahead, i, compressor_stage);
David Turnerdedd8ff2019-01-23 13:59:46 +0000295 if (e == NULL) {
296 // We have reached the end of the lookahead buffer and not early-returned
297 // so there isn't a forced key-frame pending.
Aasaipriya9bc1dcb2020-03-13 17:46:07 +0530298 return -1;
David Turnerdedd8ff2019-01-23 13:59:46 +0000299 } else if (e->flags == AOM_EFLAG_FORCE_KF) {
Aasaipriya592cb002020-12-14 19:24:24 +0530300 return i;
David Turnerdedd8ff2019-01-23 13:59:46 +0000301 } else {
302 continue;
303 }
304 }
Aasaipriya9bc1dcb2020-03-13 17:46:07 +0530305 return -1; // Never reached
David Turnerdedd8ff2019-01-23 13:59:46 +0000306}
307
// Check if we should encode an ARF or internal ARF. If not, try a LAST frame.
// Do some setup associated with the chosen source.
// flush, pop_lookahead, last_source, and show_frame are outputs.
// Return the frame source, or NULL if we couldn't find one.
Yaowu Xufac3d862019-04-26 15:43:03 -0700312static struct lookahead_entry *choose_frame_source(
Angie Chiang470d1162020-12-31 13:10:55 -0800313 AV1_COMP *const cpi, int *const flush, int *pop_lookahead,
Wan-Teh Chang5529fda2022-09-02 11:31:17 -0700314 struct lookahead_entry **last_source, int *const show_frame) {
David Turnerdedd8ff2019-01-23 13:59:46 +0000315 AV1_COMMON *const cm = &cpi->common;
Mufaddal Chakera8ee04fa2021-03-17 13:33:18 +0530316 const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
David Turnerdedd8ff2019-01-23 13:59:46 +0000317 struct lookahead_entry *source = NULL;
David Turnerdedd8ff2019-01-23 13:59:46 +0000318
bohanli0db9c512020-06-12 17:43:06 -0700319 // Source index in lookahead buffer.
Mufaddal Chakeraab20d372021-03-17 12:18:34 +0530320 int src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
bohanli0db9c512020-06-12 17:43:06 -0700321
Aasaipriya9bc1dcb2020-03-13 17:46:07 +0530322 // TODO(Aasaipriya): Forced key frames need to be fixed when rc_mode != AOM_Q
bohanli0db9c512020-06-12 17:43:06 -0700323 if (src_index &&
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +0530324 (is_forced_keyframe_pending(cpi->ppi->lookahead, src_index,
Aasaipriya9bc1dcb2020-03-13 17:46:07 +0530325 cpi->compressor_stage) != -1) &&
Mufaddal Chakera186f8e32021-03-17 13:20:00 +0530326 cpi->oxcf.rc_cfg.mode != AOM_Q && !is_stat_generation_stage(cpi)) {
bohanli0db9c512020-06-12 17:43:06 -0700327 src_index = 0;
David Turnerdedd8ff2019-01-23 13:59:46 +0000328 *flush = 1;
329 }
330
bohanli0db9c512020-06-12 17:43:06 -0700331 // If the current frame is arf, then we should not pop from the lookahead
332 // buffer. If the current frame is not arf, then pop it. This assumes the
333 // first frame in the GF group is not arf. May need to change if it is not
334 // true.
Angie Chiang470d1162020-12-31 13:10:55 -0800335 *pop_lookahead = (src_index == 0);
bohanli1478d862020-06-17 20:53:44 -0700336 // If this is a key frame and keyframe filtering is enabled with overlay,
337 // then do not pop.
Angie Chiang470d1162020-12-31 13:10:55 -0800338 if (*pop_lookahead && cpi->oxcf.kf_cfg.enable_keyframe_filtering > 1 &&
Mufaddal Chakeraab20d372021-03-17 12:18:34 +0530339 gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE &&
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +0530340 !is_stat_generation_stage(cpi) && cpi->ppi->lookahead) {
341 if (cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz &&
bohanli1478d862020-06-17 20:53:44 -0700342 (*flush ||
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +0530343 cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz ==
344 cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].pop_sz)) {
Angie Chiang470d1162020-12-31 13:10:55 -0800345 *pop_lookahead = 0;
bohanli1478d862020-06-17 20:53:44 -0700346 }
347 }
Mufaddal Chakera186f8e32021-03-17 13:20:00 +0530348
349 // LAP stage does not have ARFs or forward key-frames,
350 // hence, always pop_lookahead here.
351 if (is_stat_generation_stage(cpi)) {
352 *pop_lookahead = 1;
Mufaddal Chakera97f92712021-05-21 15:10:33 +0530353 src_index = 0;
Mufaddal Chakera186f8e32021-03-17 13:20:00 +0530354 }
355
Wan-Teh Chang5529fda2022-09-02 11:31:17 -0700356 *show_frame = *pop_lookahead;
Mufaddal Chakera97f92712021-05-21 15:10:33 +0530357
Remya Prakasanffeb4972022-06-21 20:00:28 +0530358#if CONFIG_FPMT_TEST
Remya Prakasan6566bc82021-11-05 23:21:12 +0530359 if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_ENCODE) {
360#else
361 {
Remya Prakasanffeb4972022-06-21 20:00:28 +0530362#endif // CONFIG_FPMT_TEST
Remya Prakasan6566bc82021-11-05 23:21:12 +0530363 // Future frame in parallel encode set
364 if (gf_group->src_offset[cpi->gf_frame_index] != 0 &&
365 !is_stat_generation_stage(cpi))
366 src_index = gf_group->src_offset[cpi->gf_frame_index];
Mufaddal Chakera97f92712021-05-21 15:10:33 +0530367 }
Wan-Teh Chang5529fda2022-09-02 11:31:17 -0700368 if (*show_frame) {
bohanli0db9c512020-06-12 17:43:06 -0700369 // show frame, pop from buffer
David Turnerdedd8ff2019-01-23 13:59:46 +0000370 // Get last frame source.
371 if (cm->current_frame.frame_number > 0) {
Mufaddal Chakera97f92712021-05-21 15:10:33 +0530372 *last_source = av1_lookahead_peek(cpi->ppi->lookahead, src_index - 1,
373 cpi->compressor_stage);
David Turnerdedd8ff2019-01-23 13:59:46 +0000374 }
375 // Read in the source frame.
Mufaddal Chakera97f92712021-05-21 15:10:33 +0530376 source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
377 cpi->compressor_stage);
bohanli0db9c512020-06-12 17:43:06 -0700378 } else {
379 // no show frames are arf frames
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +0530380 source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
381 cpi->compressor_stage);
bohanli0db9c512020-06-12 17:43:06 -0700382 if (source != NULL) {
383 cm->showable_frame = 1;
384 }
David Turnerdedd8ff2019-01-23 13:59:46 +0000385 }
386 return source;
387}
388
// Don't allow a show_existing_frame to coincide with an error-resilient
// frame or an S-frame. An exception can be made in the case of a keyframe,
// since it does not depend on any previous frames.
David Turner73245762019-02-11 16:42:34 +0000392static int allow_show_existing(const AV1_COMP *const cpi,
393 unsigned int frame_flags) {
David Turnerb0c0aa32019-01-28 16:17:13 +0000394 if (cpi->common.current_frame.frame_number == 0) return 0;
395
396 const struct lookahead_entry *lookahead_src =
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +0530397 av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage);
David Turnerb0c0aa32019-01-28 16:17:13 +0000398 if (lookahead_src == NULL) return 1;
399
400 const int is_error_resilient =
Visheshaa6a1702020-06-30 19:27:22 +0530401 cpi->oxcf.tool_cfg.error_resilient_mode ||
David Turnerb0c0aa32019-01-28 16:17:13 +0000402 (lookahead_src->flags & AOM_EFLAG_ERROR_RESILIENT);
Vishesh7e9873d2020-06-08 15:41:33 +0530403 const int is_s_frame = cpi->oxcf.kf_cfg.enable_sframe ||
404 (lookahead_src->flags & AOM_EFLAG_SET_S_FRAME);
David Turnerb0c0aa32019-01-28 16:17:13 +0000405 const int is_key_frame =
David Turner73245762019-02-11 16:42:34 +0000406 (cpi->rc.frames_to_key == 0) || (frame_flags & FRAMEFLAGS_KEY);
David Turnerb0c0aa32019-01-28 16:17:13 +0000407 return !(is_error_resilient || is_s_frame) || is_key_frame;
408}
409
David Turner73245762019-02-11 16:42:34 +0000410// Update frame_flags to tell the encoder's caller what sort of frame was
411// encoded.
Yunqing Wang2bfef9c2021-09-14 17:32:38 -0700412static void update_frame_flags(const AV1_COMMON *const cm,
413 const RefreshFrameInfo *const refresh_frame,
414 unsigned int *frame_flags) {
Jayasanker J24cb9bc2020-04-15 13:43:10 +0530415 if (encode_show_existing_frame(cm)) {
James Zern71ddd812022-08-30 17:15:20 -0700416 *frame_flags &= ~(uint32_t)FRAMEFLAGS_GOLDEN;
417 *frame_flags &= ~(uint32_t)FRAMEFLAGS_BWDREF;
418 *frame_flags &= ~(uint32_t)FRAMEFLAGS_ALTREF;
419 *frame_flags &= ~(uint32_t)FRAMEFLAGS_KEY;
David Turner73245762019-02-11 16:42:34 +0000420 return;
421 }
422
Yunqing Wang2bfef9c2021-09-14 17:32:38 -0700423 if (refresh_frame->golden_frame) {
David Turner73245762019-02-11 16:42:34 +0000424 *frame_flags |= FRAMEFLAGS_GOLDEN;
425 } else {
James Zern71ddd812022-08-30 17:15:20 -0700426 *frame_flags &= ~(uint32_t)FRAMEFLAGS_GOLDEN;
David Turner73245762019-02-11 16:42:34 +0000427 }
428
Yunqing Wang2bfef9c2021-09-14 17:32:38 -0700429 if (refresh_frame->alt_ref_frame) {
David Turner73245762019-02-11 16:42:34 +0000430 *frame_flags |= FRAMEFLAGS_ALTREF;
431 } else {
James Zern71ddd812022-08-30 17:15:20 -0700432 *frame_flags &= ~(uint32_t)FRAMEFLAGS_ALTREF;
David Turner73245762019-02-11 16:42:34 +0000433 }
434
Yunqing Wang2bfef9c2021-09-14 17:32:38 -0700435 if (refresh_frame->bwd_ref_frame) {
David Turner73245762019-02-11 16:42:34 +0000436 *frame_flags |= FRAMEFLAGS_BWDREF;
437 } else {
James Zern71ddd812022-08-30 17:15:20 -0700438 *frame_flags &= ~(uint32_t)FRAMEFLAGS_BWDREF;
David Turner73245762019-02-11 16:42:34 +0000439 }
440
Jayasanker J24cb9bc2020-04-15 13:43:10 +0530441 if (cm->current_frame.frame_type == KEY_FRAME) {
David Turner73245762019-02-11 16:42:34 +0000442 *frame_flags |= FRAMEFLAGS_KEY;
443 } else {
James Zern71ddd812022-08-30 17:15:20 -0700444 *frame_flags &= ~(uint32_t)FRAMEFLAGS_KEY;
David Turner73245762019-02-11 16:42:34 +0000445 }
446}
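
/* update_frame_flags() reports the coded frame to the caller by setting or
 * clearing one bit per named reference in *frame_flags. Below is a minimal
 * standalone sketch of the same bit manipulation, with made-up flag values
 * rather than the real FRAMEFLAGS_* constants: */
#if 0
#include <stdint.h>
#include <stdio.h>

#define EX_FLAG_GOLDEN 0x1u
#define EX_FLAG_ALTREF 0x2u

int main(void) {
  uint32_t flags = EX_FLAG_ALTREF;    /* carried over from before */
  flags |= EX_FLAG_GOLDEN;            /* this frame refreshed GOLDEN */
  flags &= ~(uint32_t)EX_FLAG_ALTREF; /* ... but not ALTREF */
  printf("flags = 0x%x\n", flags);    /* prints 0x1 */
  return 0;
}
#endif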
447
448#define DUMP_REF_FRAME_IMAGES 0
449
450#if DUMP_REF_FRAME_IMAGES == 1
451static int dump_one_image(AV1_COMMON *cm,
452 const YV12_BUFFER_CONFIG *const ref_buf,
453 char *file_name) {
454 int h;
455 FILE *f_ref = NULL;
456
457 if (ref_buf == NULL) {
458 printf("Frame data buffer is NULL.\n");
459 return AOM_CODEC_MEM_ERROR;
460 }
461
462 if ((f_ref = fopen(file_name, "wb")) == NULL) {
463 printf("Unable to open file %s to write.\n", file_name);
464 return AOM_CODEC_MEM_ERROR;
465 }
466
467 // --- Y ---
468 for (h = 0; h < cm->height; ++h) {
469 fwrite(&ref_buf->y_buffer[h * ref_buf->y_stride], 1, cm->width, f_ref);
470 }
471 // --- U ---
472 for (h = 0; h < (cm->height >> 1); ++h) {
473 fwrite(&ref_buf->u_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
474 f_ref);
475 }
476 // --- V ---
477 for (h = 0; h < (cm->height >> 1); ++h) {
478 fwrite(&ref_buf->v_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
479 f_ref);
480 }
481
482 fclose(f_ref);
483
484 return AOM_CODEC_OK;
485}
486
487static void dump_ref_frame_images(AV1_COMP *cpi) {
488 AV1_COMMON *const cm = &cpi->common;
489 MV_REFERENCE_FRAME ref_frame;
490
491 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
492 char file_name[256] = "";
493 snprintf(file_name, sizeof(file_name), "/tmp/enc_F%d_ref_%d.yuv",
494 cm->current_frame.frame_number, ref_frame);
495 dump_one_image(cm, get_ref_frame_yv12_buf(cpi, ref_frame), file_name);
496 }
497}
498#endif // DUMP_REF_FRAME_IMAGES == 1
499
Jingning Han81d6fbb2019-07-15 10:14:13 -0700500int av1_get_refresh_ref_frame_map(int refresh_frame_flags) {
Jingning Han8a6a1b82020-08-11 23:46:15 -0700501 int ref_map_index;
Jingning Hanf58175c2019-07-07 15:02:00 -0700502
503 for (ref_map_index = 0; ref_map_index < REF_FRAMES; ++ref_map_index)
504 if ((refresh_frame_flags >> ref_map_index) & 1) break;
505
Jingning Han8a6a1b82020-08-11 23:46:15 -0700506 if (ref_map_index == REF_FRAMES) ref_map_index = INVALID_IDX;
Jingning Hanf58175c2019-07-07 15:02:00 -0700507 return ref_map_index;
508}
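
/* av1_get_refresh_ref_frame_map() returns the index of the lowest set bit in
 * refresh_frame_flags, or INVALID_IDX when no bit is set. Below is a
 * standalone sketch of that scan (example_lowest_set_bit is a hypothetical
 * name, not a libaom function): */
#if 0
#include <stdio.h>

static int example_lowest_set_bit(int flags, int nbits) {
  for (int i = 0; i < nbits; ++i)
    if ((flags >> i) & 1) return i;
  return -1; /* stands in for INVALID_IDX */
}

int main(void) {
  printf("%d\n", example_lowest_set_bit(0x28, 8)); /* 0b00101000 -> 3 */
  printf("%d\n", example_lowest_set_bit(0x00, 8)); /* no refresh -> -1 */
  return 0;
}
#endif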
509
Remya Prakasan55318e32022-03-03 23:34:35 +0530510static int get_free_ref_map_index(RefFrameMapPair ref_map_pairs[REF_FRAMES]) {
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530511 for (int idx = 0; idx < REF_FRAMES; ++idx)
512 if (ref_map_pairs[idx].disp_order == -1) return idx;
513 return INVALID_IDX;
Jingning Han0a2af4e2019-07-08 19:30:03 -0700514}
515
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530516static int get_refresh_idx(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
Remya Prakasandaf23942022-06-08 14:53:46 +0530517 int update_arf, GF_GROUP *gf_group, int gf_index,
518 int enable_refresh_skip, int cur_frame_disp) {
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530519 int arf_count = 0;
520 int oldest_arf_order = INT32_MAX;
521 int oldest_arf_idx = -1;
522
523 int oldest_frame_order = INT32_MAX;
524 int oldest_idx = -1;
525
526 for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
527 RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
528 if (ref_pair.disp_order == -1) continue;
529 const int frame_order = ref_pair.disp_order;
530 const int reference_frame_level = ref_pair.pyr_level;
Remya Prakasan1e8ab6a2022-02-20 20:17:32 +0530531 // Keep future frames and three closest previous frames in output order.
532 if (frame_order > cur_frame_disp - 3) continue;
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530533
Remya Prakasana05ccde2021-06-07 21:24:11 +0530534 if (enable_refresh_skip) {
535 int skip_frame = 0;
536 // Prevent refreshing a frame in gf_group->skip_frame_refresh.
537 for (int i = 0; i < REF_FRAMES; i++) {
538 int frame_to_skip = gf_group->skip_frame_refresh[gf_index][i];
539 if (frame_to_skip == INVALID_IDX) break;
540 if (frame_order == frame_to_skip) {
541 skip_frame = 1;
542 break;
543 }
544 }
545 if (skip_frame) continue;
546 }
Remya Prakasana05ccde2021-06-07 21:24:11 +0530547
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530548 // Keep track of the oldest level 1 frame if the current frame is also level
549 // 1.
550 if (reference_frame_level == 1) {
551 // If there are more than 2 level 1 frames in the reference list,
552 // discard the oldest.
553 if (frame_order < oldest_arf_order) {
554 oldest_arf_order = frame_order;
555 oldest_arf_idx = map_idx;
556 }
557 arf_count++;
558 continue;
559 }
560
561 // Update the overall oldest reference frame.
562 if (frame_order < oldest_frame_order) {
563 oldest_frame_order = frame_order;
564 oldest_idx = map_idx;
565 }
566 }
567 if (update_arf && arf_count > 2) return oldest_arf_idx;
568 if (oldest_idx >= 0) return oldest_idx;
569 if (oldest_arf_idx >= 0) return oldest_arf_idx;
Remya Prakasana05ccde2021-06-07 21:24:11 +0530570 if (oldest_idx == -1) {
571 assert(arf_count > 2 && enable_refresh_skip);
572 return oldest_arf_idx;
573 }
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530574 assert(0 && "No valid refresh index found");
575 return -1;
576}
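
/* get_refresh_idx() picks the reference slot to evict: frames newer than
 * (cur_frame_disp - 3) are kept, at most two pyramid-level-1 (ARF) frames are
 * retained, and otherwise the oldest remaining frame goes. The guarded sketch
 * below restates that policy over plain (disp_order, level) pairs; it omits
 * the skip_frame_refresh handling, and every name in it is hypothetical. */
#if 0
#include <stdio.h>

typedef struct { int disp_order, level; } ExRef;

static int example_refresh_idx(const ExRef *refs, int n, int cur_disp,
                               int update_arf) {
  int arf_count = 0, oldest_arf = -1, oldest = -1;
  for (int i = 0; i < n; ++i) {
    if (refs[i].disp_order > cur_disp - 3) continue; /* keep recent/future */
    if (refs[i].level == 1) {
      if (oldest_arf < 0 || refs[i].disp_order < refs[oldest_arf].disp_order)
        oldest_arf = i;
      ++arf_count;
      continue;
    }
    if (oldest < 0 || refs[i].disp_order < refs[oldest].disp_order) oldest = i;
  }
  if (update_arf && arf_count > 2) return oldest_arf;
  return (oldest >= 0) ? oldest : oldest_arf;
}

int main(void) {
  const ExRef refs[4] = { { 8, 1 }, { 0, 2 }, { 4, 2 }, { 16, 1 } };
  /* Evicts slot 1 (display order 0), the oldest non-ARF reference. */
  printf("evict slot %d\n", example_refresh_idx(refs, 4, 16, 0));
  return 0;
}
#endif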
Remya Prakasand80b8ce2021-07-04 16:58:09 +0530577
Remya Prakasand80b8ce2021-07-04 16:58:09 +0530578// Computes the reference refresh index for INTNL_ARF_UPDATE frame.
579int av1_calc_refresh_idx_for_intnl_arf(
580 AV1_COMP *cpi, RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
581 int gf_index) {
582 GF_GROUP *const gf_group = &cpi->ppi->gf_group;
583
584 // Search for the open slot to store the current frame.
Remya Prakasan55318e32022-03-03 23:34:35 +0530585 int free_fb_index = get_free_ref_map_index(ref_frame_map_pairs);
Remya Prakasand80b8ce2021-07-04 16:58:09 +0530586
587 // Use a free slot if available.
588 if (free_fb_index != INVALID_IDX) {
589 return free_fb_index;
590 } else {
591 int enable_refresh_skip = !is_one_pass_rt_params(cpi);
592 int refresh_idx =
593 get_refresh_idx(ref_frame_map_pairs, 0, gf_group, gf_index,
594 enable_refresh_skip, gf_group->display_idx[gf_index]);
595 return refresh_idx;
596 }
597}
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530598
Remya Prakasan55318e32022-03-03 23:34:35 +0530599int av1_get_refresh_frame_flags(
600 const AV1_COMP *const cpi, const EncodeFrameParams *const frame_params,
601 FRAME_UPDATE_TYPE frame_update_type, int gf_index, int cur_disp_order,
602 RefFrameMapPair ref_frame_map_pairs[REF_FRAMES]) {
David Turner6e8b4d92019-02-18 15:01:33 +0000603 const AV1_COMMON *const cm = &cpi->common;
Vishesh38c05d72020-04-14 12:19:14 +0530604 const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
605 &cpi->ext_flags.refresh_frame;
606
Jingning Han11eaaa92021-06-18 01:13:48 -0700607 GF_GROUP *gf_group = &cpi->ppi->gf_group;
608 if (gf_group->refbuf_state[gf_index] == REFBUF_RESET)
609 return SELECT_ALL_BUF_SLOTS;
610
611 // TODO(jingning): Deprecate the following operations.
David Turner6e8b4d92019-02-18 15:01:33 +0000612 // Switch frames and shown key-frames overwrite all reference slots
Jingning Hanaa542f42021-06-19 14:45:37 -0700613 if (frame_params->frame_type == S_FRAME) return SELECT_ALL_BUF_SLOTS;
David Turner6e8b4d92019-02-18 15:01:33 +0000614
615 // show_existing_frames don't actually send refresh_frame_flags so set the
616 // flags to 0 to keep things consistent.
Jingning Han4338bcd2021-06-19 14:48:40 -0700617 if (frame_params->show_existing_frame) return 0;
David Turner6e8b4d92019-02-18 15:01:33 +0000618
Marco Paniconi53218432022-10-10 13:00:27 -0700619 const RTC_REF *const rtc_ref = &cpi->ppi->rtc_ref;
Marco Paniconi7aaca882022-08-28 21:58:59 -0700620 if (is_frame_droppable(rtc_ref, ext_refresh_frame_flags)) return 0;
Jingning Hanbcbbd8c2019-07-21 16:37:12 -0700621
Jingning Han9b65d232022-08-09 10:27:13 -0700622#if !CONFIG_REALTIME_ONLY
623 if (cpi->use_ducky_encode &&
624 cpi->ducky_encode_info.frame_info.gop_mode == DUCKY_ENCODE_GOP_MODE_RCL) {
625 int new_fb_map_idx = cpi->ppi->gf_group.update_ref_idx[gf_index];
626 if (new_fb_map_idx == INVALID_IDX) return 0;
627 return 1 << new_fb_map_idx;
628 }
629#endif // !CONFIG_REALTIME_ONLY
David Turner6e8b4d92019-02-18 15:01:33 +0000630
Jingning Han9b65d232022-08-09 10:27:13 -0700631 int refresh_mask = 0;
Vishesh38c05d72020-04-14 12:19:14 +0530632 if (ext_refresh_frame_flags->update_pending) {
Marco Paniconi7aaca882022-08-28 21:58:59 -0700633 if (rtc_ref->set_ref_frame_config ||
Marco Paniconi1a3a74a2022-08-28 22:26:48 -0700634 use_rtc_reference_structure_one_layer(cpi)) {
Marco Paniconid8574e32019-08-04 21:30:12 -0700635 for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++) {
Marco Paniconi7aaca882022-08-28 21:58:59 -0700636 int ref_frame_map_idx = rtc_ref->ref_idx[i];
637 refresh_mask |= rtc_ref->refresh[ref_frame_map_idx]
638 << ref_frame_map_idx;
Marco Paniconid8574e32019-08-04 21:30:12 -0700639 }
640 return refresh_mask;
641 }
David Turner6e8b4d92019-02-18 15:01:33 +0000642 // Unfortunately the encoder interface reflects the old refresh_*_frame
643 // flags so we have to replicate the old refresh_frame_flags logic here in
644 // order to preserve the behaviour of the flag overrides.
Marco Paniconi314bc362019-08-13 10:53:02 -0700645 int ref_frame_map_idx = get_ref_frame_map_idx(cm, LAST_FRAME);
Jingning Han0a2af4e2019-07-08 19:30:03 -0700646 if (ref_frame_map_idx != INVALID_IDX)
Vishesh38c05d72020-04-14 12:19:14 +0530647 refresh_mask |= ext_refresh_frame_flags->last_frame << ref_frame_map_idx;
Jingning Han0a2af4e2019-07-08 19:30:03 -0700648
649 ref_frame_map_idx = get_ref_frame_map_idx(cm, EXTREF_FRAME);
650 if (ref_frame_map_idx != INVALID_IDX)
Vishesh38c05d72020-04-14 12:19:14 +0530651 refresh_mask |= ext_refresh_frame_flags->bwd_ref_frame
652 << ref_frame_map_idx;
Jingning Han0a2af4e2019-07-08 19:30:03 -0700653
654 ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF2_FRAME);
655 if (ref_frame_map_idx != INVALID_IDX)
Vishesh38c05d72020-04-14 12:19:14 +0530656 refresh_mask |= ext_refresh_frame_flags->alt2_ref_frame
657 << ref_frame_map_idx;
Jingning Han0a2af4e2019-07-08 19:30:03 -0700658
David Turner6e8b4d92019-02-18 15:01:33 +0000659 if (frame_update_type == OVERLAY_UPDATE) {
Jingning Han5738e032019-07-22 15:22:52 -0700660 ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
661 if (ref_frame_map_idx != INVALID_IDX)
Vishesh38c05d72020-04-14 12:19:14 +0530662 refresh_mask |= ext_refresh_frame_flags->golden_frame
663 << ref_frame_map_idx;
David Turner6e8b4d92019-02-18 15:01:33 +0000664 } else {
Jingning Han0a2af4e2019-07-08 19:30:03 -0700665 ref_frame_map_idx = get_ref_frame_map_idx(cm, GOLDEN_FRAME);
666 if (ref_frame_map_idx != INVALID_IDX)
Vishesh38c05d72020-04-14 12:19:14 +0530667 refresh_mask |= ext_refresh_frame_flags->golden_frame
668 << ref_frame_map_idx;
Jingning Han0a2af4e2019-07-08 19:30:03 -0700669
670 ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
671 if (ref_frame_map_idx != INVALID_IDX)
Vishesh38c05d72020-04-14 12:19:14 +0530672 refresh_mask |= ext_refresh_frame_flags->alt_ref_frame
673 << ref_frame_map_idx;
David Turner6e8b4d92019-02-18 15:01:33 +0000674 }
675 return refresh_mask;
676 }
677
Jingning Hanc9c172d2019-07-23 14:10:32 -0700678 // Search for the open slot to store the current frame.
Remya Prakasan55318e32022-03-03 23:34:35 +0530679 int free_fb_index = get_free_ref_map_index(ref_frame_map_pairs);
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530680
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530681 // No refresh necessary for these frame types.
682 if (frame_update_type == OVERLAY_UPDATE ||
683 frame_update_type == INTNL_OVERLAY_UPDATE)
684 return refresh_mask;
685
686 // If there is an open slot, refresh that one instead of replacing a
687 // reference.
688 if (free_fb_index != INVALID_IDX) {
689 refresh_mask = 1 << free_fb_index;
690 return refresh_mask;
691 }
Remya Prakasana05ccde2021-06-07 21:24:11 +0530692 const int enable_refresh_skip = !is_one_pass_rt_params(cpi);
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530693 const int update_arf = frame_update_type == ARF_UPDATE;
694 const int refresh_idx =
Remya Prakasandaf23942022-06-08 14:53:46 +0530695 get_refresh_idx(ref_frame_map_pairs, update_arf, &cpi->ppi->gf_group,
696 gf_index, enable_refresh_skip, cur_disp_order);
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530697 return 1 << refresh_idx;
David Turner6e8b4d92019-02-18 15:01:33 +0000698}
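
/* Whatever path av1_get_refresh_frame_flags() takes, the result is a bitmask
 * over the REF_FRAMES physical slots: either a single bit for a free or
 * evicted slot, or the OR of externally requested refreshes shifted to their
 * mapped slots. Below is a standalone sketch of that composition with made-up
 * slot numbers: */
#if 0
#include <stdio.h>

int main(void) {
  /* External-override path: refresh LAST (mapped to slot 2) and ALTREF
   * (mapped to slot 6). */
  int refresh_mask = 0;
  refresh_mask |= 1 << 2;
  refresh_mask |= 1 << 6;
  printf("override mask = 0x%x\n", refresh_mask); /* 0x44 */

  /* Normal path: a single slot, e.g. a free slot at index 5. */
  printf("single-slot mask = 0x%x\n", 1 << 5); /* 0x20 */
  return 0;
}
#endif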
699
Cheng Chen7abe3132019-06-19 11:55:28 -0700700#if !CONFIG_REALTIME_ONLY
Cheng Chen5083a9f2019-07-12 15:33:34 -0700701void setup_mi(AV1_COMP *const cpi, YV12_BUFFER_CONFIG *src) {
702 AV1_COMMON *const cm = &cpi->common;
703 const int num_planes = av1_num_planes(cm);
704 MACROBLOCK *const x = &cpi->td.mb;
705 MACROBLOCKD *const xd = &x->e_mbd;
706
Tarundeep Singh4243e622021-04-20 16:10:22 +0530707 av1_setup_src_planes(x, src, 0, 0, num_planes, cm->seq_params->sb_size);
Cheng Chen5083a9f2019-07-12 15:33:34 -0700708
Tarundeep Singh4243e622021-04-20 16:10:22 +0530709 av1_setup_block_planes(xd, cm->seq_params->subsampling_x,
710 cm->seq_params->subsampling_y, num_planes);
Cheng Chen5083a9f2019-07-12 15:33:34 -0700711
Urvang Joshi2603bfe2020-03-25 13:33:18 -0700712 set_mi_offsets(&cm->mi_params, xd, 0, 0);
Cheng Chen5083a9f2019-07-12 15:33:34 -0700713}
714
bohanli0db9c512020-06-12 17:43:06 -0700715// Apply temporal filtering to source frames and encode the filtered frame.
716// If the current frame does not require filtering, this function is identical
717// to av1_encode() except that tpl is not performed.
Cheng Chen7abe3132019-06-19 11:55:28 -0700718static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
719 EncodeFrameInput *const frame_input,
Wan-Teh Chang4b1699e2023-03-10 08:20:51 -0800720 const EncodeFrameParams *const frame_params,
Hui Su028ad7d2020-04-13 23:24:32 -0700721 EncodeFrameResults *const frame_results) {
Yunqing Wang7a3ad542020-11-03 23:40:24 -0800722#if CONFIG_COLLECT_COMPONENT_TIMING
723 if (cpi->oxcf.pass == 2) start_timing(cpi, denoise_and_encode_time);
724#endif
Cheng Chen7abe3132019-06-19 11:55:28 -0700725 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
726 AV1_COMMON *const cm = &cpi->common;
Yunqing Wangd849db42023-06-15 14:21:00 -0700727
Angie Chiangbb201602021-07-13 18:50:15 -0700728 GF_GROUP *const gf_group = &cpi->ppi->gf_group;
Mufaddal Chakeraab20d372021-03-17 12:18:34 +0530729 FRAME_UPDATE_TYPE update_type =
Mufaddal Chakera8ee04fa2021-03-17 13:33:18 +0530730 get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
Angie Chiang5e711222021-11-05 17:28:03 -0700731 const int is_second_arf =
732 av1_gop_is_second_arf(gf_group, cpi->gf_frame_index);
Jingning Han97b64a72019-10-08 11:40:51 -0700733
Hui Su028ad7d2020-04-13 23:24:32 -0700734 // Decide whether to apply temporal filtering to the source frame.
Angie Chiang8f122052021-11-10 18:47:16 -0800735 int apply_filtering =
736 av1_is_temporal_filter_on(oxcf) && !is_stat_generation_stage(cpi);
Angie Chiangdd950e62021-11-19 17:48:25 -0800737 if (update_type != KF_UPDATE && update_type != ARF_UPDATE && !is_second_arf) {
738 apply_filtering = 0;
739 }
Angie Chiang8f122052021-11-10 18:47:16 -0800740 if (apply_filtering) {
741 if (frame_params->frame_type == KEY_FRAME) {
742 // TODO(angiebird): Move the noise level check to av1_tf_info_filtering.
743 // Decide whether it is allowed to perform key frame filtering
744 int allow_kf_filtering = oxcf->kf_cfg.enable_keyframe_filtering &&
745 !frame_params->show_existing_frame &&
746 !is_lossless_requested(&oxcf->rc_cfg);
747 if (allow_kf_filtering) {
Diksha Singh7d7be2a2023-02-15 22:20:08 +0530748 double y_noise_level = 0.0;
749 av1_estimate_noise_level(
750 frame_input->source, &y_noise_level, AOM_PLANE_Y, AOM_PLANE_Y,
751 cm->seq_params->bit_depth, NOISE_ESTIMATION_EDGE_THRESHOLD);
Angie Chiang8f122052021-11-10 18:47:16 -0800752 apply_filtering = y_noise_level > 0;
753 } else {
754 apply_filtering = 0;
755 }
756 // If we are doing kf filtering, set up a few things.
757 if (apply_filtering) {
758 av1_setup_past_independence(cm);
759 }
760 } else if (is_second_arf) {
761 apply_filtering = cpi->sf.hl_sf.second_alt_ref_filtering;
Sarah Parker21f1e6e2019-11-15 16:48:29 -0800762 }
bohanli0db9c512020-06-12 17:43:06 -0700763 }
Jingning Hanf13d7072022-06-14 16:04:41 -0700764
Yunqing Wang7a3ad542020-11-03 23:40:24 -0800765#if CONFIG_COLLECT_COMPONENT_TIMING
766 if (cpi->oxcf.pass == 2) start_timing(cpi, apply_filtering_time);
767#endif
bohanli0db9c512020-06-12 17:43:06 -0700768 // Save the pointer to the original source image.
769 YV12_BUFFER_CONFIG *source_buffer = frame_input->source;
770 // apply filtering to frame
771 if (apply_filtering) {
772 int show_existing_alt_ref = 0;
Angie Chiangbb268f32021-11-06 22:09:10 -0700773 FRAME_DIFF frame_diff;
774 int top_index = 0;
775 int bottom_index = 0;
776 const int q_index = av1_rc_pick_q_and_bounds(
777 cpi, cpi->oxcf.frm_dim_cfg.width, cpi->oxcf.frm_dim_cfg.height,
778 cpi->gf_frame_index, &bottom_index, &top_index);
779
bohanli0db9c512020-06-12 17:43:06 -0700780 // TODO(bohanli): figure out why we need frame_type in cm here.
781 cm->current_frame.frame_type = frame_params->frame_type;
Angie Chiangdeba7482021-09-13 22:43:35 -0700782 if (update_type == KF_UPDATE || update_type == ARF_UPDATE) {
Angie Chiangdeba7482021-09-13 22:43:35 -0700783 YV12_BUFFER_CONFIG *tf_buf = av1_tf_info_get_filtered_buf(
Angie Chiangbb268f32021-11-06 22:09:10 -0700784 &cpi->ppi->tf_info, cpi->gf_frame_index, &frame_diff);
Angie Chiangdeba7482021-09-13 22:43:35 -0700785 if (tf_buf != NULL) {
786 frame_input->source = tf_buf;
Angie Chiangbb268f32021-11-06 22:09:10 -0700787 show_existing_alt_ref = av1_check_show_filtered_frame(
Angie Chiang8f122052021-11-10 18:47:16 -0800788 tf_buf, &frame_diff, q_index, cm->seq_params->bit_depth);
789 if (show_existing_alt_ref) {
790 cpi->common.showable_frame |= 1;
Bohan Lib2b71ae2023-01-17 12:48:57 -0800791 } else {
792 cpi->common.showable_frame = 0;
Angie Chiang8f122052021-11-10 18:47:16 -0800793 }
Angie Chiangdeba7482021-09-13 22:43:35 -0700794 }
Angie Chiang8f122052021-11-10 18:47:16 -0800795 if (gf_group->frame_type[cpi->gf_frame_index] != KEY_FRAME) {
796 cpi->ppi->show_existing_alt_ref = show_existing_alt_ref;
797 }
798 }
799
800 if (is_second_arf) {
Anupam Pandeybb2a98e2022-08-24 11:36:31 +0530801 // Allocate the memory for tf_buf_second_arf buffer, only when it is
802 // required.
803 int ret = aom_realloc_frame_buffer(
804 &cpi->ppi->tf_info.tf_buf_second_arf, oxcf->frm_dim_cfg.width,
805 oxcf->frm_dim_cfg.height, cm->seq_params->subsampling_x,
806 cm->seq_params->subsampling_y, cm->seq_params->use_highbitdepth,
807 cpi->oxcf.border_in_pixels, cm->features.byte_alignment, NULL, NULL,
Rachel Barker674eaa02022-12-22 16:03:44 +0000808 NULL, cpi->image_pyramid_levels, 0);
Anupam Pandeybb2a98e2022-08-24 11:36:31 +0530809 if (ret)
810 aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
811 "Failed to allocate tf_buf_second_arf");
812
Angie Chiang29aaace2021-11-15 16:23:42 -0800813 YV12_BUFFER_CONFIG *tf_buf_second_arf =
814 &cpi->ppi->tf_info.tf_buf_second_arf;
      // Temporal filtering for the second ARF was not applied ahead of time
      // in av1_tf_info_filtering().
Angie Chiangdeba7482021-09-13 22:43:35 -0700817 const int arf_src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
Angie Chiang29aaace2021-11-15 16:23:42 -0800818 // Right now, we are still using tf_buf_second_arf due to
Angie Chiangdeba7482021-09-13 22:43:35 -0700819 // implementation complexity.
Angie Chiang29aaace2021-11-15 16:23:42 -0800820 // TODO(angiebird): Reuse tf_info->tf_buf here.
Angie Chiangbb268f32021-11-06 22:09:10 -0700821 av1_temporal_filter(cpi, arf_src_index, cpi->gf_frame_index, &frame_diff,
Angie Chiang29aaace2021-11-15 16:23:42 -0800822 tf_buf_second_arf);
823 show_existing_alt_ref = av1_check_show_filtered_frame(
824 tf_buf_second_arf, &frame_diff, q_index, cm->seq_params->bit_depth);
Angie Chiang5e711222021-11-05 17:28:03 -0700825 if (show_existing_alt_ref) {
Angie Chiang29aaace2021-11-15 16:23:42 -0800826 aom_extend_frame_borders(tf_buf_second_arf, av1_num_planes(cm));
827 frame_input->source = tf_buf_second_arf;
Angie Chiangdeba7482021-09-13 22:43:35 -0700828 }
Angie Chiang8f122052021-11-10 18:47:16 -0800829 // Currently INTNL_ARF_UPDATE only do show_existing.
Angie Chiang5e711222021-11-05 17:28:03 -0700830 cpi->common.showable_frame |= 1;
bohanli0db9c512020-06-12 17:43:06 -0700831 }
Bohan Li5f166b82022-08-29 10:00:32 -0700832
833 // Copy source metadata to the temporal filtered frame
Wan-Teh Chang8871a292022-08-30 15:32:51 -0700834 if (source_buffer->metadata &&
835 aom_copy_metadata_to_frame_buffer(frame_input->source,
836 source_buffer->metadata)) {
837 aom_internal_error(
838 cm->error, AOM_CODEC_MEM_ERROR,
839 "Failed to copy source metadata to the temporal filtered frame");
Bohan Li5f166b82022-08-29 10:00:32 -0700840 }
Hui Sub1485372020-04-16 23:31:39 -0700841 }
Yunqing Wang7a3ad542020-11-03 23:40:24 -0800842#if CONFIG_COLLECT_COMPONENT_TIMING
843 if (cpi->oxcf.pass == 2) end_timing(cpi, apply_filtering_time);
844#endif
Jingning Han97b64a72019-10-08 11:40:51 -0700845
Nithya V Sde3a75d2021-11-15 11:27:51 +0530846 int set_mv_params = frame_params->frame_type == KEY_FRAME ||
847 update_type == ARF_UPDATE || update_type == GF_UPDATE;
848 cm->show_frame = frame_params->show_frame;
849 cm->current_frame.frame_type = frame_params->frame_type;
Angie Chiang2b17f852021-11-03 12:07:25 -0700850 // TODO(bohanli): Why is this? what part of it is necessary?
Wan-Teh Chang25e21c72022-09-15 13:17:41 -0700851 av1_set_frame_size(cpi, cm->width, cm->height);
Nithya V Sde3a75d2021-11-15 11:27:51 +0530852 if (set_mv_params) av1_set_mv_search_params(cpi);
Jingning Handcb4fdc2020-09-22 09:48:56 -0700853
Angie Chiang2d4bb0e2021-05-28 18:31:46 -0700854#if CONFIG_RD_COMMAND
855 if (frame_params->frame_type == KEY_FRAME) {
856 char filepath[] = "rd_command.txt";
857 av1_read_rd_command(filepath, &cpi->rd_command);
858 }
859#endif // CONFIG_RD_COMMAND
Angie Chiang2b17f852021-11-03 12:07:25 -0700860 if (cpi->gf_frame_index == 0 && !is_stat_generation_stage(cpi)) {
861 // perform tpl after filtering
862 int allow_tpl =
863 oxcf->gf_cfg.lag_in_frames > 1 && oxcf->algo_cfg.enable_tpl_model;
864 if (gf_group->size > MAX_LENGTH_TPL_FRAME_STATS) {
865 allow_tpl = 0;
Jingning Han5c01a462021-05-11 13:31:21 -0700866 }
Ranjit Kumar Tulabandu5e47dc22022-12-06 23:04:13 +0530867 if (frame_params->frame_type != KEY_FRAME) {
      // In rare cases, it's possible to have a non-ARF/GF update_type here.
      // We should set allow_tpl to zero in that situation.
870 allow_tpl =
Jingning Han57ff8ca2022-06-08 09:03:57 -0700871 allow_tpl && (update_type == ARF_UPDATE || update_type == GF_UPDATE ||
872 (cpi->use_ducky_encode &&
Jingning Hanb2c39602022-06-24 00:47:29 -0700873 cpi->ducky_encode_info.frame_info.gop_mode ==
874 DUCKY_ENCODE_GOP_MODE_RCL));
Angie Chiang2b17f852021-11-03 12:07:25 -0700875 }
876
877 if (allow_tpl) {
878 if (!cpi->skip_tpl_setup_stats) {
879 av1_tpl_preload_rc_estimate(cpi, frame_params);
Angie Chiang30546ab2021-10-29 18:30:45 -0700880 av1_tpl_setup_stats(cpi, 0, frame_params);
Angie Chiangb17a57d2021-12-28 17:34:39 -0800881#if CONFIG_BITRATE_ACCURACY && !CONFIG_THREE_PASS
Angie Chiang86c82e12021-11-22 18:36:36 -0800882 assert(cpi->gf_frame_index == 0);
Angie Chiang2b17f852021-11-03 12:07:25 -0700883 av1_vbr_rc_update_q_index_list(&cpi->vbr_rc_info, &cpi->ppi->tpl_data,
Angie Chiang86c82e12021-11-22 18:36:36 -0800884 gf_group, cm->seq_params->bit_depth);
Angie Chiang2b17f852021-11-03 12:07:25 -0700885#endif
886 }
887 } else {
Angie Chiang9d359132021-10-01 16:43:11 -0700888 av1_init_tpl_stats(&cpi->ppi->tpl_data);
Angie Chiang2b17f852021-11-03 12:07:25 -0700889 }
Angie Chiangb17a57d2021-12-28 17:34:39 -0800890#if CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS
891 if (cpi->oxcf.pass == AOM_RC_SECOND_PASS &&
892 cpi->second_pass_log_stream != NULL) {
893 TPL_INFO *tpl_info;
894 AOM_CHECK_MEM_ERROR(cm->error, tpl_info, aom_malloc(sizeof(*tpl_info)));
895 av1_pack_tpl_info(tpl_info, gf_group, &cpi->ppi->tpl_data);
896 av1_write_tpl_info(tpl_info, cpi->second_pass_log_stream,
897 cpi->common.error);
898 aom_free(tpl_info);
899 }
900#endif // CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS
Deepa K G5a6eb3b2020-10-16 20:09:14 +0530901 }
Jingning Han97b64a72019-10-08 11:40:51 -0700902
903 if (av1_encode(cpi, dest, frame_input, frame_params, frame_results) !=
904 AOM_CODEC_OK) {
905 return AOM_CODEC_ERROR;
906 }
907
908 // Set frame_input source to true source for psnr calculation.
Mufaddal Chakeraea4c6d52020-06-12 15:09:14 +0530909 if (apply_filtering && is_psnr_calc_enabled(cpi)) {
Jayasanker Jdba4f0f2021-07-27 22:45:35 +0530910 cpi->source = av1_realloc_and_scale_if_required(
911 cm, source_buffer, &cpi->scaled_source, cm->features.interp_filter, 0,
Rachel Barker674eaa02022-12-22 16:03:44 +0000912 false, true, cpi->oxcf.border_in_pixels, cpi->image_pyramid_levels);
bohanli0db9c512020-06-12 17:43:06 -0700913 cpi->unscaled_source = source_buffer;
Jingning Han97b64a72019-10-08 11:40:51 -0700914 }
Yunqing Wang7a3ad542020-11-03 23:40:24 -0800915#if CONFIG_COLLECT_COMPONENT_TIMING
916 if (cpi->oxcf.pass == 2) end_timing(cpi, denoise_and_encode_time);
917#endif
Cheng Chen7abe3132019-06-19 11:55:28 -0700918 return AOM_CODEC_OK;
919}
920#endif // !CONFIG_REALTIME_ONLY
921
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530922/*!\cond */
923// Struct to keep track of relevant reference frame data.
924typedef struct {
925 int map_idx;
926 int disp_order;
927 int pyr_level;
928 int used;
929} RefBufMapData;
930/*!\endcond */
931
932// Comparison function to sort reference frames in ascending display order.
933static int compare_map_idx_pair_asc(const void *a, const void *b) {
934 if (((RefBufMapData *)a)->disp_order == ((RefBufMapData *)b)->disp_order) {
935 return 0;
936 } else if (((const RefBufMapData *)a)->disp_order >
937 ((const RefBufMapData *)b)->disp_order) {
938 return 1;
939 } else {
940 return -1;
941 }
942}
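
/* This comparator is handed to qsort() in av1_get_ref_frames() below to order
 * the in-use reference buffers by display order. Below is a standalone sketch
 * of the same pattern with a trimmed-down struct; ExPair and ex_cmp_asc are
 * hypothetical names. */
#if 0
#include <stdio.h>
#include <stdlib.h>

typedef struct { int map_idx, disp_order; } ExPair;

static int ex_cmp_asc(const void *a, const void *b) {
  const int da = ((const ExPair *)a)->disp_order;
  const int db = ((const ExPair *)b)->disp_order;
  return (da > db) - (da < db);
}

int main(void) {
  ExPair bufs[3] = { { 0, 12 }, { 1, 4 }, { 2, 8 } };
  qsort(bufs, 3, sizeof(bufs[0]), ex_cmp_asc);
  for (int i = 0; i < 3; ++i) printf("%d ", bufs[i].disp_order); /* 4 8 12 */
  printf("\n");
  return 0;
}
#endif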
943
944// Checks to see if a particular reference frame is already in the reference
945// frame map.
946static int is_in_ref_map(RefBufMapData *map, int disp_order, int n_frames) {
947 for (int i = 0; i < n_frames; i++) {
948 if (disp_order == map[i].disp_order) return 1;
949 }
950 return 0;
951}
952
953// Add a reference buffer index to a named reference slot.
954static void add_ref_to_slot(RefBufMapData *ref, int *const remapped_ref_idx,
955 int frame) {
956 remapped_ref_idx[frame - LAST_FRAME] = ref->map_idx;
957 ref->used = 1;
958}
959
960// Threshold dictating when we are allowed to start considering
961// leaving lowest level frames unmapped.
962#define LOW_LEVEL_FRAMES_TR 5
963
964// Find which reference buffer should be left out of the named mapping.
965// This is because there are 8 reference buffers and only 7 named slots.
966static void set_unmapped_ref(RefBufMapData *buffer_map, int n_bufs,
967 int n_min_level_refs, int min_level,
968 int cur_frame_disp) {
969 int max_dist = 0;
970 int unmapped_idx = -1;
971 if (n_bufs <= ALTREF_FRAME) return;
972 for (int i = 0; i < n_bufs; i++) {
973 if (buffer_map[i].used) continue;
974 if (buffer_map[i].pyr_level != min_level ||
975 n_min_level_refs >= LOW_LEVEL_FRAMES_TR) {
976 int dist = abs(cur_frame_disp - buffer_map[i].disp_order);
977 if (dist > max_dist) {
978 max_dist = dist;
979 unmapped_idx = i;
980 }
981 }
982 }
983 assert(unmapped_idx >= 0 && "Unmapped reference not found");
984 buffer_map[unmapped_idx].used = 1;
985}
986
Remya Prakasan55318e32022-03-03 23:34:35 +0530987void av1_get_ref_frames(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
Remya Prakasandaf23942022-06-08 14:53:46 +0530988 int cur_frame_disp, const AV1_COMP *cpi, int gf_index,
Remya Prakasan55318e32022-03-03 23:34:35 +0530989 int is_parallel_encode,
Remya Prakasan55318e32022-03-03 23:34:35 +0530990 int remapped_ref_idx[REF_FRAMES]) {
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +0530991 int buf_map_idx = 0;
992
993 // Initialize reference frame mappings.
994 for (int i = 0; i < REF_FRAMES; ++i) remapped_ref_idx[i] = INVALID_IDX;
995
Jingning Han9b65d232022-08-09 10:27:13 -0700996#if !CONFIG_REALTIME_ONLY
997 if (cpi->use_ducky_encode &&
998 cpi->ducky_encode_info.frame_info.gop_mode == DUCKY_ENCODE_GOP_MODE_RCL) {
999 for (int rf = LAST_FRAME; rf < REF_FRAMES; ++rf) {
1000 if (cpi->ppi->gf_group.ref_frame_list[gf_index][rf] != INVALID_IDX) {
1001 remapped_ref_idx[rf - LAST_FRAME] =
1002 cpi->ppi->gf_group.ref_frame_list[gf_index][rf];
Angie Chianged34bad2023-02-27 23:23:53 -08001003 }
1004 }
1005
1006 int valid_rf_idx = 0;
Wan-Teh Changaa1c26c2023-03-09 16:50:43 -08001007 static const int ref_frame_type_order[REF_FRAMES - LAST_FRAME] = {
Angie Chianged34bad2023-02-27 23:23:53 -08001008 GOLDEN_FRAME, ALTREF_FRAME, LAST_FRAME, BWDREF_FRAME,
1009 ALTREF2_FRAME, LAST2_FRAME, LAST3_FRAME
1010 };
1011 for (int i = 0; i < REF_FRAMES - LAST_FRAME; i++) {
1012 int rf = ref_frame_type_order[i];
1013 if (remapped_ref_idx[rf - LAST_FRAME] != INVALID_IDX) {
Jingning Han5eee3d42022-09-02 16:05:55 -07001014 valid_rf_idx = remapped_ref_idx[rf - LAST_FRAME];
Angie Chianged34bad2023-02-27 23:23:53 -08001015 break;
Jingning Han9b65d232022-08-09 10:27:13 -07001016 }
1017 }
Jingning Hanc7185bd2022-08-31 13:20:23 -07001018
Jingning Han5eee3d42022-09-02 16:05:55 -07001019 for (int i = 0; i < REF_FRAMES; ++i) {
Angie Chianged34bad2023-02-27 23:23:53 -08001020 if (remapped_ref_idx[i] == INVALID_IDX) {
Jingning Han5eee3d42022-09-02 16:05:55 -07001021 remapped_ref_idx[i] = valid_rf_idx;
Angie Chianged34bad2023-02-27 23:23:53 -08001022 }
Jingning Han5eee3d42022-09-02 16:05:55 -07001023 }
1024
Jingning Han8c9ffe52022-08-11 13:00:32 -07001025 return;
Jingning Han9b65d232022-08-09 10:27:13 -07001026 }
1027#endif // !CONFIG_REALTIME_ONLY
1028
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301029 RefBufMapData buffer_map[REF_FRAMES];
1030 int n_bufs = 0;
1031 memset(buffer_map, 0, REF_FRAMES * sizeof(buffer_map[0]));
1032 int min_level = MAX_ARF_LAYERS;
1033 int max_level = 0;
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301034 GF_GROUP *gf_group = &cpi->ppi->gf_group;
1035 int skip_ref_unmapping = 0;
1036 int is_one_pass_rt = is_one_pass_rt_params(cpi);
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301037
1038 // Go through current reference buffers and store display order, pyr level,
1039 // and map index.
1040 for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
1041 // Get reference frame buffer.
1042 RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
1043 if (ref_pair.disp_order == -1) continue;
1044 const int frame_order = ref_pair.disp_order;
1045 // Avoid duplicates.
1046 if (is_in_ref_map(buffer_map, frame_order, n_bufs)) continue;
1047 const int reference_frame_level = ref_pair.pyr_level;
1048
1049 // Keep track of the lowest and highest levels that currently exist.
1050 if (reference_frame_level < min_level) min_level = reference_frame_level;
1051 if (reference_frame_level > max_level) max_level = reference_frame_level;
1052
1053 buffer_map[n_bufs].map_idx = map_idx;
1054 buffer_map[n_bufs].disp_order = frame_order;
1055 buffer_map[n_bufs].pyr_level = reference_frame_level;
1056 buffer_map[n_bufs].used = 0;
1057 n_bufs++;
1058 }
1059
1060 // Sort frames in ascending display order.
1061 qsort(buffer_map, n_bufs, sizeof(buffer_map[0]), compare_map_idx_pair_asc);
1062
1063 int n_min_level_refs = 0;
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301064 int closest_past_ref = -1;
1065 int golden_idx = -1;
1066 int altref_idx = -1;
1067
1068 // Find the GOLDEN_FRAME and BWDREF_FRAME.
1069 // Also collect various stats about the reference frames for the remaining
1070 // mappings.
1071 for (int i = n_bufs - 1; i >= 0; i--) {
1072 if (buffer_map[i].pyr_level == min_level) {
1073 // Keep track of the number of lowest level frames.
1074 n_min_level_refs++;
1075 if (buffer_map[i].disp_order < cur_frame_disp && golden_idx == -1 &&
1076 remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] == INVALID_IDX) {
1077 // Save index for GOLDEN.
1078 golden_idx = i;
1079 } else if (buffer_map[i].disp_order > cur_frame_disp &&
1080 altref_idx == -1 &&
1081 remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] == INVALID_IDX) {
1082 // Save index for ALTREF.
1083 altref_idx = i;
1084 }
1085 } else if (buffer_map[i].disp_order == cur_frame_disp) {
1086      // Map the BWDREF_FRAME if the current frame is a show_existing_frame of
      // this buffer.
1087 add_ref_to_slot(&buffer_map[i], remapped_ref_idx, BWDREF_FRAME);
1088 }
1089
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301090 // During parallel encodes of lower layer frames, exclude the first frame
1091 // (frame_parallel_level 1) from being used for the reference assignment of
1092 // the second frame (frame_parallel_level 2).
Remya Prakasan881cdf92021-07-04 18:43:15 +05301093 if (!is_one_pass_rt && gf_group->frame_parallel_level[gf_index] == 2 &&
1094 gf_group->frame_parallel_level[gf_index - 1] == 1 &&
1095 gf_group->update_type[gf_index - 1] == INTNL_ARF_UPDATE) {
1096 assert(gf_group->update_type[gf_index] == INTNL_ARF_UPDATE);
Remya Prakasanffeb4972022-06-21 20:00:28 +05301097#if CONFIG_FPMT_TEST
Remya Prakasan6566bc82021-11-05 23:21:12 +05301098 is_parallel_encode = (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_ENCODE)
1099 ? is_parallel_encode
1100 : 0;
Remya Prakasanffeb4972022-06-21 20:00:28 +05301101#endif // CONFIG_FPMT_TEST
Remya Prakasan881cdf92021-07-04 18:43:15 +05301102      // If parallel cpis are active, use ref_idx_to_skip; otherwise, use the
1103      // display index.
1104 assert(IMPLIES(is_parallel_encode, cpi->ref_idx_to_skip != INVALID_IDX));
1105 assert(IMPLIES(!is_parallel_encode,
1106 gf_group->skip_frame_as_ref[gf_index] != INVALID_IDX));
1107 buffer_map[i].used = is_parallel_encode
1108 ? (buffer_map[i].map_idx == cpi->ref_idx_to_skip)
1109 : (buffer_map[i].disp_order ==
1110 gf_group->skip_frame_as_ref[gf_index]);
1111 // In case a ref frame is excluded from being used during assignment,
1112 // skip the call to set_unmapped_ref(). Applicable in steady state.
1113 if (buffer_map[i].used) skip_ref_unmapping = 1;
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301114 }
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301115
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301116 // Keep track of where the frames change from being past frames to future
1117 // frames.
1118 if (buffer_map[i].disp_order < cur_frame_disp && closest_past_ref < 0)
1119 closest_past_ref = i;
1120 }
1121
1122 // Do not map GOLDEN and ALTREF based on their pyramid level if all reference
1123 // frames have the same level.
1124  if (n_min_level_refs <= n_bufs - 1) {
1125 // Map the GOLDEN_FRAME.
1126 if (golden_idx > -1)
1127 add_ref_to_slot(&buffer_map[golden_idx], remapped_ref_idx, GOLDEN_FRAME);
1128 // Map the ALTREF_FRAME.
1129 if (altref_idx > -1)
1130 add_ref_to_slot(&buffer_map[altref_idx], remapped_ref_idx, ALTREF_FRAME);
1131 }
1132
1133 // Find the buffer to be excluded from the mapping.
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301134 if (!skip_ref_unmapping)
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301135 set_unmapped_ref(buffer_map, n_bufs, n_min_level_refs, min_level,
1136 cur_frame_disp);
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301137
1138 // Place past frames in LAST_FRAME, LAST2_FRAME, and LAST3_FRAME.
1139 for (int frame = LAST_FRAME; frame < GOLDEN_FRAME; frame++) {
1140 // Continue if the current ref slot is already full.
1141 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1142 // Find the next unmapped reference buffer
1143    // in decreasing output order relative to the current picture.
1144 int next_buf_max = 0;
1145 int next_disp_order = INT_MIN;
1146 for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
1147 if (!buffer_map[buf_map_idx].used &&
1148 buffer_map[buf_map_idx].disp_order < cur_frame_disp &&
1149 buffer_map[buf_map_idx].disp_order > next_disp_order) {
1150 next_disp_order = buffer_map[buf_map_idx].disp_order;
1151 next_buf_max = buf_map_idx;
1152 }
1153 }
1154 buf_map_idx = next_buf_max;
1155 if (buf_map_idx < 0) break;
1156 if (buffer_map[buf_map_idx].used) break;
1157 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1158 }
1159
1160 // Place future frames (if there are any) in BWDREF_FRAME and ALTREF2_FRAME.
1161 for (int frame = BWDREF_FRAME; frame < REF_FRAMES; frame++) {
1162 // Continue if the current ref slot is already full.
1163 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1164 // Find the next unmapped reference buffer
1165    // in increasing output order relative to the current picture.
1166 int next_buf_max = 0;
1167 int next_disp_order = INT_MAX;
1168 for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
1169 if (!buffer_map[buf_map_idx].used &&
1170 buffer_map[buf_map_idx].disp_order > cur_frame_disp &&
1171 buffer_map[buf_map_idx].disp_order < next_disp_order) {
1172 next_disp_order = buffer_map[buf_map_idx].disp_order;
1173 next_buf_max = buf_map_idx;
1174 }
1175 }
1176 buf_map_idx = next_buf_max;
1177 if (buf_map_idx < 0) break;
1178 if (buffer_map[buf_map_idx].used) break;
1179 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1180 }
1181
1182 // Place remaining past frames.
1183 buf_map_idx = closest_past_ref;
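  // Walk backwards from the closest past frame so progressively older frames
  // fill the remaining empty slots.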
1184 for (int frame = LAST_FRAME; frame < REF_FRAMES; frame++) {
1185 // Continue if the current ref slot is already full.
1186 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1187 // Find the next unmapped reference buffer.
1188 for (; buf_map_idx >= 0; buf_map_idx--) {
1189 if (!buffer_map[buf_map_idx].used) break;
1190 }
1191 if (buf_map_idx < 0) break;
1192 if (buffer_map[buf_map_idx].used) break;
1193 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1194 }
1195
1196 // Place remaining future frames.
1197 buf_map_idx = n_bufs - 1;
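  // Walk backwards from the most distant future frame, without crossing into
  // the past frames.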
1198 for (int frame = ALTREF_FRAME; frame >= LAST_FRAME; frame--) {
1199 // Continue if the current ref slot is already full.
1200 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1201 // Find the next unmapped reference buffer.
1202 for (; buf_map_idx > closest_past_ref; buf_map_idx--) {
1203 if (!buffer_map[buf_map_idx].used) break;
1204 }
1205 if (buf_map_idx < 0) break;
1206 if (buffer_map[buf_map_idx].used) break;
1207 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1208 }
1209
1210 // Fill any slots that are empty (should only happen for the first 7 frames).
1211 for (int i = 0; i < REF_FRAMES; ++i)
1212 if (remapped_ref_idx[i] == INVALID_IDX) remapped_ref_idx[i] = 0;
1213}
Jingning Han0a2af4e2019-07-08 19:30:03 -07001214
David Turner056f7cd2019-01-07 17:48:13 +00001215int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
David Turner1539bb02019-01-24 15:28:13 +00001216 uint8_t *const dest, unsigned int *frame_flags,
David Turnerdedd8ff2019-01-23 13:59:46 +00001217 int64_t *const time_stamp, int64_t *const time_end,
Yue Chen1bc5be62018-08-24 13:57:32 -07001218 const aom_rational64_t *const timestamp_ratio,
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301219 int *const pop_lookahead, int flush) {
Vishesh7e9873d2020-06-08 15:41:33 +05301220 AV1EncoderConfig *const oxcf = &cpi->oxcf;
David Turner475a3132019-01-18 15:17:17 +00001221 AV1_COMMON *const cm = &cpi->common;
Mufaddal Chakera8ee04fa2021-03-17 13:33:18 +05301222 GF_GROUP *gf_group = &cpi->ppi->gf_group;
Vishesha195ca32020-04-07 18:46:20 +05301223 ExternalFlags *const ext_flags = &cpi->ext_flags;
Vishesh5b50e6d2020-06-10 19:20:07 +05301224 GFConfig *const gf_cfg = &oxcf->gf_cfg;
David Turner056f7cd2019-01-07 17:48:13 +00001225
David Turnerdedd8ff2019-01-23 13:59:46 +00001226 EncodeFrameInput frame_input;
David Turner04b70d82019-01-24 15:39:19 +00001227 EncodeFrameParams frame_params;
1228 EncodeFrameResults frame_results;
David Turnerdedd8ff2019-01-23 13:59:46 +00001229 memset(&frame_input, 0, sizeof(frame_input));
David Turner04b70d82019-01-24 15:39:19 +00001230 memset(&frame_params, 0, sizeof(frame_params));
1231 memset(&frame_results, 0, sizeof(frame_results));
1232
Angie Chiangb17a57d2021-12-28 17:34:39 -08001233#if CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS
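  // In the third pass, read back the GOP structure, TPL stats and per-frame
  // info logged during the second pass, then derive the base q index for the
  // chunk.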
1234 VBR_RATECTRL_INFO *vbr_rc_info = &cpi->vbr_rc_info;
1235 if (oxcf->pass == AOM_RC_THIRD_PASS && vbr_rc_info->ready == 0) {
1236 THIRD_PASS_FRAME_INFO frame_info[MAX_THIRD_PASS_BUF];
1237 av1_open_second_pass_log(cpi, 1);
1238 FILE *second_pass_log_stream = cpi->second_pass_log_stream;
1239 fseek(second_pass_log_stream, 0, SEEK_END);
1240 size_t file_size = ftell(second_pass_log_stream);
1241 rewind(second_pass_log_stream);
1242 size_t read_size = 0;
1243 while (read_size < file_size) {
1244 THIRD_PASS_GOP_INFO gop_info;
1245 struct aom_internal_error_info *error = cpi->common.error;
1246 // Read in GOP information from the second pass file.
1247 av1_read_second_pass_gop_info(second_pass_log_stream, &gop_info, error);
1248 TPL_INFO *tpl_info;
1249 AOM_CHECK_MEM_ERROR(cm->error, tpl_info, aom_malloc(sizeof(*tpl_info)));
1250 av1_read_tpl_info(tpl_info, second_pass_log_stream, error);
1251 // Read in per-frame info from second-pass encoding
1252 av1_read_second_pass_per_frame_info(second_pass_log_stream, frame_info,
1253 gop_info.num_frames, error);
1254 av1_vbr_rc_append_tpl_info(vbr_rc_info, tpl_info);
1255 read_size = ftell(second_pass_log_stream);
1256 aom_free(tpl_info);
1257 }
1258 av1_close_second_pass_log(cpi);
Angie Chiang215e9062022-04-06 11:35:31 -07001259 if (cpi->oxcf.rc_cfg.mode == AOM_Q) {
1260 vbr_rc_info->base_q_index = cpi->oxcf.rc_cfg.cq_level;
1261 av1_vbr_rc_compute_q_indices(
1262 vbr_rc_info->base_q_index, vbr_rc_info->total_frame_count,
1263 vbr_rc_info->qstep_ratio_list, cm->seq_params->bit_depth,
1264 vbr_rc_info->q_index_list);
1265 } else {
1266 vbr_rc_info->base_q_index = av1_vbr_rc_info_estimate_base_q(
1267 vbr_rc_info->total_bit_budget, cm->seq_params->bit_depth,
1268 vbr_rc_info->scale_factors, vbr_rc_info->total_frame_count,
1269 vbr_rc_info->update_type_list, vbr_rc_info->qstep_ratio_list,
1270 vbr_rc_info->txfm_stats_list, vbr_rc_info->q_index_list, NULL);
1271 }
Angie Chiangb17a57d2021-12-28 17:34:39 -08001272 vbr_rc_info->ready = 1;
Angie Chiang215e9062022-04-06 11:35:31 -07001273#if CONFIG_RATECTRL_LOG
1274 rc_log_record_chunk_info(&cpi->rc_log, vbr_rc_info->base_q_index,
1275 vbr_rc_info->total_frame_count);
1276#endif // CONFIG_RATECTRL_LOG
Angie Chiangb17a57d2021-12-28 17:34:39 -08001277 }
1278#endif // CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS
1279
Jingning Hand3e827d2020-08-16 16:07:24 -07001280  // Check if we need to buffer more source frames.
1281 if (flush == 0) {
1282 int srcbuf_size =
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +05301283 av1_lookahead_depth(cpi->ppi->lookahead, cpi->compressor_stage);
1284 int pop_size =
1285 av1_lookahead_pop_sz(cpi->ppi->lookahead, cpi->compressor_stage);
Jingning Hand3e827d2020-08-16 16:07:24 -07001286
1287    // Continue filling the lookahead buffer.
1288 if (srcbuf_size < pop_size) return -1;
1289 }
1290
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +05301291 if (!av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage)) {
Jingning Han3f3318f2020-08-16 16:12:10 -07001292#if !CONFIG_REALTIME_ONLY
Bohan Li445fdf62021-06-03 16:16:00 -07001293 if (flush && oxcf->pass == AOM_RC_FIRST_PASS &&
1294 !cpi->ppi->twopass.first_pass_done) {
Jingning Han3f3318f2020-08-16 16:12:10 -07001295 av1_end_first_pass(cpi); /* get last stats packet */
Mufaddal Chakera358cf212021-02-25 14:41:56 +05301296 cpi->ppi->twopass.first_pass_done = 1;
Jingning Han3f3318f2020-08-16 16:12:10 -07001297 }
1298#endif
1299 return -1;
1300 }
1301
Sarah Parker97803fc2019-05-17 14:15:37 -07001302 // TODO(sarahparker) finish bit allocation for one pass pyramid
Aasaipriya5feffea2020-04-15 12:43:05 +05301303 if (has_no_stats_stage(cpi)) {
Vishesh5b50e6d2020-06-10 19:20:07 +05301304 gf_cfg->gf_max_pyr_height =
1305 AOMMIN(gf_cfg->gf_max_pyr_height, USE_ALTREF_FOR_ONE_PASS);
1306 gf_cfg->gf_min_pyr_height =
1307 AOMMIN(gf_cfg->gf_min_pyr_height, gf_cfg->gf_max_pyr_height);
Urvang Joshib44f48f2020-01-27 11:09:48 -08001308 }
Sarah Parker97803fc2019-05-17 14:15:37 -07001309
Aniket Wanareb7d55292021-11-02 14:07:51 +05301310 // Allocation of mi buffers.
1311 alloc_mb_mode_info_buffers(cpi);
1312
Mufaddal Chakera7260d142021-04-12 01:03:40 +05301313 cpi->skip_tpl_setup_stats = 0;
Jingning Han3f3318f2020-08-16 16:12:10 -07001314#if !CONFIG_REALTIME_ONLY
Cherma Rajan Abad783b2022-04-13 11:06:38 +05301315 if (oxcf->pass != AOM_RC_FIRST_PASS) {
1316 TplParams *const tpl_data = &cpi->ppi->tpl_data;
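    // Allocate the TPL stats buffers the first time they are needed.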
1317 if (tpl_data->tpl_stats_pool[0] == NULL) {
1318 av1_setup_tpl_buffers(cpi->ppi, &cm->mi_params, oxcf->frm_dim_cfg.width,
1319 oxcf->frm_dim_cfg.height, 0,
1320 oxcf->gf_cfg.lag_in_frames);
1321 }
1322 }
Remya Prakasanf54c2b92021-07-21 15:40:07 +05301323 cpi->twopass_frame.this_frame = NULL;
Deepa K Gc29630a2021-05-31 13:19:41 +05301324 const int use_one_pass_rt_params = is_one_pass_rt_params(cpi);
Jingning Han3f3318f2020-08-16 16:12:10 -07001325 if (!use_one_pass_rt_params && !is_stat_generation_stage(cpi)) {
Yunqing Wang7a3ad542020-11-03 23:40:24 -08001326#if CONFIG_COLLECT_COMPONENT_TIMING
1327 start_timing(cpi, av1_get_second_pass_params_time);
1328#endif
Aasaipriya87e4abc2021-06-07 19:20:00 +05301329
Aasaipriya87e4abc2021-06-07 19:20:00 +05301330    // Initialize frame_level_rate_correction_factors with the values used
1331    // just before the parallel frames.
1332 if (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0) {
Remya Prakasan6566bc82021-11-05 23:21:12 +05301333 for (int i = 0; i < RATE_FACTOR_LEVELS; i++) {
Aasaipriya87e4abc2021-06-07 19:20:00 +05301334 cpi->rc.frame_level_rate_correction_factors[i] =
Remya Prakasanffeb4972022-06-21 20:00:28 +05301335#if CONFIG_FPMT_TEST
Remya Prakasan6566bc82021-11-05 23:21:12 +05301336 (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE)
1337 ? cpi->ppi->p_rc.temp_rate_correction_factors[i]
1338 :
Remya Prakasanffeb4972022-06-21 20:00:28 +05301339#endif // CONFIG_FPMT_TEST
Remya Prakasan6566bc82021-11-05 23:21:12 +05301340 cpi->ppi->p_rc.rate_correction_factors[i];
1341 }
Aasaipriya87e4abc2021-06-07 19:20:00 +05301342 }
Nithya V S56a23622022-05-10 11:32:43 +05301343
Aasaipriya79734642021-07-22 20:13:15 +05301344    // Copy mv_stats from ppi to the frame-level cpi.
1345 cpi->mv_stats = cpi->ppi->mv_stats;
Angie Chiang30546ab2021-10-29 18:30:45 -07001346 av1_get_second_pass_params(cpi, &frame_params, *frame_flags);
Yunqing Wang7a3ad542020-11-03 23:40:24 -08001347#if CONFIG_COLLECT_COMPONENT_TIMING
1348 end_timing(cpi, av1_get_second_pass_params_time);
1349#endif
Jingning Han3f3318f2020-08-16 16:12:10 -07001350 }
1351#endif
1352
Mufaddal Chakerae7326122019-12-04 14:49:09 +05301353 if (!is_stat_generation_stage(cpi)) {
Jingning Hanbc5e2c62021-06-20 08:16:18 -07001354 // TODO(jingning): fwd key frame always uses show existing frame?
1355 if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE &&
1356 gf_group->refbuf_state[cpi->gf_frame_index] == REFBUF_RESET) {
Sarah Parker2beb1d12019-10-25 16:30:32 -07001357 frame_params.show_existing_frame = 1;
1358 } else {
1359 frame_params.show_existing_frame =
Remya Prakasan8b35d612021-07-12 22:17:26 +05301360 (cpi->ppi->show_existing_alt_ref &&
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301361 gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) ||
1362 gf_group->update_type[cpi->gf_frame_index] == INTNL_OVERLAY_UPDATE;
Sarah Parker2beb1d12019-10-25 16:30:32 -07001363 }
David Turnere86ee0d2019-02-18 17:16:28 +00001364 frame_params.show_existing_frame &= allow_show_existing(cpi, *frame_flags);
Yunqing Wang1973f112019-10-18 15:50:04 -07001365
Remya Prakasandf38eb12023-03-07 18:27:32 +05301366 // Special handling to reset 'show_existing_frame' in case of dropped
1367 // frames.
1368 if (oxcf->rc_cfg.drop_frames_water_mark &&
1369 (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE ||
1370 gf_group->update_type[cpi->gf_frame_index] == INTNL_OVERLAY_UPDATE)) {
1371 // During the encode of an OVERLAY_UPDATE/INTNL_OVERLAY_UPDATE frame, loop
1372 // over the gf group to check if the corresponding
1373 // ARF_UPDATE/INTNL_ARF_UPDATE frame was dropped.
1374 int cur_disp_idx = gf_group->display_idx[cpi->gf_frame_index];
1375 for (int idx = 0; idx < cpi->gf_frame_index; idx++) {
1376 if (cur_disp_idx == gf_group->display_idx[idx]) {
1377 assert(IMPLIES(
1378 gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE,
1379 gf_group->update_type[idx] == ARF_UPDATE));
1380 assert(IMPLIES(gf_group->update_type[cpi->gf_frame_index] ==
1381 INTNL_OVERLAY_UPDATE,
1382 gf_group->update_type[idx] == INTNL_ARF_UPDATE));
1383 // Reset show_existing_frame and set cpi->is_dropped_frame to true if
1384 // the frame was dropped during its first encode.
1385 if (gf_group->is_frame_dropped[idx]) {
1386 frame_params.show_existing_frame = 0;
1387 assert(!cpi->is_dropped_frame);
1388 cpi->is_dropped_frame = true;
1389 }
1390 break;
1391 }
1392 }
1393 }
1394
Yunqing Wang1973f112019-10-18 15:50:04 -07001395 // Reset show_existing_alt_ref decision to 0 after it is used.
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301396 if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) {
Remya Prakasan8b35d612021-07-12 22:17:26 +05301397 cpi->ppi->show_existing_alt_ref = 0;
Yunqing Wang1973f112019-10-18 15:50:04 -07001398 }
David Turnerb0c0aa32019-01-28 16:17:13 +00001399 } else {
David Turnere86ee0d2019-02-18 17:16:28 +00001400 frame_params.show_existing_frame = 0;
David Turnerb0c0aa32019-01-28 16:17:13 +00001401 }
1402
David Turnerdedd8ff2019-01-23 13:59:46 +00001403 struct lookahead_entry *source = NULL;
1404 struct lookahead_entry *last_source = NULL;
David Turnere86ee0d2019-02-18 17:16:28 +00001405 if (frame_params.show_existing_frame) {
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +05301406 source = av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage);
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301407 *pop_lookahead = 1;
Jingning Hand392c012019-09-19 15:48:08 -07001408 frame_params.show_frame = 1;
David Turnerdedd8ff2019-01-23 13:59:46 +00001409 } else {
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301410 source = choose_frame_source(cpi, &flush, pop_lookahead, &last_source,
Wan-Teh Chang5529fda2022-09-02 11:31:17 -07001411 &frame_params.show_frame);
David Turnerdedd8ff2019-01-23 13:59:46 +00001412 }
1413
1414 if (source == NULL) { // If no source was found, we can't encode a frame.
Jerome Jiang2612b4d2019-05-29 17:46:47 -07001415#if !CONFIG_REALTIME_ONLY
Bohan Li445fdf62021-06-03 16:16:00 -07001416 if (flush && oxcf->pass == AOM_RC_FIRST_PASS &&
1417 !cpi->ppi->twopass.first_pass_done) {
David Turnerdedd8ff2019-01-23 13:59:46 +00001418 av1_end_first_pass(cpi); /* get last stats packet */
Mufaddal Chakera358cf212021-02-25 14:41:56 +05301419 cpi->ppi->twopass.first_pass_done = 1;
David Turnerdedd8ff2019-01-23 13:59:46 +00001420 }
Jerome Jiang2612b4d2019-05-29 17:46:47 -07001421#endif
David Turnerdedd8ff2019-01-23 13:59:46 +00001422 return -1;
1423 }
Mufaddal Chakera97f92712021-05-21 15:10:33 +05301424
Mufaddal Chakera97f92712021-05-21 15:10:33 +05301425  // Reset src_offset so that the actual encode call for this frame gets its
1426  // source.
1427 gf_group->src_offset[cpi->gf_frame_index] = 0;
Mufaddal Chakera97f92712021-05-21 15:10:33 +05301428
bohanli0db9c512020-06-12 17:43:06 -07001429  // The source may be changed later by temporal filtering.
1430 frame_input.source = &source->img;
Marco Paniconi6154a1e2023-06-14 14:42:41 -07001431 if ((cpi->ppi->use_svc || cpi->rc.prev_frame_is_dropped) &&
1432 last_source != NULL)
Marco Paniconi6dfcab22022-10-31 00:41:42 -07001433 av1_svc_set_last_source(cpi, &frame_input, &last_source->img);
1434 else
1435 frame_input.last_source = last_source != NULL ? &last_source->img : NULL;
David Turnerdedd8ff2019-01-23 13:59:46 +00001436 frame_input.ts_duration = source->ts_end - source->ts_start;
Cheng Chene1a7a3e2020-03-18 18:23:19 -07001437 // Save unfiltered source. It is used in av1_get_second_pass_params().
1438 cpi->unfiltered_source = frame_input.source;
David Turnerdedd8ff2019-01-23 13:59:46 +00001439
1440 *time_stamp = source->ts_start;
1441 *time_end = source->ts_end;
Yunqing Wang15ab03c2020-11-24 16:45:25 -08001442 if (source->ts_start < cpi->time_stamps.first_ts_start) {
1443 cpi->time_stamps.first_ts_start = source->ts_start;
1444 cpi->time_stamps.prev_ts_end = source->ts_start;
David Turnerdedd8ff2019-01-23 13:59:46 +00001445 }
1446
1447 av1_apply_encoding_flags(cpi, source->flags);
bohanlicbe8e742020-08-17 14:19:17 -07001448 *frame_flags = (source->flags & AOM_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;
David Turnerdedd8ff2019-01-23 13:59:46 +00001449
Remya Prakasanffeb4972022-06-21 20:00:28 +05301450#if CONFIG_FPMT_TEST
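  // When simulating frame-parallel encodes, frames within a parallel set use
  // the frame rate saved in temp_framerate.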
Remya Prakasan6566bc82021-11-05 23:21:12 +05301451 if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) {
1452 if (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0) {
1453 cpi->framerate = cpi->temp_framerate;
1454 }
1455 }
Remya Prakasanffeb4972022-06-21 20:00:28 +05301456#endif // CONFIG_FPMT_TEST
Remya Prakasan6566bc82021-11-05 23:21:12 +05301457
Jingning Hand392c012019-09-19 15:48:08 -07001458  // Shown frames and arf-overlay frames need frame-rate adjustment.
Deepa K Gfb89ce02020-04-06 13:34:42 +05301459 if (frame_params.show_frame)
1460 adjust_frame_rate(cpi, source->ts_start, source->ts_end);
David Turnerdedd8ff2019-01-23 13:59:46 +00001461
Jingning Hand392c012019-09-19 15:48:08 -07001462 if (!frame_params.show_existing_frame) {
David Turnerdedd8ff2019-01-23 13:59:46 +00001463 if (cpi->film_grain_table) {
Neil Birkbeckbd40ca72019-03-02 13:25:50 -08001464 cm->cur_frame->film_grain_params_present = aom_film_grain_table_lookup(
David Turnerdedd8ff2019-01-23 13:59:46 +00001465 cpi->film_grain_table, *time_stamp, *time_end, 0 /* =erase */,
1466 &cm->film_grain_params);
Neil Birkbeckbd40ca72019-03-02 13:25:50 -08001467 } else {
1468 cm->cur_frame->film_grain_params_present =
Tarundeep Singh4243e622021-04-20 16:10:22 +05301469 cm->seq_params->film_grain_params_present;
David Turnerdedd8ff2019-01-23 13:59:46 +00001470 }
David Turnerdedd8ff2019-01-23 13:59:46 +00001471 // only one operating point supported now
Yue Chen1bc5be62018-08-24 13:57:32 -07001472 const int64_t pts64 = ticks_to_timebase_units(timestamp_ratio, *time_stamp);
David Turnerdedd8ff2019-01-23 13:59:46 +00001473 if (pts64 < 0 || pts64 > UINT32_MAX) return AOM_CODEC_ERROR;
Jingning Hanf13d7072022-06-14 16:04:41 -07001474
Jingning Hand392c012019-09-19 15:48:08 -07001475 cm->frame_presentation_time = (uint32_t)pts64;
David Turnerdedd8ff2019-01-23 13:59:46 +00001476 }
1477
Yunqing Wang6eb144b2021-08-09 17:32:03 -07001478#if CONFIG_COLLECT_COMPONENT_TIMING
1479 start_timing(cpi, av1_get_one_pass_rt_params_time);
1480#endif
Marco Paniconicea99e22019-07-16 18:36:31 -07001481#if CONFIG_REALTIME_ONLY
Wan-Teh Chang5529fda2022-09-02 11:31:17 -07001482 av1_get_one_pass_rt_params(cpi, &frame_params.frame_type, &frame_input,
1483 *frame_flags);
Marco Paniconi1a3a74a2022-08-28 22:26:48 -07001484 if (use_rtc_reference_structure_one_layer(cpi))
1485 av1_set_rtc_reference_structure_one_layer(cpi, cpi->gf_frame_index == 0);
Marco Paniconicea99e22019-07-16 18:36:31 -07001486#else
chiyotsai8b8f8a22020-04-21 11:03:47 -07001487 if (use_one_pass_rt_params) {
Wan-Teh Chang5529fda2022-09-02 11:31:17 -07001488 av1_get_one_pass_rt_params(cpi, &frame_params.frame_type, &frame_input,
1489 *frame_flags);
Marco Paniconi1a3a74a2022-08-28 22:26:48 -07001490 if (use_rtc_reference_structure_one_layer(cpi))
1491 av1_set_rtc_reference_structure_one_layer(cpi, cpi->gf_frame_index == 0);
chiyotsai8b8f8a22020-04-21 11:03:47 -07001492 }
Jerome Jiang2612b4d2019-05-29 17:46:47 -07001493#endif
Yunqing Wang6eb144b2021-08-09 17:32:03 -07001494#if CONFIG_COLLECT_COMPONENT_TIMING
1495 end_timing(cpi, av1_get_one_pass_rt_params_time);
1496#endif
Jingning Han3f3318f2020-08-16 16:12:10 -07001497
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301498 FRAME_UPDATE_TYPE frame_update_type =
1499 get_frame_update_type(gf_group, cpi->gf_frame_index);
David Turner4f1f1812019-01-24 17:00:24 +00001500
David Turnere86ee0d2019-02-18 17:16:28 +00001501 if (frame_params.show_existing_frame &&
1502 frame_params.frame_type != KEY_FRAME) {
David Turner475a3132019-01-18 15:17:17 +00001503 // Force show-existing frames to be INTER, except forward keyframes
1504 frame_params.frame_type = INTER_FRAME;
1505 }
1506
bohanli99852502020-07-14 16:22:45 -07001507 // Per-frame encode speed. In theory this can vary, but things may have
1508 // been written assuming speed-level will not change within a sequence, so
1509 // this parameter should be used with caution.
David Turner04b70d82019-01-24 15:39:19 +00001510 frame_params.speed = oxcf->speed;
1511
Bohan Li575ffc92022-09-09 17:01:09 -07001512#if !CONFIG_REALTIME_ONLY
1513 // Set forced key frames when necessary. For two-pass encoding / lap mode,
1514  // this is already handled by av1_get_second_pass_params. However, when no
1515 // stats are available, we still need to check if the new frame is a keyframe.
1516 // For one pass rt, this is already checked in av1_get_one_pass_rt_params.
1517 if (!use_one_pass_rt_params &&
1518 (is_stat_generation_stage(cpi) || has_no_stats_stage(cpi))) {
Jayasanker J312b85e2021-02-23 12:44:30 +05301519 // Current frame is coded as a key-frame for any of the following cases:
1520 // 1) First frame of a video
1521 // 2) For all-intra frame encoding
1522 // 3) When a key-frame is forced
1523 const int kf_requested =
1524 (cm->current_frame.frame_number == 0 ||
1525 oxcf->kf_cfg.key_freq_max == 0 || (*frame_flags & FRAMEFLAGS_KEY));
David Turner4f1f1812019-01-24 17:00:24 +00001526 if (kf_requested && frame_update_type != OVERLAY_UPDATE &&
1527 frame_update_type != INTNL_OVERLAY_UPDATE) {
David Turnerddbff442019-01-21 14:58:42 +00001528 frame_params.frame_type = KEY_FRAME;
Bohan Lib911c212022-09-30 11:06:48 -07001529 } else if (is_stat_generation_stage(cpi)) {
1530 // For stats generation, set the frame type to inter here.
David Turnerddbff442019-01-21 14:58:42 +00001531 frame_params.frame_type = INTER_FRAME;
David Turnercb5e36f2019-01-17 17:15:25 +00001532 }
Bohan Li575ffc92022-09-09 17:01:09 -07001533 }
1534#endif
1535
1536 // Work out some encoding parameters specific to the pass:
1537 if (has_no_stats_stage(cpi) && oxcf->q_cfg.aq_mode == CYCLIC_REFRESH_AQ) {
1538 av1_cyclic_refresh_update_parameters(cpi);
1539 } else if (is_stat_generation_stage(cpi)) {
1540 cpi->td.mb.e_mbd.lossless[0] = is_lossless_requested(&oxcf->rc_cfg);
Hamsalekha S37cc1d12019-12-12 19:27:41 +05301541 } else if (is_stat_consumption_stage(cpi)) {
David Turnerddbff442019-01-21 14:58:42 +00001542#if CONFIG_MISMATCH_DEBUG
1543 mismatch_move_frame_idx_w();
1544#endif
1545#if TXCOEFF_COST_TIMER
1546 cm->txcoeff_cost_timer = 0;
1547 cm->txcoeff_cost_count = 0;
1548#endif
1549 }
1550
Vishesha195ca32020-04-07 18:46:20 +05301551 if (!is_stat_generation_stage(cpi))
1552 set_ext_overrides(cm, &frame_params, ext_flags);
David Turnerddbff442019-01-21 14:58:42 +00001553
David Turner4f1f1812019-01-24 17:00:24 +00001554 // Shown keyframes and S frames refresh all reference buffers
1555 const int force_refresh_all =
1556 ((frame_params.frame_type == KEY_FRAME && frame_params.show_frame) ||
1557 frame_params.frame_type == S_FRAME) &&
David Turnere86ee0d2019-02-18 17:16:28 +00001558 !frame_params.show_existing_frame;
David Turner4f1f1812019-01-24 17:00:24 +00001559
Jingning Han880bb352021-06-18 11:57:26 -07001560 av1_configure_buffer_updates(
1561 cpi, &frame_params.refresh_frame, frame_update_type,
1562 gf_group->refbuf_state[cpi->gf_frame_index], force_refresh_all);
David Turner4f1f1812019-01-24 17:00:24 +00001563
Mufaddal Chakerae7326122019-12-04 14:49:09 +05301564 if (!is_stat_generation_stage(cpi)) {
Deepa K G140bc832019-10-30 17:16:29 +05301565 const YV12_BUFFER_CONFIG *ref_frame_buf[INTER_REFS_PER_FRAME];
1566
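    // Snapshot the current reference map (display order and pyramid level of
    // each slot) and compute the display order of the frame being coded.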
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301567 RefFrameMapPair ref_frame_map_pairs[REF_FRAMES];
1568 init_ref_map_pair(cpi, ref_frame_map_pairs);
1569 const int order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
1570 const int cur_frame_disp =
1571 cpi->common.current_frame.frame_number + order_offset;
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301572
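    // The reference mapping is computed here for non-parallel frames
    // (frame_parallel_level 0), and for all frames when simulating parallel
    // encodes under CONFIG_FPMT_TEST.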
Remya Prakasan6566bc82021-11-05 23:21:12 +05301573 int get_ref_frames = 0;
Remya Prakasanffeb4972022-06-21 20:00:28 +05301574#if CONFIG_FPMT_TEST
Remya Prakasan6566bc82021-11-05 23:21:12 +05301575 get_ref_frames =
1576 (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) ? 1 : 0;
Remya Prakasanffeb4972022-06-21 20:00:28 +05301577#endif // CONFIG_FPMT_TEST
Remya Prakasan6566bc82021-11-05 23:21:12 +05301578 if (get_ref_frames ||
1579 gf_group->frame_parallel_level[cpi->gf_frame_index] == 0) {
Remya Prakasanf46244b2021-08-03 14:21:03 +05301580 if (!ext_flags->refresh_frame.update_pending) {
Remya Prakasandaf23942022-06-08 14:53:46 +05301581 av1_get_ref_frames(ref_frame_map_pairs, cur_frame_disp, cpi,
1582 cpi->gf_frame_index, 1, cm->remapped_ref_idx);
Marco Paniconi53218432022-10-10 13:00:27 -07001583 } else if (cpi->ppi->rtc_ref.set_ref_frame_config ||
Marco Paniconi1a3a74a2022-08-28 22:26:48 -07001584 use_rtc_reference_structure_one_layer(cpi)) {
Remya Prakasanf46244b2021-08-03 14:21:03 +05301585 for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++)
Marco Paniconi53218432022-10-10 13:00:27 -07001586 cm->remapped_ref_idx[i] = cpi->ppi->rtc_ref.ref_idx[i];
Remya Prakasanf46244b2021-08-03 14:21:03 +05301587 }
Marco Paniconi67142112019-07-24 15:00:31 -07001588 }
Jingning Han0a2af4e2019-07-08 19:30:03 -07001589
Deepa K G140bc832019-10-30 17:16:29 +05301590 // Get the reference frames
Wan-Teh Chang5f589af2022-08-10 11:57:14 -07001591 bool has_ref_frames = false;
Deepa K G140bc832019-10-30 17:16:29 +05301592 for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
Wan-Teh Chang5f589af2022-08-10 11:57:14 -07001593 const RefCntBuffer *ref_frame =
1594 get_ref_frame_buf(cm, ref_frame_priority_order[i]);
1595 ref_frame_buf[i] = ref_frame != NULL ? &ref_frame->buf : NULL;
1596 if (ref_frame != NULL) has_ref_frames = true;
1597 }
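    // An INTER_FRAME or S_FRAME cannot be coded without at least one valid
    // reference buffer.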
1598 if (!has_ref_frames && (frame_params.frame_type == INTER_FRAME ||
1599 frame_params.frame_type == S_FRAME)) {
1600 return AOM_CODEC_ERROR;
Deepa K G140bc832019-10-30 17:16:29 +05301601 }
bohanli99852502020-07-14 16:22:45 -07001602
David Turnerddbff442019-01-21 14:58:42 +00001603 // Work out which reference frame slots may be used.
Marco Paniconi9c45f6f2021-07-19 00:08:25 -07001604 frame_params.ref_frame_flags =
1605 get_ref_frame_flags(&cpi->sf, is_one_pass_rt_params(cpi), ref_frame_buf,
1606 ext_flags->ref_frame_flags);
David Turnerddbff442019-01-21 14:58:42 +00001607
Remya Prakasana68eaef2021-05-19 19:15:52 +05301608 // Set primary_ref_frame of non-reference frames as PRIMARY_REF_NONE.
1609 if (cpi->ppi->gf_group.is_frame_non_ref[cpi->gf_frame_index]) {
1610 frame_params.primary_ref_frame = PRIMARY_REF_NONE;
1611 } else {
1612 frame_params.primary_ref_frame =
1613 choose_primary_ref_frame(cpi, &frame_params);
1614 }
Remya Prakasana68eaef2021-05-19 19:15:52 +05301615
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301616 frame_params.order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
David Turner6e8b4d92019-02-18 15:01:33 +00001617
Remya Prakasand80b8ce2021-07-04 16:58:09 +05301618 // Call av1_get_refresh_frame_flags() if refresh index not available.
1619 if (!cpi->refresh_idx_available) {
Remya Prakasand80b8ce2021-07-04 16:58:09 +05301620 frame_params.refresh_frame_flags = av1_get_refresh_frame_flags(
1621 cpi, &frame_params, frame_update_type, cpi->gf_frame_index,
Remya Prakasan55318e32022-03-03 23:34:35 +05301622 cur_frame_disp, ref_frame_map_pairs);
Remya Prakasand80b8ce2021-07-04 16:58:09 +05301623 } else {
1624 assert(cpi->ref_refresh_index != INVALID_IDX);
1625 frame_params.refresh_frame_flags = (1 << cpi->ref_refresh_index);
1626 }
Remya Prakasana68eaef2021-05-19 19:15:52 +05301627
Remya Prakasana68eaef2021-05-19 19:15:52 +05301628    // Ensure frames marked as is_frame_non_ref do not refresh any reference
    // buffers.
1629 if (gf_group->is_frame_non_ref[cpi->gf_frame_index])
1630 frame_params.refresh_frame_flags = 0;
Remya Prakasana68eaef2021-05-19 19:15:52 +05301631
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301632 frame_params.existing_fb_idx_to_show = INVALID_IDX;
1633 // Find the frame buffer to show based on display order.
1634 if (frame_params.show_existing_frame) {
1635 for (int frame = 0; frame < REF_FRAMES; frame++) {
1636 const RefCntBuffer *const buf = cm->ref_frame_map[frame];
1637 if (buf == NULL) continue;
1638 const int frame_order = (int)buf->display_order_hint;
1639 if (frame_order == cur_frame_disp)
1640 frame_params.existing_fb_idx_to_show = frame;
1641 }
1642 }
David Turnera7f133c2019-01-22 14:47:16 +00001643 }
1644
David Turner73245762019-02-11 16:42:34 +00001645 // The way frame_params->remapped_ref_idx is setup is a placeholder.
David Turnerf4592292019-02-21 11:50:30 +00001646 // Currently, reference buffer assignment is done by update_ref_frame_map()
bohanli99852502020-07-14 16:22:45 -07001647 // which is called by high-level strategy AFTER encoding a frame. It
1648 // modifies cm->remapped_ref_idx. If you want to use an alternative method
1649 // to determine reference buffer assignment, just put your assignments into
David Turner73245762019-02-11 16:42:34 +00001650 // frame_params->remapped_ref_idx here and they will be used when encoding
1651 // this frame. If frame_params->remapped_ref_idx is setup independently of
David Turnerf4592292019-02-21 11:50:30 +00001652 // cm->remapped_ref_idx then update_ref_frame_map() will have no effect.
David Turner73245762019-02-11 16:42:34 +00001653 memcpy(frame_params.remapped_ref_idx, cm->remapped_ref_idx,
1654 REF_FRAMES * sizeof(*cm->remapped_ref_idx));
1655
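  // Reset the delta-q indices in the macroblock context before encoding.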
linzhen49073062022-10-31 00:42:59 +00001656 cpi->td.mb.rdmult_delta_qindex = cpi->td.mb.delta_qindex = 0;
Yue Chen4e585cc2019-06-03 14:47:16 -07001657
1658 if (!frame_params.show_existing_frame) {
Vishesh734eff92020-06-20 21:46:36 +05301659 cm->quant_params.using_qmatrix = oxcf->q_cfg.using_qm;
Yue Chen4e585cc2019-06-03 14:47:16 -07001660 }
Marco Paniconi34b0dd02020-07-29 16:41:41 -07001661
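  // Decide the screen content coding options on intra frames (key frame or
  // intra-only), except during stats generation.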
Yunqing Wangd849db42023-06-15 14:21:00 -07001662 const int is_intra_frame = frame_params.frame_type == KEY_FRAME ||
1663 frame_params.frame_type == INTRA_ONLY_FRAME;
1664 FeatureFlags *const features = &cm->features;
1665 if (!is_stat_generation_stage(cpi) &&
1666 (oxcf->pass == AOM_RC_ONE_PASS || oxcf->pass >= AOM_RC_SECOND_PASS) &&
1667 is_intra_frame) {
1668 av1_set_screen_content_options(cpi, features);
1669 }
1670
Cheng Chen0fcf6f82019-10-11 11:41:19 -07001671#if CONFIG_REALTIME_ONLY
David Turnerdedd8ff2019-01-23 13:59:46 +00001672 if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) !=
David Turnerddbff442019-01-21 14:58:42 +00001673 AOM_CODEC_OK) {
1674 return AOM_CODEC_ERROR;
1675 }
Cheng Chen0fcf6f82019-10-11 11:41:19 -07001676#else
Marco Paniconieca11952020-05-06 11:30:48 -07001677 if (has_no_stats_stage(cpi) && oxcf->mode == REALTIME &&
Vishesh5b50e6d2020-06-10 19:20:07 +05301678 gf_cfg->lag_in_frames == 0) {
Marco Paniconieca11952020-05-06 11:30:48 -07001679 if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) !=
1680 AOM_CODEC_OK) {
1681 return AOM_CODEC_ERROR;
1682 }
1683 } else if (denoise_and_encode(cpi, dest, &frame_input, &frame_params,
1684 &frame_results) != AOM_CODEC_OK) {
Cheng Chen0fcf6f82019-10-11 11:41:19 -07001685 return AOM_CODEC_ERROR;
1686 }
1687#endif // CONFIG_REALTIME_ONLY
David Turnerddbff442019-01-21 14:58:42 +00001688
Yunqing Wang8cec96e2022-01-28 16:09:41 -08001689  // This is used in the rtc temporal filter case: use the true source in the
1690  // PSNR calculation.
1691 if (is_psnr_calc_enabled(cpi) && cpi->sf.rt_sf.use_rtc_tf &&
1692 cpi->common.current_frame.frame_type != KEY_FRAME) {
1693 assert(cpi->orig_source.buffer_alloc_sz > 0);
1694 cpi->source = &cpi->orig_source;
1695 }
1696
Mufaddal Chakerae7326122019-12-04 14:49:09 +05301697 if (!is_stat_generation_stage(cpi)) {
David Turner73245762019-02-11 16:42:34 +00001698 // First pass doesn't modify reference buffer assignment or produce frame
1699 // flags
Jayasanker J24cb9bc2020-04-15 13:43:10 +05301700 update_frame_flags(&cpi->common, &cpi->refresh_frame, frame_flags);
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301701 set_additional_frame_flags(cm, frame_flags);
David Turner73245762019-02-11 16:42:34 +00001702 }
1703
Jerome Jiang2612b4d2019-05-29 17:46:47 -07001704#if !CONFIG_REALTIME_ONLY
David Turnerddbff442019-01-21 14:58:42 +00001705#if TXCOEFF_COST_TIMER
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301706 if (!is_stat_generation_stage(cpi)) {
David Turnerddbff442019-01-21 14:58:42 +00001707 cm->cum_txcoeff_cost_timer += cm->txcoeff_cost_timer;
1708 fprintf(stderr,
1709 "\ntxb coeff cost block number: %ld, frame time: %ld, cum time %ld "
1710 "in us\n",
1711 cm->txcoeff_cost_count, cm->txcoeff_cost_timer,
1712 cm->cum_txcoeff_cost_timer);
David Turnerddbff442019-01-21 14:58:42 +00001713 }
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301714#endif
Jerome Jiang2612b4d2019-05-29 17:46:47 -07001715#endif // !CONFIG_REALTIME_ONLY
David Turnerddbff442019-01-21 14:58:42 +00001716
Sai Dengaff27722020-08-31 12:06:09 -07001717#if CONFIG_TUNE_VMAF
1718 if (!is_stat_generation_stage(cpi) &&
1719 (oxcf->tune_cfg.tuning >= AOM_TUNE_VMAF_WITH_PREPROCESSING &&
1720 oxcf->tune_cfg.tuning <= AOM_TUNE_VMAF_NEG_MAX_GAIN)) {
Sai Deng282fad42020-09-17 15:24:29 -07001721 av1_update_vmaf_curve(cpi);
Sai Dengaff27722020-08-31 12:06:09 -07001722 }
1723#endif
David Turner056f7cd2019-01-07 17:48:13 +00001724
David Turner1539bb02019-01-24 15:28:13 +00001725 // Unpack frame_results:
David Turner056f7cd2019-01-07 17:48:13 +00001726 *size = frame_results.size;
1727
David Turner1539bb02019-01-24 15:28:13 +00001728 // Leave a signal for a higher level caller about if this frame is droppable
1729 if (*size > 0) {
Marco Paniconi7aaca882022-08-28 21:58:59 -07001730 cpi->droppable =
Marco Paniconi53218432022-10-10 13:00:27 -07001731 is_frame_droppable(&cpi->ppi->rtc_ref, &ext_flags->refresh_frame);
David Turner1539bb02019-01-24 15:28:13 +00001732 }
1733
Marco Paniconi6154a1e2023-06-14 14:42:41 -07001734 // For SVC, or when frame-dropper is enabled:
1735 // keep track of the (unscaled) source corresponding to the refresh of LAST
1736 // reference (base temporal layer - TL0). Copy only for the
Marco Paniconi50dfbac2023-01-31 12:48:14 -08001737 // top spatial enhancement layer so all spatial layers of the next
Marco Paniconi6dfcab22022-10-31 00:41:42 -07001738  // superframe have their last_source aligned with the previous TL0 superframe.
Marco Paniconi50dfbac2023-01-31 12:48:14 -08001739 // Avoid cases where resolution changes for unscaled source (top spatial
Marco Paniconi6154a1e2023-06-14 14:42:41 -07001740  // layer). Only needs to be done for frames that are encoded (size > 0).
1741 if (*size > 0 &&
1742 (cpi->ppi->use_svc || cpi->oxcf.rc_cfg.drop_frames_water_mark > 0) &&
Marco Paniconi6dfcab22022-10-31 00:41:42 -07001743 cpi->svc.spatial_layer_id == cpi->svc.number_spatial_layers - 1 &&
Marco Paniconi50dfbac2023-01-31 12:48:14 -08001744 cpi->svc.temporal_layer_id == 0 &&
1745 cpi->unscaled_source->y_width == cpi->svc.source_last_TL0.y_width &&
1746 cpi->unscaled_source->y_height == cpi->svc.source_last_TL0.y_height) {
Marco Paniconi6dfcab22022-10-31 00:41:42 -07001747 aom_yv12_copy_y(cpi->unscaled_source, &cpi->svc.source_last_TL0);
1748 aom_yv12_copy_u(cpi->unscaled_source, &cpi->svc.source_last_TL0);
1749 aom_yv12_copy_v(cpi->unscaled_source, &cpi->svc.source_last_TL0);
Marco Paniconib5b1be82022-10-22 23:37:42 -07001750 }
1751
David Turner056f7cd2019-01-07 17:48:13 +00001752 return AOM_CODEC_OK;
1753}