/*
 * Copyright (c) 2019, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <stdint.h>

#include "av1/common/blockd.h"
#include "config/aom_config.h"
#include "config/aom_scale_rtcd.h"

#include "aom/aom_codec.h"
#include "aom/aom_encoder.h"

#include "aom_ports/system_state.h"

#if CONFIG_MISMATCH_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_MISMATCH_DEBUG

#include "av1/common/av1_common_int.h"
#include "av1/common/reconinter.h"

#include "av1/encoder/encoder.h"
#include "av1/encoder/encode_strategy.h"
#include "av1/encoder/encodeframe.h"
#include "av1/encoder/firstpass.h"
#include "av1/encoder/pass2_strategy.h"
#include "av1/encoder/temporal_filter.h"
#include "av1/encoder/tpl_model.h"

#if CONFIG_TUNE_VMAF
#include "av1/encoder/tune_vmaf.h"
#endif

#define TEMPORAL_FILTER_KEY_FRAME (CONFIG_REALTIME_ONLY ? 0 : 1)

static INLINE void set_refresh_frame_flags(
    RefreshFrameFlagsInfo *const refresh_frame_flags, bool refresh_gf,
    bool refresh_bwdref, bool refresh_arf) {
  refresh_frame_flags->golden_frame = refresh_gf;
  refresh_frame_flags->bwd_ref_frame = refresh_bwdref;
  refresh_frame_flags->alt_ref_frame = refresh_arf;
}

void av1_configure_buffer_updates(
    AV1_COMP *const cpi, RefreshFrameFlagsInfo *const refresh_frame_flags,
    const FRAME_UPDATE_TYPE type, const REFBUF_STATE refbuf_state,
    int force_refresh_all) {
  // NOTE(weitinglin): Should we define another function to take care of
  // cpi->rc.is_$Source_Type to make this function as it is in the comment?
  const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
      &cpi->ext_flags.refresh_frame;
  cpi->rc.is_src_frame_alt_ref = 0;

  switch (type) {
    case KF_UPDATE:
      set_refresh_frame_flags(refresh_frame_flags, true, true, true);
      break;

    case LF_UPDATE:
      set_refresh_frame_flags(refresh_frame_flags, false, false, false);
      break;

    case GF_UPDATE:
      set_refresh_frame_flags(refresh_frame_flags, true, false, false);
      break;

    case OVERLAY_UPDATE:
      if (refbuf_state == REFBUF_RESET)
        set_refresh_frame_flags(refresh_frame_flags, true, true, true);
      else
        set_refresh_frame_flags(refresh_frame_flags, true, false, false);

      cpi->rc.is_src_frame_alt_ref = 1;
      break;

    case ARF_UPDATE:
      // NOTE: BWDREF does not get updated along with ALTREF_FRAME.
      if (refbuf_state == REFBUF_RESET)
        set_refresh_frame_flags(refresh_frame_flags, true, true, true);
      else
        set_refresh_frame_flags(refresh_frame_flags, false, false, true);

      break;

    case INTNL_OVERLAY_UPDATE:
      set_refresh_frame_flags(refresh_frame_flags, false, false, false);
      cpi->rc.is_src_frame_alt_ref = 1;
      break;

    case INTNL_ARF_UPDATE:
      set_refresh_frame_flags(refresh_frame_flags, false, true, false);
      break;

    default: assert(0); break;
  }

  if (ext_refresh_frame_flags->update_pending &&
      (!is_stat_generation_stage(cpi))) {
    set_refresh_frame_flags(refresh_frame_flags,
                            ext_refresh_frame_flags->golden_frame,
                            ext_refresh_frame_flags->bwd_ref_frame,
                            ext_refresh_frame_flags->alt_ref_frame);
    GF_GROUP *gf_group = &cpi->ppi->gf_group;
    if (ext_refresh_frame_flags->golden_frame)
      gf_group->update_type[cpi->gf_frame_index] = GF_UPDATE;
    if (ext_refresh_frame_flags->alt_ref_frame)
      gf_group->update_type[cpi->gf_frame_index] = ARF_UPDATE;
    if (ext_refresh_frame_flags->bwd_ref_frame)
      gf_group->update_type[cpi->gf_frame_index] = INTNL_ARF_UPDATE;
  }

  if (force_refresh_all)
    set_refresh_frame_flags(refresh_frame_flags, true, true, true);
}
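
// Illustrative sketch (not compiled into the encoder): how a caller might
// query the refresh decision for a shown key frame. `cpi` is assumed to be an
// initialized encoder with no external refresh override pending; KF_UPDATE
// ignores `refbuf_state`, so REFBUF_RESET is only a placeholder here.
#if 0
{
  RefreshFrameFlagsInfo refresh;
  av1_configure_buffer_updates(cpi, &refresh, KF_UPDATE, REFBUF_RESET,
                               /*force_refresh_all=*/0);
  // A key frame refreshes the golden, bwd-ref and alt-ref groups.
  assert(refresh.golden_frame && refresh.bwd_ref_frame &&
         refresh.alt_ref_frame);
}
#endif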

static void set_additional_frame_flags(const AV1_COMMON *const cm,
                                       unsigned int *const frame_flags) {
  if (frame_is_intra_only(cm)) {
    *frame_flags |= FRAMEFLAGS_INTRAONLY;
  }
  if (frame_is_sframe(cm)) {
    *frame_flags |= FRAMEFLAGS_SWITCH;
  }
  if (cm->features.error_resilient_mode) {
    *frame_flags |= FRAMEFLAGS_ERROR_RESILIENT;
  }
}

static void set_ext_overrides(AV1_COMMON *const cm,
                              EncodeFrameParams *const frame_params,
                              ExternalFlags *const ext_flags) {
  // Overrides the defaults with the externally supplied values with
  // av1_update_reference() and av1_update_entropy() calls
  // Note: The overrides are valid only for the next frame passed
  // to av1_encode_lowlevel()

  if (ext_flags->use_s_frame) {
    frame_params->frame_type = S_FRAME;
  }

  if (ext_flags->refresh_frame_context_pending) {
    cm->features.refresh_frame_context = ext_flags->refresh_frame_context;
    ext_flags->refresh_frame_context_pending = 0;
  }
  cm->features.allow_ref_frame_mvs = ext_flags->use_ref_frame_mvs;

  frame_params->error_resilient_mode = ext_flags->use_error_resilient;
  // A keyframe is already error resilient, and keyframes with
  // error_resilient_mode interfere with the use of show_existing_frame
  // when forward reference keyframes are enabled.
  frame_params->error_resilient_mode &= frame_params->frame_type != KEY_FRAME;
  // For bitstream conformance, s-frames must be error-resilient
  frame_params->error_resilient_mode |= frame_params->frame_type == S_FRAME;
}

static int choose_primary_ref_frame(
    const AV1_COMP *const cpi, const EncodeFrameParams *const frame_params) {
  const AV1_COMMON *const cm = &cpi->common;

  const int intra_only = frame_params->frame_type == KEY_FRAME ||
                         frame_params->frame_type == INTRA_ONLY_FRAME;
  if (intra_only || frame_params->error_resilient_mode ||
      cpi->ext_flags.use_primary_ref_none) {
    return PRIMARY_REF_NONE;
  }

  // In large scale case, always use Last frame's frame contexts.
  // Note(yunqing): In other cases, primary_ref_frame is chosen based on
  // cpi->ppi->gf_group.layer_depth[cpi->gf_frame_index], which also controls
  // frame bit allocation.
  if (cm->tiles.large_scale) return (LAST_FRAME - LAST_FRAME);

  if (cpi->ppi->use_svc) return av1_svc_primary_ref_frame(cpi);

  // Find the most recent reference frame with the same reference type as the
  // current frame
  const int current_ref_type = get_current_frame_ref_type(cpi);
  int wanted_fb = cpi->ppi->fb_of_context_type[current_ref_type];

  int primary_ref_frame = PRIMARY_REF_NONE;
  for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
    if (get_ref_frame_map_idx(cm, ref_frame) == wanted_fb) {
      primary_ref_frame = ref_frame - LAST_FRAME;
    }
  }

  return primary_ref_frame;
}

static void adjust_frame_rate(AV1_COMP *cpi, int64_t ts_start, int64_t ts_end) {
  TimeStamps *time_stamps = &cpi->time_stamps;
  int64_t this_duration;
  int step = 0;

  // Clear down mmx registers
  aom_clear_system_state();

  if (cpi->ppi->use_svc && cpi->svc.spatial_layer_id > 0) {
    cpi->framerate = cpi->svc.base_framerate;
    av1_rc_update_framerate(cpi, cpi->common.width, cpi->common.height);
    return;
  }

  if (ts_start == time_stamps->first_ts_start) {
    this_duration = ts_end - ts_start;
    step = 1;
  } else {
    int64_t last_duration =
        time_stamps->prev_ts_end - time_stamps->prev_ts_start;

    this_duration = ts_end - time_stamps->prev_ts_end;

    // do a step update if the duration changes by 10%
    if (last_duration)
      step = (int)((this_duration - last_duration) * 10 / last_duration);
  }

  if (this_duration) {
    if (step) {
      av1_new_framerate(cpi, 10000000.0 / this_duration);
    } else {
      // Average this frame's rate into the last second's average
      // frame rate. If we haven't seen 1 second yet, then average
      // over the whole interval seen.
      const double interval =
          AOMMIN((double)(ts_end - time_stamps->first_ts_start), 10000000.0);
      double avg_duration = 10000000.0 / cpi->framerate;
      avg_duration *= (interval - avg_duration + this_duration);
      avg_duration /= interval;

      av1_new_framerate(cpi, 10000000.0 / avg_duration);
    }
  }
  time_stamps->prev_ts_start = ts_start;
  time_stamps->prev_ts_end = ts_end;
}
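
// Worked example of the averaging branch above, with made-up numbers: at
// 30 fps the running average duration is 10000000 / 30 ~= 333333 ticks. If a
// frame then arrives with this_duration = 400000 and a full 10000000-tick
// interval has already been seen,
//   avg_duration = 333333 * (10000000 - 333333 + 400000) / 10000000
//               ~= 335556,
// so the reported frame rate eases from 30.0 to roughly 29.8 fps instead of
// jumping straight to the instantaneous 25 fps.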

// Determine whether there is a forced keyframe pending in the lookahead buffer
int is_forced_keyframe_pending(struct lookahead_ctx *lookahead,
                               const int up_to_index,
                               const COMPRESSOR_STAGE compressor_stage) {
  for (int i = 0; i <= up_to_index; i++) {
    const struct lookahead_entry *e =
        av1_lookahead_peek(lookahead, i, compressor_stage);
    if (e == NULL) {
      // We have reached the end of the lookahead buffer and not early-returned
      // so there isn't a forced key-frame pending.
      return -1;
    } else if (e->flags == AOM_EFLAG_FORCE_KF) {
      return i;
    } else {
      continue;
    }
  }
  return -1;  // Never reached
}
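
// Illustrative sketch (not compiled): scanning the next few lookahead entries
// of a hypothetical, already-configured encoder instance `cpi`.
#if 0
{
  const int kf_offset = is_forced_keyframe_pending(
      cpi->ppi->lookahead, /*up_to_index=*/16, cpi->compressor_stage);
  if (kf_offset != -1) {
    // The frame at offset kf_offset was queued with AOM_EFLAG_FORCE_KF set.
  }
}
#endif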

// Check if we should encode an ARF or internal ARF; if not, try a LAST frame.
// Do some setup associated with the chosen source.
// flush, pop_lookahead, and last_source are output parameters.
// Return the frame source, or NULL if we couldn't find one.
static struct lookahead_entry *choose_frame_source(
    AV1_COMP *const cpi, int *const flush, int *pop_lookahead,
    struct lookahead_entry **last_source,
    EncodeFrameParams *const frame_params) {
  AV1_COMMON *const cm = &cpi->common;
  const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
  struct lookahead_entry *source = NULL;

  // Source index in lookahead buffer.
  int src_index = gf_group->arf_src_offset[cpi->gf_frame_index];

  // TODO(Aasaipriya): Forced key frames need to be fixed when rc_mode != AOM_Q
  if (src_index &&
      (is_forced_keyframe_pending(cpi->ppi->lookahead, src_index,
                                  cpi->compressor_stage) != -1) &&
      cpi->oxcf.rc_cfg.mode != AOM_Q && !is_stat_generation_stage(cpi)) {
    src_index = 0;
    *flush = 1;
  }

  // If the current frame is arf, then we should not pop from the lookahead
  // buffer. If the current frame is not arf, then pop it. This assumes the
  // first frame in the GF group is not arf. May need to change if it is not
  // true.
  *pop_lookahead = (src_index == 0);
  // If this is a key frame and keyframe filtering is enabled with overlay,
  // then do not pop.
  if (*pop_lookahead && cpi->oxcf.kf_cfg.enable_keyframe_filtering > 1 &&
      gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE &&
      !is_stat_generation_stage(cpi) && cpi->ppi->lookahead) {
    if (cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz &&
        (*flush ||
         cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz ==
             cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].pop_sz)) {
      *pop_lookahead = 0;
    }
  }

  // LAP stage does not have ARFs or forward key-frames,
  // hence, always pop_lookahead here.
  if (is_stat_generation_stage(cpi)) {
    *pop_lookahead = 1;
    src_index = 0;
  }

  frame_params->show_frame = *pop_lookahead;

#if CONFIG_FRAME_PARALLEL_ENCODE
  // Future frame in parallel encode set
  if (gf_group->src_offset[cpi->gf_frame_index] != 0 &&
      !is_stat_generation_stage(cpi) &&
      0 /*will be turned on along with frame parallel encode*/) {
    src_index = gf_group->src_offset[cpi->gf_frame_index];
    // Don't remove future frames from lookahead_ctx. They will be
    // removed in their actual encode call.
    *pop_lookahead = 0;
  }
#endif
  if (frame_params->show_frame) {
    // show frame, pop from buffer
    // Get last frame source.
    if (cm->current_frame.frame_number > 0) {
      *last_source = av1_lookahead_peek(cpi->ppi->lookahead, src_index - 1,
                                        cpi->compressor_stage);
    }
    // Read in the source frame.
    source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
                                cpi->compressor_stage);
  } else {
339 // no show frames are arf frames
    source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
                                cpi->compressor_stage);
    // When src_index == rc->frames_to_key, it indicates a fwd_kf
    if (src_index == cpi->rc.frames_to_key && src_index != 0) {
      cpi->no_show_fwd_kf = 1;
    }
    if (source != NULL) {
      cm->showable_frame = 1;
    }
  }
  return source;
}

// Don't allow a show_existing_frame to coincide with an error resilient or
// S-Frame. An exception can be made in the case of a keyframe, since it does
// not depend on any previous frames.
static int allow_show_existing(const AV1_COMP *const cpi,
                               unsigned int frame_flags) {
  if (cpi->common.current_frame.frame_number == 0) return 0;

  const struct lookahead_entry *lookahead_src =
      av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage);
  if (lookahead_src == NULL) return 1;

  const int is_error_resilient =
      cpi->oxcf.tool_cfg.error_resilient_mode ||
      (lookahead_src->flags & AOM_EFLAG_ERROR_RESILIENT);
  const int is_s_frame = cpi->oxcf.kf_cfg.enable_sframe ||
                         (lookahead_src->flags & AOM_EFLAG_SET_S_FRAME);
  const int is_key_frame =
      (cpi->rc.frames_to_key == 0) || (frame_flags & FRAMEFLAGS_KEY);
  return !(is_error_resilient || is_s_frame) || is_key_frame;
}

// Update frame_flags to tell the encoder's caller what sort of frame was
// encoded.
static void update_frame_flags(
    const AV1_COMMON *const cm,
    const RefreshFrameFlagsInfo *const refresh_frame_flags,
    unsigned int *frame_flags) {
  if (encode_show_existing_frame(cm)) {
    *frame_flags &= ~FRAMEFLAGS_GOLDEN;
    *frame_flags &= ~FRAMEFLAGS_BWDREF;
    *frame_flags &= ~FRAMEFLAGS_ALTREF;
    *frame_flags &= ~FRAMEFLAGS_KEY;
    return;
  }

  if (refresh_frame_flags->golden_frame) {
    *frame_flags |= FRAMEFLAGS_GOLDEN;
  } else {
    *frame_flags &= ~FRAMEFLAGS_GOLDEN;
  }

  if (refresh_frame_flags->alt_ref_frame) {
    *frame_flags |= FRAMEFLAGS_ALTREF;
  } else {
    *frame_flags &= ~FRAMEFLAGS_ALTREF;
  }

  if (refresh_frame_flags->bwd_ref_frame) {
    *frame_flags |= FRAMEFLAGS_BWDREF;
  } else {
    *frame_flags &= ~FRAMEFLAGS_BWDREF;
  }

  if (cm->current_frame.frame_type == KEY_FRAME) {
    *frame_flags |= FRAMEFLAGS_KEY;
  } else {
    *frame_flags &= ~FRAMEFLAGS_KEY;
  }
}
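
// Illustrative sketch (not compiled): a caller can test the reported bits,
// e.g. to check whether the just-coded frame refreshed the golden reference.
// `cm` and `refresh` are assumed to describe that frame.
#if 0
{
  unsigned int frame_flags = 0;
  update_frame_flags(cm, &refresh, &frame_flags);
  const int refreshed_golden = (frame_flags & FRAMEFLAGS_GOLDEN) != 0;
  (void)refreshed_golden;
}
#endif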

#define DUMP_REF_FRAME_IMAGES 0

#if DUMP_REF_FRAME_IMAGES == 1
static int dump_one_image(AV1_COMMON *cm,
                          const YV12_BUFFER_CONFIG *const ref_buf,
                          char *file_name) {
  int h;
  FILE *f_ref = NULL;

  if (ref_buf == NULL) {
    printf("Frame data buffer is NULL.\n");
    return AOM_CODEC_MEM_ERROR;
  }

  if ((f_ref = fopen(file_name, "wb")) == NULL) {
    printf("Unable to open file %s to write.\n", file_name);
    return AOM_CODEC_MEM_ERROR;
  }

  // --- Y ---
  for (h = 0; h < cm->height; ++h) {
    fwrite(&ref_buf->y_buffer[h * ref_buf->y_stride], 1, cm->width, f_ref);
  }
  // --- U ---
  for (h = 0; h < (cm->height >> 1); ++h) {
    fwrite(&ref_buf->u_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
           f_ref);
  }
  // --- V ---
  for (h = 0; h < (cm->height >> 1); ++h) {
    fwrite(&ref_buf->v_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
           f_ref);
  }

  fclose(f_ref);

  return AOM_CODEC_OK;
}

static void dump_ref_frame_images(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  MV_REFERENCE_FRAME ref_frame;

  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    char file_name[256] = "";
    snprintf(file_name, sizeof(file_name), "/tmp/enc_F%d_ref_%d.yuv",
             cm->current_frame.frame_number, ref_frame);
    dump_one_image(cm, get_ref_frame_yv12_buf(cpi, ref_frame), file_name);
  }
}
#endif  // DUMP_REF_FRAME_IMAGES == 1

int av1_get_refresh_ref_frame_map(int refresh_frame_flags) {
  int ref_map_index;

  for (ref_map_index = 0; ref_map_index < REF_FRAMES; ++ref_map_index)
    if ((refresh_frame_flags >> ref_map_index) & 1) break;

  if (ref_map_index == REF_FRAMES) ref_map_index = INVALID_IDX;
  return ref_map_index;
}
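
// Illustrative sketch (not compiled): the refresh mask is scanned from the
// lowest bit upwards, so a mask with bits 3 and 5 set maps to slot 3, and an
// empty mask yields INVALID_IDX.
#if 0
{
  assert(av1_get_refresh_ref_frame_map(0x28) == 3);  // 0b101000 -> slot 3
  assert(av1_get_refresh_ref_frame_map(0) == INVALID_IDX);
}
#endif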

static void update_arf_stack(int ref_map_index,
                             RefBufferStack *ref_buffer_stack) {
  if (ref_buffer_stack->arf_stack_size >= 0) {
    if (ref_buffer_stack->arf_stack[0] == ref_map_index)
      stack_pop(ref_buffer_stack->arf_stack, &ref_buffer_stack->arf_stack_size);
  }

  if (ref_buffer_stack->lst_stack_size) {
    for (int i = ref_buffer_stack->lst_stack_size - 1; i >= 0; --i) {
      if (ref_buffer_stack->lst_stack[i] == ref_map_index) {
        for (int idx = i; idx < ref_buffer_stack->lst_stack_size - 1; ++idx)
          ref_buffer_stack->lst_stack[idx] =
              ref_buffer_stack->lst_stack[idx + 1];
        ref_buffer_stack->lst_stack[ref_buffer_stack->lst_stack_size - 1] =
            INVALID_IDX;
        --ref_buffer_stack->lst_stack_size;
      }
    }
  }

  if (ref_buffer_stack->gld_stack_size) {
    for (int i = ref_buffer_stack->gld_stack_size - 1; i >= 0; --i) {
      if (ref_buffer_stack->gld_stack[i] == ref_map_index) {
        for (int idx = i; idx < ref_buffer_stack->gld_stack_size - 1; ++idx)
          ref_buffer_stack->gld_stack[idx] =
              ref_buffer_stack->gld_stack[idx + 1];
        ref_buffer_stack->gld_stack[ref_buffer_stack->gld_stack_size - 1] =
            INVALID_IDX;
        --ref_buffer_stack->gld_stack_size;
      }
    }
  }
}

// Update reference frame stack info.
void av1_update_ref_frame_map(AV1_COMP *cpi,
                              FRAME_UPDATE_TYPE frame_update_type,
                              REFBUF_STATE refbuf_state, int ref_map_index,
                              RefBufferStack *ref_buffer_stack) {
  AV1_COMMON *const cm = &cpi->common;

  // TODO(jingning): Consider the S-frame same as key frame for the
  // reference frame tracking purpose. The logic might be better
  // expressed than converting the frame update type.
  if (frame_is_sframe(cm)) frame_update_type = KF_UPDATE;
  if (is_frame_droppable(&cpi->svc, &cpi->ext_flags.refresh_frame)) return;

  switch (frame_update_type) {
    case KF_UPDATE:
      stack_reset(ref_buffer_stack->lst_stack,
                  &ref_buffer_stack->lst_stack_size);
      stack_reset(ref_buffer_stack->gld_stack,
                  &ref_buffer_stack->gld_stack_size);
      stack_reset(ref_buffer_stack->arf_stack,
                  &ref_buffer_stack->arf_stack_size);
      stack_push(ref_buffer_stack->gld_stack, &ref_buffer_stack->gld_stack_size,
                 ref_map_index);
      break;
    case GF_UPDATE:
      update_arf_stack(ref_map_index, ref_buffer_stack);
      stack_push(ref_buffer_stack->gld_stack, &ref_buffer_stack->gld_stack_size,
                 ref_map_index);
      // For nonrd_mode: update LAST as well on GF_UPDATE frame.
      // TODO(jingning, marpan): Why replace both reference frames with the
      // same decoded frame?
      if (cpi->sf.rt_sf.use_nonrd_pick_mode)
        stack_push(ref_buffer_stack->lst_stack,
                   &ref_buffer_stack->lst_stack_size, ref_map_index);
      break;
    case LF_UPDATE:
      update_arf_stack(ref_map_index, ref_buffer_stack);
      stack_push(ref_buffer_stack->lst_stack, &ref_buffer_stack->lst_stack_size,
                 ref_map_index);
      break;
    case ARF_UPDATE:
    case INTNL_ARF_UPDATE:
      if (refbuf_state == REFBUF_RESET) {
        stack_reset(ref_buffer_stack->lst_stack,
                    &ref_buffer_stack->lst_stack_size);
        stack_reset(ref_buffer_stack->gld_stack,
                    &ref_buffer_stack->gld_stack_size);
        stack_reset(ref_buffer_stack->arf_stack,
                    &ref_buffer_stack->arf_stack_size);
      } else {
        update_arf_stack(ref_map_index, ref_buffer_stack);
      }
      stack_push(ref_buffer_stack->arf_stack, &ref_buffer_stack->arf_stack_size,
                 ref_map_index);
      break;
    case OVERLAY_UPDATE:
      if (refbuf_state == REFBUF_RESET) {
        ref_map_index = stack_pop(ref_buffer_stack->arf_stack,
                                  &ref_buffer_stack->arf_stack_size);
        stack_reset(ref_buffer_stack->lst_stack,
                    &ref_buffer_stack->lst_stack_size);
        stack_reset(ref_buffer_stack->gld_stack,
                    &ref_buffer_stack->gld_stack_size);
        stack_reset(ref_buffer_stack->arf_stack,
                    &ref_buffer_stack->arf_stack_size);
        stack_push(ref_buffer_stack->gld_stack,
                   &ref_buffer_stack->gld_stack_size, ref_map_index);
      } else {
        if (ref_map_index != INVALID_IDX) {
          update_arf_stack(ref_map_index, ref_buffer_stack);
          stack_push(ref_buffer_stack->lst_stack,
                     &ref_buffer_stack->lst_stack_size, ref_map_index);
        }
        ref_map_index = stack_pop(ref_buffer_stack->arf_stack,
                                  &ref_buffer_stack->arf_stack_size);
        stack_push(ref_buffer_stack->gld_stack,
                   &ref_buffer_stack->gld_stack_size, ref_map_index);
      }
      break;
    case INTNL_OVERLAY_UPDATE:
      ref_map_index = stack_pop(ref_buffer_stack->arf_stack,
                                &ref_buffer_stack->arf_stack_size);
      stack_push(ref_buffer_stack->lst_stack, &ref_buffer_stack->lst_stack_size,
                 ref_map_index);
      break;
    default: assert(0 && "unknown type");
  }
  return;
}

static int get_free_ref_map_index(
#if CONFIG_FRAME_PARALLEL_ENCODE
    RefFrameMapPair ref_map_pairs[REF_FRAMES],
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
    const RefBufferStack *ref_buffer_stack) {
#if CONFIG_FRAME_PARALLEL_ENCODE
  (void)ref_buffer_stack;
  for (int idx = 0; idx < REF_FRAMES; ++idx)
    if (ref_map_pairs[idx].disp_order == -1) return idx;
  return INVALID_IDX;
#else
  for (int idx = 0; idx < REF_FRAMES; ++idx) {
    int is_free = 1;
    for (int i = 0; i < ref_buffer_stack->arf_stack_size; ++i) {
      if (ref_buffer_stack->arf_stack[i] == idx) {
        is_free = 0;
        break;
      }
    }

    for (int i = 0; i < ref_buffer_stack->lst_stack_size; ++i) {
      if (ref_buffer_stack->lst_stack[i] == idx) {
        is_free = 0;
        break;
      }
    }

    for (int i = 0; i < ref_buffer_stack->gld_stack_size; ++i) {
      if (ref_buffer_stack->gld_stack[i] == idx) {
        is_free = 0;
        break;
      }
    }

    if (is_free) return idx;
  }
  return INVALID_IDX;
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
}

#if CONFIG_FRAME_PARALLEL_ENCODE
static int get_refresh_idx(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
                           int update_arf,
#if CONFIG_FRAME_PARALLEL_ENCODE_2
                           GF_GROUP *gf_group, int gf_index,
                           int enable_refresh_skip,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
                           int cur_frame_disp) {
  int arf_count = 0;
  int oldest_arf_order = INT32_MAX;
  int oldest_arf_idx = -1;

  int oldest_frame_order = INT32_MAX;
  int oldest_idx = -1;

  for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
    RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
    if (ref_pair.disp_order == -1) continue;
    const int frame_order = ref_pair.disp_order;
    const int reference_frame_level = ref_pair.pyr_level;
    // Do not refresh a future frame.
    if (frame_order > cur_frame_disp) continue;

#if CONFIG_FRAME_PARALLEL_ENCODE_2
    if (enable_refresh_skip) {
      int skip_frame = 0;
      // Prevent refreshing a frame in gf_group->skip_frame_refresh.
      for (int i = 0; i < REF_FRAMES; i++) {
        int frame_to_skip = gf_group->skip_frame_refresh[gf_index][i];
        if (frame_to_skip == INVALID_IDX) break;
        if (frame_order == frame_to_skip) {
          skip_frame = 1;
          break;
        }
      }
      if (skip_frame) continue;
    }
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2

    // Keep track of the oldest level 1 frame if the current frame is also
    // level 1.
    if (reference_frame_level == 1) {
      // If there are more than 2 level 1 frames in the reference list,
      // discard the oldest.
      if (frame_order < oldest_arf_order) {
        oldest_arf_order = frame_order;
        oldest_arf_idx = map_idx;
      }
      arf_count++;
      continue;
    }

    // Update the overall oldest reference frame.
    if (frame_order < oldest_frame_order) {
      oldest_frame_order = frame_order;
      oldest_idx = map_idx;
    }
  }
  if (update_arf && arf_count > 2) return oldest_arf_idx;
  if (oldest_idx >= 0) return oldest_idx;
  if (oldest_arf_idx >= 0) return oldest_arf_idx;
#if CONFIG_FRAME_PARALLEL_ENCODE_2
  if (oldest_idx == -1) {
    assert(arf_count > 2 && enable_refresh_skip);
    return oldest_arf_idx;
  }
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
  assert(0 && "No valid refresh index found");
  return -1;
}
#endif  // CONFIG_FRAME_PARALLEL_ENCODE

int av1_get_refresh_frame_flags(const AV1_COMP *const cpi,
                                const EncodeFrameParams *const frame_params,
                                FRAME_UPDATE_TYPE frame_update_type,
                                int gf_index,
#if CONFIG_FRAME_PARALLEL_ENCODE
                                int cur_disp_order,
                                RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
                                const RefBufferStack *const ref_buffer_stack) {
  const AV1_COMMON *const cm = &cpi->common;
  const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
      &cpi->ext_flags.refresh_frame;

  GF_GROUP *gf_group = &cpi->ppi->gf_group;
  if (gf_group->refbuf_state[gf_index] == REFBUF_RESET)
    return SELECT_ALL_BUF_SLOTS;

  // TODO(jingning): Deprecate the following operations.
  // Switch frames and shown key-frames overwrite all reference slots
  if (frame_params->frame_type == S_FRAME) return SELECT_ALL_BUF_SLOTS;

  // show_existing_frames don't actually send refresh_frame_flags so set the
  // flags to 0 to keep things consistent.
  if (frame_params->show_existing_frame &&
      (!frame_params->error_resilient_mode ||
       frame_params->frame_type == KEY_FRAME)) {
    return 0;
  }

  const SVC *const svc = &cpi->svc;
  if (is_frame_droppable(svc, ext_refresh_frame_flags)) return 0;

  int refresh_mask = 0;

  if (ext_refresh_frame_flags->update_pending) {
    if (svc->set_ref_frame_config) {
      for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++) {
        int ref_frame_map_idx = svc->ref_idx[i];
        refresh_mask |= svc->refresh[ref_frame_map_idx] << ref_frame_map_idx;
      }
      return refresh_mask;
    }
    // Unfortunately the encoder interface reflects the old refresh_*_frame
    // flags so we have to replicate the old refresh_frame_flags logic here in
    // order to preserve the behaviour of the flag overrides.
    int ref_frame_map_idx = get_ref_frame_map_idx(cm, LAST_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->last_frame << ref_frame_map_idx;

    ref_frame_map_idx = get_ref_frame_map_idx(cm, EXTREF_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->bwd_ref_frame
                      << ref_frame_map_idx;

    ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF2_FRAME);
    if (ref_frame_map_idx != INVALID_IDX)
      refresh_mask |= ext_refresh_frame_flags->alt2_ref_frame
                      << ref_frame_map_idx;

    if (frame_update_type == OVERLAY_UPDATE) {
      ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->golden_frame
                        << ref_frame_map_idx;
    } else {
      ref_frame_map_idx = get_ref_frame_map_idx(cm, GOLDEN_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->golden_frame
                        << ref_frame_map_idx;

      ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
      if (ref_frame_map_idx != INVALID_IDX)
        refresh_mask |= ext_refresh_frame_flags->alt_ref_frame
                        << ref_frame_map_idx;
    }
    return refresh_mask;
  }

  // Search for the open slot to store the current frame.
  int free_fb_index = get_free_ref_map_index(
#if CONFIG_FRAME_PARALLEL_ENCODE
      ref_frame_map_pairs,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
      ref_buffer_stack);

#if CONFIG_FRAME_PARALLEL_ENCODE
  // No refresh necessary for these frame types.
  if (frame_update_type == OVERLAY_UPDATE ||
      frame_update_type == INTNL_OVERLAY_UPDATE)
    return refresh_mask;

  // If there is an open slot, refresh that one instead of replacing a
  // reference.
  if (free_fb_index != INVALID_IDX) {
    refresh_mask = 1 << free_fb_index;
    return refresh_mask;
  }
#if CONFIG_FRAME_PARALLEL_ENCODE_2
  const int enable_refresh_skip = !is_one_pass_rt_params(cpi);
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
  const int update_arf = frame_update_type == ARF_UPDATE;
  const int refresh_idx =
      get_refresh_idx(ref_frame_map_pairs, update_arf,
#if CONFIG_FRAME_PARALLEL_ENCODE_2
                      &cpi->ppi->gf_group, gf_index, enable_refresh_skip,
#endif  // CONFIG_FRAME_PARALLEL_ENCODE_2
                      cur_disp_order);
  return 1 << refresh_idx;
#else
  switch (frame_update_type) {
    case KF_UPDATE:
    case GF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        if (ref_buffer_stack->gld_stack_size)
          refresh_mask =
              1 << ref_buffer_stack
                      ->gld_stack[ref_buffer_stack->gld_stack_size - 1];
        else
          refresh_mask =
              1 << ref_buffer_stack
                      ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
      }
      break;
    case LF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        if (ref_buffer_stack->lst_stack_size >= 2)
          refresh_mask =
              1 << ref_buffer_stack
                      ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
        else if (ref_buffer_stack->gld_stack_size >= 2)
          refresh_mask =
              1 << ref_buffer_stack
                      ->gld_stack[ref_buffer_stack->gld_stack_size - 1];
        else
          assert(0 && "No ref map index found");
      }
      break;
    case ARF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        if (ref_buffer_stack->gld_stack_size >= 3)
          refresh_mask =
              1 << ref_buffer_stack
                      ->gld_stack[ref_buffer_stack->gld_stack_size - 1];
        else if (ref_buffer_stack->lst_stack_size >= 2)
          refresh_mask =
              1 << ref_buffer_stack
                      ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
        else
          assert(0 && "No ref map index found");
      }
      break;
    case INTNL_ARF_UPDATE:
      if (free_fb_index != INVALID_IDX) {
        refresh_mask = 1 << free_fb_index;
      } else {
        refresh_mask =
            1 << ref_buffer_stack
                    ->lst_stack[ref_buffer_stack->lst_stack_size - 1];
      }
      break;
    case OVERLAY_UPDATE:
      if (free_fb_index != INVALID_IDX) refresh_mask = 1 << free_fb_index;
      break;
    case INTNL_OVERLAY_UPDATE: break;
    default: assert(0); break;
  }

  return refresh_mask;
#endif  // CONFIG_FRAME_PARALLEL_ENCODE
}

#if !CONFIG_REALTIME_ONLY
void setup_mi(AV1_COMP *const cpi, YV12_BUFFER_CONFIG *src) {
  AV1_COMMON *const cm = &cpi->common;
  const int num_planes = av1_num_planes(cm);
  MACROBLOCK *const x = &cpi->td.mb;
  MACROBLOCKD *const xd = &x->e_mbd;

  av1_setup_src_planes(x, src, 0, 0, num_planes, cm->seq_params->sb_size);

  av1_setup_block_planes(xd, cm->seq_params->subsampling_x,
                         cm->seq_params->subsampling_y, num_planes);

  set_mi_offsets(&cm->mi_params, xd, 0, 0);
}

// Apply temporal filtering to source frames and encode the filtered frame.
// If the current frame does not require filtering, this function is identical
// to av1_encode() except that tpl is not performed.
static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
                              EncodeFrameInput *const frame_input,
                              EncodeFrameParams *const frame_params,
                              EncodeFrameResults *const frame_results) {
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) start_timing(cpi, denoise_and_encode_time);
#endif
  const AV1EncoderConfig *const oxcf = &cpi->oxcf;
  AV1_COMMON *const cm = &cpi->common;
  const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
  FRAME_UPDATE_TYPE update_type =
      get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);

  // Decide whether to apply temporal filtering to the source frame.
  int apply_filtering = 0;
  if (frame_params->frame_type == KEY_FRAME) {
    // Decide whether it is allowed to perform key frame filtering
    int allow_kf_filtering =
        oxcf->kf_cfg.enable_keyframe_filtering &&
        !is_stat_generation_stage(cpi) && !frame_params->show_existing_frame &&
        cpi->rc.frames_to_key > cpi->oxcf.algo_cfg.arnr_max_frames &&
        !is_lossless_requested(&oxcf->rc_cfg) &&
        oxcf->algo_cfg.arnr_max_frames > 0 && oxcf->gf_cfg.lag_in_frames > 1;
    if (allow_kf_filtering) {
      const double y_noise_level = av1_estimate_noise_from_single_plane(
          frame_input->source, 0, cm->seq_params->bit_depth);
      apply_filtering = y_noise_level > 0;
    } else {
      apply_filtering = 0;
    }
    // If we are doing kf filtering, set up a few things.
    if (apply_filtering) {
      av1_setup_past_independence(cm);
    }
  } else if (update_type == ARF_UPDATE || update_type == INTNL_ARF_UPDATE) {
    // ARF
    apply_filtering = oxcf->algo_cfg.arnr_max_frames > 0;
  }
  if (is_stat_generation_stage(cpi)) {
    apply_filtering = 0;
  }

#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) start_timing(cpi, apply_filtering_time);
#endif
  // Save the pointer to the original source image.
  YV12_BUFFER_CONFIG *source_buffer = frame_input->source;
  // apply filtering to frame
  if (apply_filtering) {
    int show_existing_alt_ref = 0;
    // TODO(bohanli): figure out why we need frame_type in cm here.
    cm->current_frame.frame_type = frame_params->frame_type;
    int arf_src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
    int is_forward_keyframe = 0;
    if (!frame_params->show_frame && cpi->no_show_fwd_kf) {
      // TODO(angiebird): Figure out why this condition yields forward keyframe.
      // fwd kf
      is_forward_keyframe = 1;
    }
    const int code_arf =
        av1_temporal_filter(cpi, arf_src_index, update_type,
                            is_forward_keyframe, &show_existing_alt_ref);
    if (code_arf) {
      aom_extend_frame_borders(&cpi->ppi->alt_ref_buffer, av1_num_planes(cm));
      frame_input->source = &cpi->ppi->alt_ref_buffer;
      aom_copy_metadata_to_frame_buffer(frame_input->source,
                                        source_buffer->metadata);
    }
    // Currently INTNL_ARF_UPDATE only does show_existing.
    if (update_type == ARF_UPDATE && !cpi->no_show_fwd_kf) {
      cpi->show_existing_alt_ref = show_existing_alt_ref;
    }
  }
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) end_timing(cpi, apply_filtering_time);
#endif

  // perform tpl after filtering
  int allow_tpl = oxcf->gf_cfg.lag_in_frames > 1 &&
                  !is_stat_generation_stage(cpi) &&
                  oxcf->algo_cfg.enable_tpl_model;
  if (frame_params->frame_type == KEY_FRAME) {
    // Don't do tpl for fwd key frames or fwd key frame overlays
    allow_tpl = allow_tpl && !cpi->sf.tpl_sf.disable_filtered_key_tpl &&
                !cpi->no_show_fwd_kf &&
                gf_group->update_type[cpi->gf_frame_index] != OVERLAY_UPDATE;
  } else {
    // Do tpl after ARF is filtered, or if no ARF, at the second frame of GF
    // group.
    // TODO(bohanli): if no ARF, just do it at the first frame.
    int gf_index = cpi->gf_frame_index;
    allow_tpl = allow_tpl && (gf_group->update_type[gf_index] == ARF_UPDATE ||
                              gf_group->update_type[gf_index] == GF_UPDATE);
    if (allow_tpl) {
      // Need to set the size for TPL for ARF
      // TODO(bohanli): Why is this? what part of it is necessary?
      av1_set_frame_size(cpi, cm->superres_upscaled_width,
                         cm->superres_upscaled_height);
    }
  }

#if CONFIG_RD_COMMAND
  if (frame_params->frame_type == KEY_FRAME) {
    char filepath[] = "rd_command.txt";
    av1_read_rd_command(filepath, &cpi->rd_command);
  }
#endif  // CONFIG_RD_COMMAND

  if (allow_tpl == 0) {
    // Avoid the use of unintended TPL stats from previous GOP's results.
    if (cpi->gf_frame_index == 0 && !is_stat_generation_stage(cpi))
      av1_init_tpl_stats(&cpi->ppi->tpl_data);
  } else {
    if (!cpi->skip_tpl_setup_stats) {
      av1_tpl_preload_rc_estimate(cpi, frame_params);
      av1_tpl_setup_stats(cpi, 0, frame_params, frame_input);
    }
  }

  if (av1_encode(cpi, dest, frame_input, frame_params, frame_results) !=
      AOM_CODEC_OK) {
    return AOM_CODEC_ERROR;
  }

  // Set frame_input source to true source for psnr calculation.
  if (apply_filtering && is_psnr_calc_enabled(cpi)) {
    cpi->source =
        av1_scale_if_required(cm, source_buffer, &cpi->scaled_source,
                              cm->features.interp_filter, 0, false, true);
    cpi->unscaled_source = source_buffer;
  }
#if CONFIG_COLLECT_COMPONENT_TIMING
  if (cpi->oxcf.pass == 2) end_timing(cpi, denoise_and_encode_time);
#endif
  return AOM_CODEC_OK;
}
#endif  // !CONFIG_REALTIME_ONLY

static INLINE int find_unused_ref_frame(const int *used_ref_frames,
                                        const int *stack, int stack_size) {
  for (int i = 0; i < stack_size; ++i) {
    const int this_ref = stack[i];
    int ref_idx = 0;
    for (ref_idx = 0; ref_idx <= ALTREF_FRAME - LAST_FRAME; ++ref_idx) {
      if (this_ref == used_ref_frames[ref_idx]) break;
    }

    // not in use
    if (ref_idx > ALTREF_FRAME - LAST_FRAME) return this_ref;
  }

  return INVALID_IDX;
}
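
// Illustrative sketch (not compiled), with made-up indices: buffer 5 is the
// first stack entry that none of the seven named reference slots points at.
#if 0
{
  const int used_ref_frames[ALTREF_FRAME - LAST_FRAME + 1] = { 0, 1, 2, 3,
                                                               4, 6, 7 };
  const int stack[] = { 4, 5 };
  assert(find_unused_ref_frame(used_ref_frames, stack, 2) == 5);
}
#endif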

#if CONFIG_FRAME_PARALLEL_ENCODE
/*!\cond */
// Struct to keep track of relevant reference frame data.
typedef struct {
  int map_idx;
  int disp_order;
  int pyr_level;
  int used;
} RefBufMapData;
/*!\endcond */

// Comparison function to sort reference frames in ascending display order.
static int compare_map_idx_pair_asc(const void *a, const void *b) {
  if (((RefBufMapData *)a)->disp_order == ((RefBufMapData *)b)->disp_order) {
    return 0;
  } else if (((const RefBufMapData *)a)->disp_order >
             ((const RefBufMapData *)b)->disp_order) {
    return 1;
  } else {
    return -1;
  }
}

// Checks to see if a particular reference frame is already in the reference
// frame map.
static int is_in_ref_map(RefBufMapData *map, int disp_order, int n_frames) {
  for (int i = 0; i < n_frames; i++) {
    if (disp_order == map[i].disp_order) return 1;
  }
  return 0;
}

// Add a reference buffer index to a named reference slot.
static void add_ref_to_slot(RefBufMapData *ref, int *const remapped_ref_idx,
                            int frame) {
  remapped_ref_idx[frame - LAST_FRAME] = ref->map_idx;
  ref->used = 1;
}
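
// Illustrative sketch (not compiled): mapping a chosen buffer_map entry into
// the GOLDEN_FRAME slot of the remapped reference index array. `buffer_map`,
// `golden_idx` and `remapped_ref_idx` are assumed to be set up as in
// get_ref_frames() below.
#if 0
{
  add_ref_to_slot(&buffer_map[golden_idx], remapped_ref_idx, GOLDEN_FRAME);
  // remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] now holds that buffer's
  // physical slot, and the entry is marked as used.
}
#endif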
1098
1099// Threshold dictating when we are allowed to start considering
1100// leaving lowest level frames unmapped.
1101#define LOW_LEVEL_FRAMES_TR 5
1102
1103// Find which reference buffer should be left out of the named mapping.
1104// This is because there are 8 reference buffers and only 7 named slots.
1105static void set_unmapped_ref(RefBufMapData *buffer_map, int n_bufs,
1106 int n_min_level_refs, int min_level,
1107 int cur_frame_disp) {
1108 int max_dist = 0;
1109 int unmapped_idx = -1;
1110 if (n_bufs <= ALTREF_FRAME) return;
1111 for (int i = 0; i < n_bufs; i++) {
1112 if (buffer_map[i].used) continue;
1113 if (buffer_map[i].pyr_level != min_level ||
1114 n_min_level_refs >= LOW_LEVEL_FRAMES_TR) {
1115 int dist = abs(cur_frame_disp - buffer_map[i].disp_order);
1116 if (dist > max_dist) {
1117 max_dist = dist;
1118 unmapped_idx = i;
1119 }
1120 }
1121 }
1122 assert(unmapped_idx >= 0 && "Unmapped reference not found");
1123 buffer_map[unmapped_idx].used = 1;
1124}
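
#if 0
// Illustrative sketch only (hypothetical values; this example function is not
// called anywhere in the encoder): with REF_FRAMES (8) buffers but only 7
// named slots, the unused buffer whose display order is furthest from the
// current frame is marked as used so the mapping loops below skip it. The
// lone lowest-level buffer is protected because n_min_level_refs is below
// LOW_LEVEL_FRAMES_TR.
static void example_set_unmapped_ref(void) {
  RefBufMapData buffer_map[REF_FRAMES];
  memset(buffer_map, 0, sizeof(buffer_map));
  for (int i = 0; i < REF_FRAMES; i++) {
    buffer_map[i].map_idx = i;
    buffer_map[i].disp_order = i * 4;            // Display orders 0..28.
    buffer_map[i].pyr_level = (i == 0) ? 1 : 2;  // Buffer 0 is lowest level.
  }
  // Current frame at display order 16: buffer 1 (display order 4) is the
  // furthest eligible buffer encountered first, so it is left unmapped.
  set_unmapped_ref(buffer_map, REF_FRAMES, /*n_min_level_refs=*/1,
                   /*min_level=*/1, /*cur_frame_disp=*/16);
  assert(buffer_map[1].used == 1);
}
#endif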
1125
1126static void get_ref_frames(AV1_COMP *const cpi,
1127 RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301128#if CONFIG_FRAME_PARALLEL_ENCODE_2
1129 int gf_index,
1130#endif // CONFIG_FRAME_PARALLEL_ENCODE_2
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301131 int cur_frame_disp) {
1132 AV1_COMMON *cm = &cpi->common;
1133 int *const remapped_ref_idx = cm->remapped_ref_idx;
1134
1135 int buf_map_idx = 0;
1136
1137 // Initialize reference frame mappings.
1138 for (int i = 0; i < REF_FRAMES; ++i) remapped_ref_idx[i] = INVALID_IDX;
1139
1140 RefBufMapData buffer_map[REF_FRAMES];
1141 int n_bufs = 0;
1142 memset(buffer_map, 0, REF_FRAMES * sizeof(buffer_map[0]));
1143 int min_level = MAX_ARF_LAYERS;
1144 int max_level = 0;
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301145#if CONFIG_FRAME_PARALLEL_ENCODE_2
1146 GF_GROUP *gf_group = &cpi->ppi->gf_group;
1147 int skip_ref_unmapping = 0;
1148 int is_one_pass_rt = is_one_pass_rt_params(cpi);
1149#endif // CONFIG_FRAME_PARALLEL_ENCODE_2
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301150
1151 // Go through current reference buffers and store display order, pyr level,
1152 // and map index.
1153 for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
1154 // Get reference frame buffer.
1155 RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
1156 if (ref_pair.disp_order == -1) continue;
1157 const int frame_order = ref_pair.disp_order;
1158 // Avoid duplicates.
1159 if (is_in_ref_map(buffer_map, frame_order, n_bufs)) continue;
1160 const int reference_frame_level = ref_pair.pyr_level;
1161
1162 // Keep track of the lowest and highest levels that currently exist.
1163 if (reference_frame_level < min_level) min_level = reference_frame_level;
1164 if (reference_frame_level > max_level) max_level = reference_frame_level;
1165
1166 buffer_map[n_bufs].map_idx = map_idx;
1167 buffer_map[n_bufs].disp_order = frame_order;
1168 buffer_map[n_bufs].pyr_level = reference_frame_level;
1169 buffer_map[n_bufs].used = 0;
1170 n_bufs++;
1171 }
1172
1173 // Sort frames in ascending display order.
1174 qsort(buffer_map, n_bufs, sizeof(buffer_map[0]), compare_map_idx_pair_asc);
1175
1176 int n_min_level_refs = 0;
1177 int n_past_high_level = 0;
1178 int closest_past_ref = -1;
1179 int golden_idx = -1;
1180 int altref_idx = -1;
1181
1182 // Find candidates for the GOLDEN_FRAME and ALTREF_FRAME mappings, and map
1183 // BWDREF_FRAME to the buffer at the current display order (the
1184 // show_existing_frame case). Also collect stats for the remaining mappings.
1185 for (int i = n_bufs - 1; i >= 0; i--) {
1186 if (buffer_map[i].pyr_level == min_level) {
1187 // Keep track of the number of lowest level frames.
1188 n_min_level_refs++;
1189 if (buffer_map[i].disp_order < cur_frame_disp && golden_idx == -1 &&
1190 remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] == INVALID_IDX) {
1191 // Save index for GOLDEN.
1192 golden_idx = i;
1193 } else if (buffer_map[i].disp_order > cur_frame_disp &&
1194 altref_idx == -1 &&
1195 remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] == INVALID_IDX) {
1196 // Save index for ALTREF.
1197 altref_idx = i;
1198 }
1199 } else if (buffer_map[i].disp_order == cur_frame_disp) {
1200 // Map the BWDREF_FRAME if this is the show_existing_frame.
1201 add_ref_to_slot(&buffer_map[i], remapped_ref_idx, BWDREF_FRAME);
1202 }
1203
1204 // Keep track of the number of past frames that are not at the lowest level.
1205 if (buffer_map[i].disp_order < cur_frame_disp &&
1206 buffer_map[i].pyr_level != min_level)
1207 n_past_high_level++;
1208
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301209#if CONFIG_FRAME_PARALLEL_ENCODE_2
1210 // During parallel encodes of lower layer frames, exclude the first frame
1211 // (frame_parallel_level 1) from being used for the reference assignment of
1212 // the second frame (frame_parallel_level 2).
1213 if (!is_one_pass_rt &&
1214 gf_group->skip_frame_as_ref[gf_index] != INVALID_IDX) {
1215 assert(gf_group->frame_parallel_level[gf_index] == 2 &&
1216 gf_group->update_type[gf_index] == INTNL_ARF_UPDATE);
1217 assert(gf_group->frame_parallel_level[gf_index - 1] == 1 &&
1218 gf_group->update_type[gf_index - 1] == INTNL_ARF_UPDATE);
1219 if (buffer_map[i].disp_order == gf_group->skip_frame_as_ref[gf_index]) {
1220 buffer_map[i].used = 1;
1221 // In case a ref frame is excluded from being used during assignment,
1222 // skip the call to set_unmapped_ref(). Applicable in steady state.
1223 skip_ref_unmapping = 1;
1224 }
1225 }
1226#endif // CONFIG_FRAME_PARALLEL_ENCODE_2
1227
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301228 // Keep track of where the frames change from being past frames to future
1229 // frames.
1230 if (buffer_map[i].disp_order < cur_frame_disp && closest_past_ref < 0)
1231 closest_past_ref = i;
1232 }
1233
1234 // Do not map GOLDEN and ALTREF based on their pyramid level if all reference
1235 // frames have the same level.
1236 if (n_min_level_refs <= n_bufs) {
1237 // Map the GOLDEN_FRAME.
1238 if (golden_idx > -1)
1239 add_ref_to_slot(&buffer_map[golden_idx], remapped_ref_idx, GOLDEN_FRAME);
1240 // Map the ALTREF_FRAME.
1241 if (altref_idx > -1)
1242 add_ref_to_slot(&buffer_map[altref_idx], remapped_ref_idx, ALTREF_FRAME);
1243 }
1244
1245 // Find the buffer to be excluded from the mapping.
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301246#if CONFIG_FRAME_PARALLEL_ENCODE_2
1247 if (!skip_ref_unmapping)
1248#endif // CONFIG_FRAME_PARALLEL_ENCODE_2
1249 set_unmapped_ref(buffer_map, n_bufs, n_min_level_refs, min_level,
1250 cur_frame_disp);
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301251
1252 // Place past frames in LAST_FRAME, LAST2_FRAME, and LAST3_FRAME.
1253 for (int frame = LAST_FRAME; frame < GOLDEN_FRAME; frame++) {
1254 // Continue if the current ref slot is already full.
1255 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1256 // Find the next unmapped reference buffer
1257 // in decreasing output order relative to current picture.
1258 int next_buf_max = 0;
1259 int next_disp_order = INT_MIN;
1260 for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
1261 if (!buffer_map[buf_map_idx].used &&
1262 buffer_map[buf_map_idx].disp_order < cur_frame_disp &&
1263 buffer_map[buf_map_idx].disp_order > next_disp_order) {
1264 next_disp_order = buffer_map[buf_map_idx].disp_order;
1265 next_buf_max = buf_map_idx;
1266 }
1267 }
1268 buf_map_idx = next_buf_max;
1269 if (buf_map_idx < 0) break;
1270 if (buffer_map[buf_map_idx].used) break;
1271 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1272 }
1273
1274 // Place future frames (if there are any) in BWDREF_FRAME and ALTREF2_FRAME.
1275 for (int frame = BWDREF_FRAME; frame < REF_FRAMES; frame++) {
1276 // Continue if the current ref slot is already full.
1277 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1278 // Find the next unmapped reference buffer
1279 // in increasing output order relative to current picture.
1280 int next_buf_max = 0;
1281 int next_disp_order = INT_MAX;
1282 for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
1283 if (!buffer_map[buf_map_idx].used &&
1284 buffer_map[buf_map_idx].disp_order > cur_frame_disp &&
1285 buffer_map[buf_map_idx].disp_order < next_disp_order) {
1286 next_disp_order = buffer_map[buf_map_idx].disp_order;
1287 next_buf_max = buf_map_idx;
1288 }
1289 }
1290 buf_map_idx = next_buf_max;
1291 if (buf_map_idx < 0) break;
1292 if (buffer_map[buf_map_idx].used) break;
1293 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1294 }
1295
1296 // Place remaining past frames.
1297 buf_map_idx = closest_past_ref;
1298 for (int frame = LAST_FRAME; frame < REF_FRAMES; frame++) {
1299 // Continue if the current ref slot is already full.
1300 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1301 // Find the next unmapped reference buffer.
1302 for (; buf_map_idx >= 0; buf_map_idx--) {
1303 if (!buffer_map[buf_map_idx].used) break;
1304 }
1305 if (buf_map_idx < 0) break;
1306 if (buffer_map[buf_map_idx].used) break;
1307 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1308 }
1309
1310 // Place remaining future frames.
1311 buf_map_idx = n_bufs - 1;
1312 for (int frame = ALTREF_FRAME; frame >= LAST_FRAME; frame--) {
1313 // Continue if the current ref slot is already full.
1314 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1315 // Find the next unmapped reference buffer.
1316 for (; buf_map_idx > closest_past_ref; buf_map_idx--) {
1317 if (!buffer_map[buf_map_idx].used) break;
1318 }
1319 if (buf_map_idx < 0) break;
1320 if (buffer_map[buf_map_idx].used) break;
1321 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1322 }
1323
1324 // Fill any slots that are empty (should only happen for the first 7 frames).
1325 for (int i = 0; i < REF_FRAMES; ++i)
1326 if (remapped_ref_idx[i] == INVALID_IDX) remapped_ref_idx[i] = 0;
1327}
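
// Informal summary of the mapping above: GOLDEN_FRAME receives the most
// recent lowest-level past frame and ALTREF_FRAME the furthest lowest-level
// future frame; BWDREF_FRAME is pinned to the buffer at the current display
// order in show_existing_frame cases. LAST_FRAME/LAST2_FRAME/LAST3_FRAME are
// then filled with the nearest past frames in decreasing display order and
// BWDREF_FRAME/ALTREF2_FRAME with the nearest future frames in increasing
// display order, after which any remaining past and future buffers fill the
// still-empty slots. One of the REF_FRAMES (8) buffers is left unmapped by
// set_unmapped_ref() since only 7 named slots exist.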
1328#endif // CONFIG_FRAME_PARALLEL_ENCODE
1329
Angie Chiangd96bce12021-03-24 19:52:18 -07001330void av1_get_ref_frames(const RefBufferStack *ref_buffer_stack,
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301331#if CONFIG_FRAME_PARALLEL_ENCODE
1332 AV1_COMP *cpi,
1333 RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
1334 int cur_frame_disp,
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301335#if CONFIG_FRAME_PARALLEL_ENCODE_2
1336 int gf_index,
1337#endif // CONFIG_FRAME_PARALLEL_ENCODE_2
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301338#endif // CONFIG_FRAME_PARALLEL_ENCODE
Angie Chiangd96bce12021-03-24 19:52:18 -07001339 int remapped_ref_idx[REF_FRAMES]) {
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301340#if CONFIG_FRAME_PARALLEL_ENCODE
1341 (void)ref_buffer_stack;
1342 (void)remapped_ref_idx;
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301343 get_ref_frames(cpi, ref_frame_map_pairs,
1344#if CONFIG_FRAME_PARALLEL_ENCODE_2
1345 gf_index,
1346#endif // CONFIG_FRAME_PARALLEL_ENCODE_2
1347 cur_frame_disp);
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301348 return;
1349#else
Angie Chiangd96bce12021-03-24 19:52:18 -07001350 const int *const arf_stack = ref_buffer_stack->arf_stack;
1351 const int *const lst_stack = ref_buffer_stack->lst_stack;
1352 const int *const gld_stack = ref_buffer_stack->gld_stack;
Jingning Han42266ca2019-07-12 14:37:16 -07001353 const int arf_stack_size = ref_buffer_stack->arf_stack_size;
1354 const int lst_stack_size = ref_buffer_stack->lst_stack_size;
1355 const int gld_stack_size = ref_buffer_stack->gld_stack_size;
Jingning Han0a2af4e2019-07-08 19:30:03 -07001356
1357 // Initialization
Hui Su5f7ee812020-02-20 15:58:04 -08001358 for (int i = 0; i < REF_FRAMES; ++i) remapped_ref_idx[i] = INVALID_IDX;
Jingning Han0a2af4e2019-07-08 19:30:03 -07001359
1360 if (arf_stack_size) {
Hui Su5f7ee812020-02-20 15:58:04 -08001361 remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] = arf_stack[arf_stack_size - 1];
Jingning Han0a2af4e2019-07-08 19:30:03 -07001362
1363 if (arf_stack_size > 1)
Hui Su5f7ee812020-02-20 15:58:04 -08001364 remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = arf_stack[0];
Jingning Han0a2af4e2019-07-08 19:30:03 -07001365
1366 if (arf_stack_size > 2)
Hui Su5f7ee812020-02-20 15:58:04 -08001367 remapped_ref_idx[ALTREF2_FRAME - LAST_FRAME] = arf_stack[1];
Jingning Han0a2af4e2019-07-08 19:30:03 -07001368 }
1369
1370 if (lst_stack_size) {
Hui Su5f7ee812020-02-20 15:58:04 -08001371 remapped_ref_idx[LAST_FRAME - LAST_FRAME] = lst_stack[0];
Jingning Han0a2af4e2019-07-08 19:30:03 -07001372
1373 if (lst_stack_size > 1)
Hui Su5f7ee812020-02-20 15:58:04 -08001374 remapped_ref_idx[LAST2_FRAME - LAST_FRAME] = lst_stack[1];
Jingning Han0a2af4e2019-07-08 19:30:03 -07001375 }
1376
1377 if (gld_stack_size) {
Hui Su5f7ee812020-02-20 15:58:04 -08001378 remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] = gld_stack[0];
Jingning Han0a2af4e2019-07-08 19:30:03 -07001379
Hui Su483d25d2020-04-15 00:28:08 -07001380 // If there are more frames in the golden stack, assign them to BWDREF,
1381 // ALTREF2, or LAST3.
Jingning Han0a2af4e2019-07-08 19:30:03 -07001382 if (gld_stack_size > 1) {
Hui Su483d25d2020-04-15 00:28:08 -07001383 if (arf_stack_size <= 2) {
1384 if (arf_stack_size <= 1) {
1385 remapped_ref_idx[BWDREF_FRAME - LAST_FRAME] = gld_stack[1];
1386 if (gld_stack_size > 2)
1387 remapped_ref_idx[ALTREF2_FRAME - LAST_FRAME] = gld_stack[2];
1388 } else {
1389 remapped_ref_idx[ALTREF2_FRAME - LAST_FRAME] = gld_stack[1];
1390 }
1391 } else {
Hui Su5f7ee812020-02-20 15:58:04 -08001392 remapped_ref_idx[LAST3_FRAME - LAST_FRAME] = gld_stack[1];
Hui Su483d25d2020-04-15 00:28:08 -07001393 }
Jingning Han0a2af4e2019-07-08 19:30:03 -07001394 }
1395 }
1396
1397 for (int idx = ALTREF_FRAME - LAST_FRAME; idx >= 0; --idx) {
Hui Su5f7ee812020-02-20 15:58:04 -08001398 int ref_map_index = remapped_ref_idx[idx];
Jingning Han0a2af4e2019-07-08 19:30:03 -07001399
1400 if (ref_map_index != INVALID_IDX) continue;
1401
Hui Su5f7ee812020-02-20 15:58:04 -08001402 ref_map_index =
1403 find_unused_ref_frame(remapped_ref_idx, arf_stack, arf_stack_size);
Jingning Han0a2af4e2019-07-08 19:30:03 -07001404
Hui Su5f7ee812020-02-20 15:58:04 -08001405 if (ref_map_index == INVALID_IDX) {
1406 ref_map_index =
1407 find_unused_ref_frame(remapped_ref_idx, gld_stack, gld_stack_size);
Jingning Han0a2af4e2019-07-08 19:30:03 -07001408 }
1409
Hui Su5f7ee812020-02-20 15:58:04 -08001410 if (ref_map_index == INVALID_IDX) {
1411 ref_map_index =
1412 find_unused_ref_frame(remapped_ref_idx, lst_stack, lst_stack_size);
Jingning Han0a2af4e2019-07-08 19:30:03 -07001413 }
1414
bohanli99852502020-07-14 16:22:45 -07001415 if (ref_map_index != INVALID_IDX) {
Hui Su5f7ee812020-02-20 15:58:04 -08001416 remapped_ref_idx[idx] = ref_map_index;
bohanli99852502020-07-14 16:22:45 -07001417 } else if (!gld_stack_size && arf_stack_size) {
1418 remapped_ref_idx[idx] = ref_buffer_stack->arf_stack[0];
1419 } else {
Hui Su5f7ee812020-02-20 15:58:04 -08001420 remapped_ref_idx[idx] = ref_buffer_stack->gld_stack[0];
bohanli99852502020-07-14 16:22:45 -07001421 }
Jingning Han0a2af4e2019-07-08 19:30:03 -07001422 }
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301423#endif // CONFIG_FRAME_PARALLEL_ENCODE
Jingning Han0a2af4e2019-07-08 19:30:03 -07001424}
1425
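// Returns -1 when no frame is ready to be encoded (e.g. the lookahead buffer
// is still being filled, or it has been fully drained at the end of the clip),
// AOM_CODEC_ERROR if encoding the frame fails, and AOM_CODEC_OK otherwise.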
David Turner056f7cd2019-01-07 17:48:13 +00001426int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
David Turner1539bb02019-01-24 15:28:13 +00001427 uint8_t *const dest, unsigned int *frame_flags,
David Turnerdedd8ff2019-01-23 13:59:46 +00001428 int64_t *const time_stamp, int64_t *const time_end,
Yue Chen1bc5be62018-08-24 13:57:32 -07001429 const aom_rational64_t *const timestamp_ratio,
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301430 int *const pop_lookahead, int flush) {
Vishesh7e9873d2020-06-08 15:41:33 +05301431 AV1EncoderConfig *const oxcf = &cpi->oxcf;
David Turner475a3132019-01-18 15:17:17 +00001432 AV1_COMMON *const cm = &cpi->common;
Mufaddal Chakera8ee04fa2021-03-17 13:33:18 +05301433 GF_GROUP *gf_group = &cpi->ppi->gf_group;
Vishesha195ca32020-04-07 18:46:20 +05301434 ExternalFlags *const ext_flags = &cpi->ext_flags;
Vishesh5b50e6d2020-06-10 19:20:07 +05301435 GFConfig *const gf_cfg = &oxcf->gf_cfg;
David Turner056f7cd2019-01-07 17:48:13 +00001436
David Turnerdedd8ff2019-01-23 13:59:46 +00001437 EncodeFrameInput frame_input;
David Turner04b70d82019-01-24 15:39:19 +00001438 EncodeFrameParams frame_params;
1439 EncodeFrameResults frame_results;
David Turnerdedd8ff2019-01-23 13:59:46 +00001440 memset(&frame_input, 0, sizeof(frame_input));
David Turner04b70d82019-01-24 15:39:19 +00001441 memset(&frame_params, 0, sizeof(frame_params));
1442 memset(&frame_results, 0, sizeof(frame_results));
1443
Jingning Hand3e827d2020-08-16 16:07:24 -07001444 // Check if we need to buffer more source frames
1445 if (flush == 0) {
1446 int srcbuf_size =
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +05301447 av1_lookahead_depth(cpi->ppi->lookahead, cpi->compressor_stage);
1448 int pop_size =
1449 av1_lookahead_pop_sz(cpi->ppi->lookahead, cpi->compressor_stage);
Jingning Hand3e827d2020-08-16 16:07:24 -07001450
1451 // Continue filling the lookahead buffer.
1452 if (srcbuf_size < pop_size) return -1;
1453 }
1454
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +05301455 if (!av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage)) {
Jingning Han3f3318f2020-08-16 16:12:10 -07001456#if !CONFIG_REALTIME_ONLY
Bohan Li445fdf62021-06-03 16:16:00 -07001457 if (flush && oxcf->pass == AOM_RC_FIRST_PASS &&
1458 !cpi->ppi->twopass.first_pass_done) {
Jingning Han3f3318f2020-08-16 16:12:10 -07001459 av1_end_first_pass(cpi); /* get last stats packet */
Mufaddal Chakera358cf212021-02-25 14:41:56 +05301460 cpi->ppi->twopass.first_pass_done = 1;
Jingning Han3f3318f2020-08-16 16:12:10 -07001461 }
1462#endif
1463 return -1;
1464 }
1465
Sarah Parker97803fc2019-05-17 14:15:37 -07001466 // TODO(sarahparker) finish bit allocation for one pass pyramid
Aasaipriya5feffea2020-04-15 12:43:05 +05301467 if (has_no_stats_stage(cpi)) {
Vishesh5b50e6d2020-06-10 19:20:07 +05301468 gf_cfg->gf_max_pyr_height =
1469 AOMMIN(gf_cfg->gf_max_pyr_height, USE_ALTREF_FOR_ONE_PASS);
1470 gf_cfg->gf_min_pyr_height =
1471 AOMMIN(gf_cfg->gf_min_pyr_height, gf_cfg->gf_max_pyr_height);
Urvang Joshib44f48f2020-01-27 11:09:48 -08001472 }
Sarah Parker97803fc2019-05-17 14:15:37 -07001473
Mufaddal Chakera7260d142021-04-12 01:03:40 +05301474 cpi->skip_tpl_setup_stats = 0;
Jingning Han3f3318f2020-08-16 16:12:10 -07001475#if !CONFIG_REALTIME_ONLY
Deepa K Gc29630a2021-05-31 13:19:41 +05301476 const int use_one_pass_rt_params = is_one_pass_rt_params(cpi);
Jingning Han3f3318f2020-08-16 16:12:10 -07001477 if (!use_one_pass_rt_params && !is_stat_generation_stage(cpi)) {
Yunqing Wang7a3ad542020-11-03 23:40:24 -08001478#if CONFIG_COLLECT_COMPONENT_TIMING
1479 start_timing(cpi, av1_get_second_pass_params_time);
1480#endif
Jingning Han3f3318f2020-08-16 16:12:10 -07001481 av1_get_second_pass_params(cpi, &frame_params, &frame_input, *frame_flags);
Yunqing Wang7a3ad542020-11-03 23:40:24 -08001482#if CONFIG_COLLECT_COMPONENT_TIMING
1483 end_timing(cpi, av1_get_second_pass_params_time);
1484#endif
Jingning Han3f3318f2020-08-16 16:12:10 -07001485 }
1486#endif
1487
Mufaddal Chakerae7326122019-12-04 14:49:09 +05301488 if (!is_stat_generation_stage(cpi)) {
Sarah Parker2beb1d12019-10-25 16:30:32 -07001489 // If this is a forward keyframe, mark as a show_existing_frame
bohanli1629a4b2020-06-11 16:15:14 -07001490 // TODO(bohanli): find a consistent condition for fwd keyframes
Jingning Han2dcb0502020-08-20 20:35:26 -07001491 if (oxcf->kf_cfg.fwd_kf_enabled &&
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301492 gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE &&
Jingning Han2dcb0502020-08-20 20:35:26 -07001493 cpi->rc.frames_to_key == 0) {
Sarah Parker2beb1d12019-10-25 16:30:32 -07001494 frame_params.show_existing_frame = 1;
1495 } else {
1496 frame_params.show_existing_frame =
Jingning Han2dcb0502020-08-20 20:35:26 -07001497 (cpi->show_existing_alt_ref &&
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301498 gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) ||
1499 gf_group->update_type[cpi->gf_frame_index] == INTNL_OVERLAY_UPDATE;
Sarah Parker2beb1d12019-10-25 16:30:32 -07001500 }
David Turnere86ee0d2019-02-18 17:16:28 +00001501 frame_params.show_existing_frame &= allow_show_existing(cpi, *frame_flags);
Yunqing Wang1973f112019-10-18 15:50:04 -07001502
1503 // Reset show_existing_alt_ref decision to 0 after it is used.
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301504 if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) {
Yunqing Wang1973f112019-10-18 15:50:04 -07001505 cpi->show_existing_alt_ref = 0;
1506 }
David Turnerb0c0aa32019-01-28 16:17:13 +00001507 } else {
David Turnere86ee0d2019-02-18 17:16:28 +00001508 frame_params.show_existing_frame = 0;
David Turnerb0c0aa32019-01-28 16:17:13 +00001509 }
1510
David Turnerdedd8ff2019-01-23 13:59:46 +00001511 struct lookahead_entry *source = NULL;
1512 struct lookahead_entry *last_source = NULL;
David Turnere86ee0d2019-02-18 17:16:28 +00001513 if (frame_params.show_existing_frame) {
Mufaddal Chakeraa65d2ce2021-02-15 12:20:48 +05301514 source = av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage);
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301515 *pop_lookahead = 1;
Jingning Hand392c012019-09-19 15:48:08 -07001516 frame_params.show_frame = 1;
David Turnerdedd8ff2019-01-23 13:59:46 +00001517 } else {
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301518 source = choose_frame_source(cpi, &flush, pop_lookahead, &last_source,
Angie Chiang470d1162020-12-31 13:10:55 -08001519 &frame_params);
David Turnerdedd8ff2019-01-23 13:59:46 +00001520 }
1521
1522 if (source == NULL) { // If no source was found, we can't encode a frame.
Jerome Jiang2612b4d2019-05-29 17:46:47 -07001523#if !CONFIG_REALTIME_ONLY
Bohan Li445fdf62021-06-03 16:16:00 -07001524 if (flush && oxcf->pass == AOM_RC_FIRST_PASS &&
1525 !cpi->ppi->twopass.first_pass_done) {
David Turnerdedd8ff2019-01-23 13:59:46 +00001526 av1_end_first_pass(cpi); /* get last stats packet */
Mufaddal Chakera358cf212021-02-25 14:41:56 +05301527 cpi->ppi->twopass.first_pass_done = 1;
David Turnerdedd8ff2019-01-23 13:59:46 +00001528 }
Jerome Jiang2612b4d2019-05-29 17:46:47 -07001529#endif
David Turnerdedd8ff2019-01-23 13:59:46 +00001530 return -1;
1531 }
Mufaddal Chakera97f92712021-05-21 15:10:33 +05301532
1533#if CONFIG_FRAME_PARALLEL_ENCODE
1534 // Reset src_offset to allow the actual encode call for this frame to get its
1535 // source.
1536 gf_group->src_offset[cpi->gf_frame_index] = 0;
1537#endif
1538
bohanli0db9c512020-06-12 17:43:06 -07001539 // Source may be changed if temporally filtered later.
1540 frame_input.source = &source->img;
David Turnerdedd8ff2019-01-23 13:59:46 +00001541 frame_input.last_source = last_source != NULL ? &last_source->img : NULL;
1542 frame_input.ts_duration = source->ts_end - source->ts_start;
Cheng Chene1a7a3e2020-03-18 18:23:19 -07001543 // Save unfiltered source. It is used in av1_get_second_pass_params().
1544 cpi->unfiltered_source = frame_input.source;
David Turnerdedd8ff2019-01-23 13:59:46 +00001545
1546 *time_stamp = source->ts_start;
1547 *time_end = source->ts_end;
Yunqing Wang15ab03c2020-11-24 16:45:25 -08001548 if (source->ts_start < cpi->time_stamps.first_ts_start) {
1549 cpi->time_stamps.first_ts_start = source->ts_start;
1550 cpi->time_stamps.prev_ts_end = source->ts_start;
David Turnerdedd8ff2019-01-23 13:59:46 +00001551 }
1552
1553 av1_apply_encoding_flags(cpi, source->flags);
bohanlicbe8e742020-08-17 14:19:17 -07001554 *frame_flags = (source->flags & AOM_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;
David Turnerdedd8ff2019-01-23 13:59:46 +00001555
Jingning Hand392c012019-09-19 15:48:08 -07001556 // Shown frames and arf-overlay frames need frame-rate consideration.
Deepa K Gfb89ce02020-04-06 13:34:42 +05301557 if (frame_params.show_frame)
1558 adjust_frame_rate(cpi, source->ts_start, source->ts_end);
David Turnerdedd8ff2019-01-23 13:59:46 +00001559
Jingning Hand392c012019-09-19 15:48:08 -07001560 if (!frame_params.show_existing_frame) {
David Turnerdedd8ff2019-01-23 13:59:46 +00001561 if (cpi->film_grain_table) {
Neil Birkbeckbd40ca72019-03-02 13:25:50 -08001562 cm->cur_frame->film_grain_params_present = aom_film_grain_table_lookup(
David Turnerdedd8ff2019-01-23 13:59:46 +00001563 cpi->film_grain_table, *time_stamp, *time_end, 0 /* =erase */,
1564 &cm->film_grain_params);
Neil Birkbeckbd40ca72019-03-02 13:25:50 -08001565 } else {
1566 cm->cur_frame->film_grain_params_present =
Tarundeep Singh4243e622021-04-20 16:10:22 +05301567 cm->seq_params->film_grain_params_present;
David Turnerdedd8ff2019-01-23 13:59:46 +00001568 }
David Turnerdedd8ff2019-01-23 13:59:46 +00001569 // only one operating point supported now
Yue Chen1bc5be62018-08-24 13:57:32 -07001570 const int64_t pts64 = ticks_to_timebase_units(timestamp_ratio, *time_stamp);
David Turnerdedd8ff2019-01-23 13:59:46 +00001571 if (pts64 < 0 || pts64 > UINT32_MAX) return AOM_CODEC_ERROR;
Jingning Hand392c012019-09-19 15:48:08 -07001572 cm->frame_presentation_time = (uint32_t)pts64;
David Turnerdedd8ff2019-01-23 13:59:46 +00001573 }
1574
Marco Paniconicea99e22019-07-16 18:36:31 -07001575#if CONFIG_REALTIME_ONLY
1576 av1_get_one_pass_rt_params(cpi, &frame_params, *frame_flags);
Tarundeep Singh9e77b302021-03-19 16:07:48 +05301577 if (cpi->oxcf.speed >= 5 && cpi->ppi->number_spatial_layers == 1 &&
1578 cpi->ppi->number_temporal_layers == 1)
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301579 av1_set_reference_structure_one_pass_rt(cpi, cpi->gf_frame_index == 0);
Marco Paniconicea99e22019-07-16 18:36:31 -07001580#else
chiyotsai8b8f8a22020-04-21 11:03:47 -07001581 if (use_one_pass_rt_params) {
Marco Paniconicea99e22019-07-16 18:36:31 -07001582 av1_get_one_pass_rt_params(cpi, &frame_params, *frame_flags);
Tarundeep Singh9e77b302021-03-19 16:07:48 +05301583 if (cpi->oxcf.speed >= 5 && cpi->ppi->number_spatial_layers == 1 &&
1584 cpi->ppi->number_temporal_layers == 1)
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301585 av1_set_reference_structure_one_pass_rt(cpi, cpi->gf_frame_index == 0);
chiyotsai8b8f8a22020-04-21 11:03:47 -07001586 }
Jerome Jiang2612b4d2019-05-29 17:46:47 -07001587#endif
Jingning Han3f3318f2020-08-16 16:12:10 -07001588
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301589 FRAME_UPDATE_TYPE frame_update_type =
1590 get_frame_update_type(gf_group, cpi->gf_frame_index);
David Turner4f1f1812019-01-24 17:00:24 +00001591
David Turnere86ee0d2019-02-18 17:16:28 +00001592 if (frame_params.show_existing_frame &&
1593 frame_params.frame_type != KEY_FRAME) {
David Turner475a3132019-01-18 15:17:17 +00001594 // Force show-existing frames to be INTER, except forward keyframes
1595 frame_params.frame_type = INTER_FRAME;
1596 }
1597
David Turner056f7cd2019-01-07 17:48:13 +00001598 // TODO(david.turner@argondesign.com): Move all the encode strategy
1599 // (largely near av1_get_compressed_data) in here
1600
1601 // TODO(david.turner@argondesign.com): Change all the encode strategy to
1602 // modify frame_params instead of cm or cpi.
1603
bohanli99852502020-07-14 16:22:45 -07001604 // Per-frame encode speed. In theory this can vary, but things may have
1605 // been written assuming speed-level will not change within a sequence, so
1606 // this parameter should be used with caution.
David Turner04b70d82019-01-24 15:39:19 +00001607 frame_params.speed = oxcf->speed;
1608
David Turnerddbff442019-01-21 14:58:42 +00001609 // Work out some encoding parameters specific to the pass:
Vishesh734eff92020-06-20 21:46:36 +05301610 if (has_no_stats_stage(cpi) && oxcf->q_cfg.aq_mode == CYCLIC_REFRESH_AQ) {
Sarah Parker97803fc2019-05-17 14:15:37 -07001611 av1_cyclic_refresh_update_parameters(cpi);
Mufaddal Chakerae7326122019-12-04 14:49:09 +05301612 } else if (is_stat_generation_stage(cpi)) {
Vishesh39e74092020-06-16 17:13:48 +05301613 cpi->td.mb.e_mbd.lossless[0] = is_lossless_requested(&oxcf->rc_cfg);
Jayasanker J312b85e2021-02-23 12:44:30 +05301614 // Current frame is coded as a key-frame for any of the following cases:
1615 // 1) First frame of a video
1616 // 2) For all-intra frame encoding
1617 // 3) When a key-frame is forced
1618 const int kf_requested =
1619 (cm->current_frame.frame_number == 0 ||
1620 oxcf->kf_cfg.key_freq_max == 0 || (*frame_flags & FRAMEFLAGS_KEY));
David Turner4f1f1812019-01-24 17:00:24 +00001621 if (kf_requested && frame_update_type != OVERLAY_UPDATE &&
1622 frame_update_type != INTNL_OVERLAY_UPDATE) {
David Turnerddbff442019-01-21 14:58:42 +00001623 frame_params.frame_type = KEY_FRAME;
1624 } else {
1625 frame_params.frame_type = INTER_FRAME;
David Turnercb5e36f2019-01-17 17:15:25 +00001626 }
Hamsalekha S37cc1d12019-12-12 19:27:41 +05301627 } else if (is_stat_consumption_stage(cpi)) {
David Turnerddbff442019-01-21 14:58:42 +00001628#if CONFIG_MISMATCH_DEBUG
1629 mismatch_move_frame_idx_w();
1630#endif
1631#if TXCOEFF_COST_TIMER
1632 cm->txcoeff_cost_timer = 0;
1633 cm->txcoeff_cost_count = 0;
1634#endif
1635 }
1636
Vishesha195ca32020-04-07 18:46:20 +05301637 if (!is_stat_generation_stage(cpi))
1638 set_ext_overrides(cm, &frame_params, ext_flags);
David Turnerddbff442019-01-21 14:58:42 +00001639
David Turner4f1f1812019-01-24 17:00:24 +00001640 // Shown keyframes and S frames refresh all reference buffers
1641 const int force_refresh_all =
1642 ((frame_params.frame_type == KEY_FRAME && frame_params.show_frame) ||
1643 frame_params.frame_type == S_FRAME) &&
David Turnere86ee0d2019-02-18 17:16:28 +00001644 !frame_params.show_existing_frame;
David Turner4f1f1812019-01-24 17:00:24 +00001645
Jingning Han880bb352021-06-18 11:57:26 -07001646 av1_configure_buffer_updates(
1647 cpi, &frame_params.refresh_frame, frame_update_type,
1648 gf_group->refbuf_state[cpi->gf_frame_index], force_refresh_all);
David Turner4f1f1812019-01-24 17:00:24 +00001649
Mufaddal Chakerae7326122019-12-04 14:49:09 +05301650 if (!is_stat_generation_stage(cpi)) {
Deepa K G140bc832019-10-30 17:16:29 +05301651 const RefCntBuffer *ref_frames[INTER_REFS_PER_FRAME];
1652 const YV12_BUFFER_CONFIG *ref_frame_buf[INTER_REFS_PER_FRAME];
1653
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301654#if CONFIG_FRAME_PARALLEL_ENCODE
1655 RefFrameMapPair ref_frame_map_pairs[REF_FRAMES];
1656 init_ref_map_pair(cpi, ref_frame_map_pairs);
1657 const int order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
1658 const int cur_frame_disp =
1659 cpi->common.current_frame.frame_number + order_offset;
1660#endif // CONFIG_FRAME_PARALLEL_ENCODE
1661
Vishesh38c05d72020-04-14 12:19:14 +05301662 if (!ext_flags->refresh_frame.update_pending) {
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301663 av1_get_ref_frames(&cpi->ref_buffer_stack,
1664#if CONFIG_FRAME_PARALLEL_ENCODE
1665 cpi, ref_frame_map_pairs, cur_frame_disp,
Remya Prakasanfa7262a2021-05-29 16:00:30 +05301666#if CONFIG_FRAME_PARALLEL_ENCODE_2
1667 cpi->gf_frame_index,
1668#endif // CONFIG_FRAME_PARALLEL_ENCODE_2
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301669#endif // CONFIG_FRAME_PARALLEL_ENCODE
1670 cm->remapped_ref_idx);
Marco Paniconie5de3322021-03-22 22:03:17 -07001671 } else if (cpi->svc.set_ref_frame_config) {
Marco Paniconi67142112019-07-24 15:00:31 -07001672 for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++)
1673 cm->remapped_ref_idx[i] = cpi->svc.ref_idx[i];
1674 }
Jingning Han0a2af4e2019-07-08 19:30:03 -07001675
Deepa K G140bc832019-10-30 17:16:29 +05301676 // Get the reference frames
1677 for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
1678 ref_frames[i] = get_ref_frame_buf(cm, ref_frame_priority_order[i]);
1679 ref_frame_buf[i] = ref_frames[i] != NULL ? &ref_frames[i]->buf : NULL;
1680 }
bohanli99852502020-07-14 16:22:45 -07001681
David Turnerddbff442019-01-21 14:58:42 +00001682 // Work out which reference frame slots may be used.
Jingning Hanb16c3da2020-09-22 22:37:02 -07001683 frame_params.ref_frame_flags = get_ref_frame_flags(
1684 &cpi->sf, ref_frame_buf, ext_flags->ref_frame_flags);
David Turnerddbff442019-01-21 14:58:42 +00001685
Remya Prakasana68eaef2021-05-19 19:15:52 +05301686#if CONFIG_FRAME_PARALLEL_ENCODE
1687 // Set primary_ref_frame of non-reference frames as PRIMARY_REF_NONE.
1688 if (cpi->ppi->gf_group.is_frame_non_ref[cpi->gf_frame_index]) {
1689 frame_params.primary_ref_frame = PRIMARY_REF_NONE;
1690 } else {
1691 frame_params.primary_ref_frame =
1692 choose_primary_ref_frame(cpi, &frame_params);
1693 }
1694#else
David Turnera7f133c2019-01-22 14:47:16 +00001695 frame_params.primary_ref_frame =
1696 choose_primary_ref_frame(cpi, &frame_params);
Remya Prakasana68eaef2021-05-19 19:15:52 +05301697#endif // CONFIG_FRAME_PARALLEL_ENCODE
1698
Mufaddal Chakeraab20d372021-03-17 12:18:34 +05301699 frame_params.order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
David Turner6e8b4d92019-02-18 15:01:33 +00001700
Jingning Han11eaaa92021-06-18 01:13:48 -07001701 frame_params.refresh_frame_flags = av1_get_refresh_frame_flags(
1702 cpi, &frame_params, frame_update_type, cpi->gf_frame_index,
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301703#if CONFIG_FRAME_PARALLEL_ENCODE
Jingning Han11eaaa92021-06-18 01:13:48 -07001704 cur_frame_disp, ref_frame_map_pairs,
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301705#endif // CONFIG_FRAME_PARALLEL_ENCODE
Jingning Han11eaaa92021-06-18 01:13:48 -07001706 &cpi->ref_buffer_stack);
Remya Prakasana68eaef2021-05-19 19:15:52 +05301707
1708#if CONFIG_FRAME_PARALLEL_ENCODE
1709 // Make frames marked as is_frame_non_ref into non-reference frames.
1710 if (gf_group->is_frame_non_ref[cpi->gf_frame_index])
1711 frame_params.refresh_frame_flags = 0;
1712#endif // CONFIG_FRAME_PARALLEL_ENCODE
1713
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301714#if CONFIG_FRAME_PARALLEL_ENCODE
1715 frame_params.existing_fb_idx_to_show = INVALID_IDX;
1716 // Find the frame buffer to show based on display order.
1717 if (frame_params.show_existing_frame) {
1718 for (int frame = 0; frame < REF_FRAMES; frame++) {
1719 const RefCntBuffer *const buf = cm->ref_frame_map[frame];
1720 if (buf == NULL) continue;
1721 const int frame_order = (int)buf->display_order_hint;
1722 if (frame_order == cur_frame_disp)
1723 frame_params.existing_fb_idx_to_show = frame;
1724 }
1725 }
1726#else
Jingning Hana7293932019-08-15 15:46:49 -07001727 frame_params.existing_fb_idx_to_show =
1728 frame_params.show_existing_frame
Debargha Mukherjeea2074dd2019-09-04 10:03:44 -07001729 ? (frame_update_type == INTNL_OVERLAY_UPDATE
1730 ? get_ref_frame_map_idx(cm, BWDREF_FRAME)
1731 : get_ref_frame_map_idx(cm, ALTREF_FRAME))
Jingning Hana7293932019-08-15 15:46:49 -07001732 : INVALID_IDX;
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301733#endif // CONFIG_FRAME_PARALLEL_ENCODE
David Turnera7f133c2019-01-22 14:47:16 +00001734 }
1735
David Turner73245762019-02-11 16:42:34 +00001736 // The way frame_params->remapped_ref_idx is set up is a placeholder.
David Turnerf4592292019-02-21 11:50:30 +00001737 // Currently, reference buffer assignment is done by update_ref_frame_map()
bohanli99852502020-07-14 16:22:45 -07001738 // which is called by high-level strategy AFTER encoding a frame. It
1739 // modifies cm->remapped_ref_idx. If you want to use an alternative method
1740 // to determine reference buffer assignment, just put your assignments into
David Turner73245762019-02-11 16:42:34 +00001741 // frame_params->remapped_ref_idx here and they will be used when encoding
1742 // this frame. If frame_params->remapped_ref_idx is set up independently of
David Turnerf4592292019-02-21 11:50:30 +00001743 // cm->remapped_ref_idx then update_ref_frame_map() will have no effect.
David Turner73245762019-02-11 16:42:34 +00001744 memcpy(frame_params.remapped_ref_idx, cm->remapped_ref_idx,
1745 REF_FRAMES * sizeof(*cm->remapped_ref_idx));
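  // For illustration only (my_custom_assignment is a hypothetical helper, not
  // part of the encoder), an alternative assignment scheme would replace the
  // memcpy above with something like:
  //   for (int i = 0; i < REF_FRAMES; ++i)
  //     frame_params.remapped_ref_idx[i] = my_custom_assignment(cpi, i);
  // in which case update_ref_frame_map()'s changes to cm->remapped_ref_idx
  // would no longer affect the references used for this frame.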
1746
Urvang Joshi69a986c2020-04-27 16:13:11 -07001747 cpi->td.mb.delta_qindex = 0;
Yue Chen4e585cc2019-06-03 14:47:16 -07001748
1749 if (!frame_params.show_existing_frame) {
Vishesh734eff92020-06-20 21:46:36 +05301750 cm->quant_params.using_qmatrix = oxcf->q_cfg.using_qm;
Yue Chen4e585cc2019-06-03 14:47:16 -07001751 }
Marco Paniconi34b0dd02020-07-29 16:41:41 -07001752
Aasaipriya Chandranad281242021-05-25 20:25:39 +05301753#if CONFIG_FRAME_PARALLEL_ENCODE
1754 // Copy previous frame's largest MV component from ppi to cpi.
1755 if (!is_stat_generation_stage(cpi) && cpi->do_frame_data_update)
1756 cpi->mv_search_params.max_mv_magnitude = cpi->ppi->max_mv_magnitude;
1757#endif // CONFIG_FRAME_PARALLEL_ENCODE
1758
Cheng Chen0fcf6f82019-10-11 11:41:19 -07001759#if CONFIG_REALTIME_ONLY
David Turnerdedd8ff2019-01-23 13:59:46 +00001760 if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) !=
David Turnerddbff442019-01-21 14:58:42 +00001761 AOM_CODEC_OK) {
1762 return AOM_CODEC_ERROR;
1763 }
Cheng Chen0fcf6f82019-10-11 11:41:19 -07001764#else
Marco Paniconieca11952020-05-06 11:30:48 -07001765 if (has_no_stats_stage(cpi) && oxcf->mode == REALTIME &&
Vishesh5b50e6d2020-06-10 19:20:07 +05301766 gf_cfg->lag_in_frames == 0) {
Marco Paniconieca11952020-05-06 11:30:48 -07001767 if (av1_encode(cpi, dest, &frame_input, &frame_params, &frame_results) !=
1768 AOM_CODEC_OK) {
1769 return AOM_CODEC_ERROR;
1770 }
1771 } else if (denoise_and_encode(cpi, dest, &frame_input, &frame_params,
1772 &frame_results) != AOM_CODEC_OK) {
Cheng Chen0fcf6f82019-10-11 11:41:19 -07001773 return AOM_CODEC_ERROR;
1774 }
1775#endif // CONFIG_REALTIME_ONLY
David Turnerddbff442019-01-21 14:58:42 +00001776
Aasaipriya Chandranad281242021-05-25 20:25:39 +05301777#if CONFIG_FRAME_PARALLEL_ENCODE
1778 // Store current frame's largest MV component in ppi.
1779 if (!is_stat_generation_stage(cpi) && cpi->do_frame_data_update)
1780 cpi->ppi->max_mv_magnitude = cpi->mv_search_params.max_mv_magnitude;
1781#endif
1782
Mufaddal Chakerae7326122019-12-04 14:49:09 +05301783 if (!is_stat_generation_stage(cpi)) {
David Turner73245762019-02-11 16:42:34 +00001784 // First pass doesn't modify reference buffer assignment or produce frame
1785 // flags
Jayasanker J24cb9bc2020-04-15 13:43:10 +05301786 update_frame_flags(&cpi->common, &cpi->refresh_frame, frame_flags);
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301787 set_additional_frame_flags(cm, frame_flags);
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301788#if !CONFIG_FRAME_PARALLEL_ENCODE
Vishesh38c05d72020-04-14 12:19:14 +05301789 if (!ext_flags->refresh_frame.update_pending) {
Marco Paniconi8d335b72019-08-06 09:07:07 -07001790 int ref_map_index =
1791 av1_get_refresh_ref_frame_map(cm->current_frame.refresh_frame_flags);
Jingning Han17701362021-06-19 00:10:42 -07001792 av1_update_ref_frame_map(cpi, frame_update_type,
1793 gf_group->refbuf_state[cpi->gf_frame_index],
1794 ref_map_index, &cpi->ref_buffer_stack);
Marco Paniconi8d335b72019-08-06 09:07:07 -07001795 }
Remya Prakasan5ba0bfe2021-01-28 15:19:19 +05301796#endif // CONFIG_FRAME_PARALLEL_ENCODE
David Turner73245762019-02-11 16:42:34 +00001797 }
1798
Jerome Jiang2612b4d2019-05-29 17:46:47 -07001799#if !CONFIG_REALTIME_ONLY
David Turnerddbff442019-01-21 14:58:42 +00001800#if TXCOEFF_COST_TIMER
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301801 if (!is_stat_generation_stage(cpi)) {
David Turnerddbff442019-01-21 14:58:42 +00001802 cm->cum_txcoeff_cost_timer += cm->txcoeff_cost_timer;
1803 fprintf(stderr,
1804 "\ntxb coeff cost block number: %ld, frame time: %ld, cum time %ld "
1805 "in us\n",
1806 cm->txcoeff_cost_count, cm->txcoeff_cost_timer,
1807 cm->cum_txcoeff_cost_timer);
David Turnerddbff442019-01-21 14:58:42 +00001808 }
Tarundeep Singh27b80f02021-05-31 19:27:59 +05301809#endif
Jerome Jiang2612b4d2019-05-29 17:46:47 -07001810#endif // !CONFIG_REALTIME_ONLY
David Turnerddbff442019-01-21 14:58:42 +00001811
Sai Dengaff27722020-08-31 12:06:09 -07001812#if CONFIG_TUNE_VMAF
1813 if (!is_stat_generation_stage(cpi) &&
1814 (oxcf->tune_cfg.tuning >= AOM_TUNE_VMAF_WITH_PREPROCESSING &&
1815 oxcf->tune_cfg.tuning <= AOM_TUNE_VMAF_NEG_MAX_GAIN)) {
Sai Deng282fad42020-09-17 15:24:29 -07001816 av1_update_vmaf_curve(cpi);
Sai Dengaff27722020-08-31 12:06:09 -07001817 }
1818#endif
David Turner056f7cd2019-01-07 17:48:13 +00001819
David Turner1539bb02019-01-24 15:28:13 +00001820 // Unpack frame_results:
David Turner056f7cd2019-01-07 17:48:13 +00001821 *size = frame_results.size;
1822
David Turner1539bb02019-01-24 15:28:13 +00001823 // Leave a signal for a higher-level caller about whether this frame is droppable
1824 if (*size > 0) {
Vishesh38c05d72020-04-14 12:19:14 +05301825 cpi->droppable = is_frame_droppable(&cpi->svc, &ext_flags->refresh_frame);
David Turner1539bb02019-01-24 15:28:13 +00001826 }
1827
David Turner056f7cd2019-01-07 17:48:13 +00001828 return AOM_CODEC_OK;
1829}