/*
 * Copyright (c) 2021, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 3-Clause Clear License
 * and the Alliance for Open Media Patent License 1.0. If the BSD 3-Clause Clear
 * License was not distributed with this source code in the LICENSE file, you
 * can obtain it at aomedia.org/license/software-license/bsd-3-c-c/. If the
 * Alliance for Open Media Patent License 1.0 was not distributed with this
 * source code in the PATENTS file, you can obtain it at
 * aomedia.org/license/patent-license/.
 */

#include <limits.h>

#include "aom_mem/aom_mem.h"

#include "av1/common/pred_common.h"
#include "av1/common/tile_common.h"

#include "av1/encoder/cost.h"
#include "av1/encoder/segmentation.h"

void av1_enable_segmentation(struct segmentation *seg) {
  seg->enabled = 1;
  seg->update_map = 1;
  seg->update_data = 1;
  seg->temporal_update = 0;
}

void av1_disable_segmentation(struct segmentation *seg) {
  seg->enabled = 0;
  seg->update_map = 0;
  seg->update_data = 0;
  seg->temporal_update = 0;
}

void av1_disable_segfeature(struct segmentation *seg, int segment_id,
                            SEG_LVL_FEATURES feature_id) {
  seg->feature_mask[segment_id] &= ~(1 << feature_id);
}

void av1_clear_segdata(struct segmentation *seg, int segment_id,
                       SEG_LVL_FEATURES feature_id) {
  seg->feature_data[segment_id][feature_id] = 0;
}

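// Gathers segment-id statistics for one coded block: increments the
// "no prediction" count for the block's segment id and, on non-key frames,
// records whether that id matches the prediction from the previous frame's
// segment map so that temporal prediction can be costed later.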
static void count_segs(const AV1_COMMON *cm, MACROBLOCKD *xd,
                       const TileInfo *tile, MB_MODE_INFO **mi,
                       unsigned *no_pred_segcounts,
                       unsigned (*temporal_predictor_count)[2],
                       unsigned *t_unpred_seg_counts, int bw, int bh,
                       int mi_row, int mi_col) {
  const CommonModeInfoParams *const mi_params = &cm->mi_params;
  if (mi_row >= mi_params->mi_rows || mi_col >= mi_params->mi_cols) return;

  xd->mi = mi;
  assert(xd->mi && xd->mi[0]);
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, mi_params->mi_rows,
                 mi_params->mi_cols, &xd->mi[0]->chroma_ref_info);

  // Count the number of hits on each segment with no prediction
  const int segment_id = xd->mi[0]->segment_id;
  no_pred_segcounts[segment_id]++;

  // Temporal prediction not allowed on key frames
  if (cm->current_frame.frame_type != KEY_FRAME) {
    const BLOCK_SIZE bsize = xd->mi[0]->sb_type[xd->tree_type == CHROMA_PART];
    // Test to see if the segment id matches the predicted value.
    const int pred_segment_id =
        cm->last_frame_seg_map
            ? get_segment_id(mi_params, cm->last_frame_seg_map, bsize, mi_row,
                             mi_col)
            : 0;
    const int pred_flag = pred_segment_id == segment_id;
    const int pred_context = av1_get_pred_context_seg_id(xd);

    // Store the prediction status for this mb and update counts
    // as appropriate
    xd->mi[0]->seg_id_predicted = pred_flag;
    temporal_predictor_count[pred_context][pred_flag]++;

    // Update the "unpredicted" segment count
    if (!pred_flag) t_unpred_seg_counts[segment_id]++;
  }
}

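// Recursively walks a superblock's partition tree and invokes count_segs()
// on each leaf block, accumulating the statistics needed to decide how the
// segment map should be coded.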
static void count_segs_sb(const AV1_COMMON *cm, MACROBLOCKD *xd,
                          const TileInfo *tile, MB_MODE_INFO **mi,
                          unsigned *no_pred_segcounts,
                          unsigned (*temporal_predictor_count)[2],
                          unsigned *t_unpred_seg_counts, int mi_row, int mi_col,
#if !CONFIG_EXT_RECUR_PARTITIONS
                          BLOCK_SIZE bsize,
#endif  // !CONFIG_EXT_RECUR_PARTITIONS
                          const PARTITION_TREE *ptree) {
  const CommonModeInfoParams *const mi_params = &cm->mi_params;
  const int mis = mi_params->mi_stride;
#if CONFIG_EXT_RECUR_PARTITIONS
  BLOCK_SIZE bsize = ptree->bsize;
  const int bw = mi_size_wide[bsize], bh = mi_size_high[bsize];
  const int hbw = bw / 2, hbh = bh / 2;
  const int qbw = bw / 4, qbh = bh / 4;
  const int ebw = bw / 8, ebh = bh / 8;
#else
  const int bs = mi_size_wide[bsize], hbs = bs / 2;
  const int qbs = bs / 4;
#endif  // CONFIG_EXT_RECUR_PARTITIONS

  if (mi_row >= mi_params->mi_rows || mi_col >= mi_params->mi_cols) return;

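// Helper macros used below: CSEGS counts one leaf block at the given mi
// offset, while CSEGS_RECURSIVE (CONFIG_EXT_RECUR_PARTITIONS only) recurses
// into the corresponding sub-tree of the partition tree.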
#define CSEGS(cs_bw, cs_bh, cs_rowoff, cs_coloff)                              \
  count_segs(cm, xd, tile, mi + mis * (cs_rowoff) + (cs_coloff),               \
             no_pred_segcounts, temporal_predictor_count, t_unpred_seg_counts, \
             (cs_bw), (cs_bh), mi_row + (cs_rowoff), mi_col + (cs_coloff));
#if CONFIG_EXT_RECUR_PARTITIONS
#define CSEGS_RECURSIVE(cs_rowoff, cs_coloff, subtree)              \
  count_segs_sb(cm, xd, tile, mi + mis * (cs_rowoff) + (cs_coloff), \
                no_pred_segcounts, temporal_predictor_count,        \
                t_unpred_seg_counts, mi_row + (cs_rowoff),          \
                mi_col + (cs_coloff), subtree);

  int tree_idx = 0;
  const PARTITION_TYPE partition = ptree->partition;
#else
  PARTITION_TYPE partition;
  if (bsize == BLOCK_8X8)
    partition = PARTITION_NONE;
  else
    partition =
        get_partition(cm, xd->tree_type == CHROMA_PART, mi_row, mi_col, bsize);
#endif  // CONFIG_EXT_RECUR_PARTITIONS
  switch (partition) {
#if CONFIG_EXT_RECUR_PARTITIONS
    case PARTITION_NONE: CSEGS(bw, bh, 0, 0); break;
    case PARTITION_HORZ:
      CSEGS_RECURSIVE(0, 0, ptree->sub_tree[tree_idx++]);
      CSEGS_RECURSIVE(hbh, 0, ptree->sub_tree[tree_idx++]);
      break;
    case PARTITION_VERT:
      CSEGS_RECURSIVE(0, 0, ptree->sub_tree[tree_idx++]);
      CSEGS_RECURSIVE(0, hbw, ptree->sub_tree[tree_idx++]);
      break;
    case PARTITION_HORZ_3:
      CSEGS_RECURSIVE(0, 0, ptree->sub_tree[tree_idx++]);
      CSEGS_RECURSIVE(qbh, 0, ptree->sub_tree[tree_idx++]);
      CSEGS_RECURSIVE(qbh, hbw, ptree->sub_tree[tree_idx++]);
      if (mi_row + 3 * qbh < mi_params->mi_rows)
        CSEGS_RECURSIVE(3 * qbh, 0, ptree->sub_tree[tree_idx++]);
      break;
    case PARTITION_VERT_3:
      CSEGS_RECURSIVE(0, 0, ptree->sub_tree[tree_idx++]);
      CSEGS_RECURSIVE(0, qbw, ptree->sub_tree[tree_idx++]);
      CSEGS_RECURSIVE(hbh, qbw, ptree->sub_tree[tree_idx++]);
      if (mi_col + 3 * qbw < mi_params->mi_cols)
        CSEGS_RECURSIVE(0, 3 * qbw, ptree->sub_tree[tree_idx++]);
      break;
    case PARTITION_HORZ_4A:
      CSEGS_RECURSIVE(0, 0, ptree->sub_tree[tree_idx++]);
      if (mi_row + ebh < mi_params->mi_rows)
        CSEGS_RECURSIVE(ebh, 0, ptree->sub_tree[tree_idx++]);
      if (mi_row + 3 * ebh < mi_params->mi_rows)
        CSEGS_RECURSIVE(3 * ebh, 0, ptree->sub_tree[tree_idx++]);
      if (mi_row + 7 * ebh < mi_params->mi_rows)
        CSEGS_RECURSIVE(7 * ebh, 0, ptree->sub_tree[tree_idx++]);
      break;
    case PARTITION_HORZ_4B:
      CSEGS_RECURSIVE(0, 0, ptree->sub_tree[tree_idx++]);
      if (mi_row + ebh < mi_params->mi_rows)
        CSEGS_RECURSIVE(ebh, 0, ptree->sub_tree[tree_idx++]);
      if (mi_row + 5 * ebh < mi_params->mi_rows)
        CSEGS_RECURSIVE(5 * ebh, 0, ptree->sub_tree[tree_idx++]);
      if (mi_row + 7 * ebh < mi_params->mi_rows)
        CSEGS_RECURSIVE(7 * ebh, 0, ptree->sub_tree[tree_idx++]);
      break;
    case PARTITION_VERT_4A:
      CSEGS_RECURSIVE(0, 0, ptree->sub_tree[tree_idx++]);
      if (mi_col + ebw < mi_params->mi_cols)
        CSEGS_RECURSIVE(0, ebw, ptree->sub_tree[tree_idx++]);
      if (mi_col + 3 * ebw < mi_params->mi_cols)
        CSEGS_RECURSIVE(0, 3 * ebw, ptree->sub_tree[tree_idx++]);
      if (mi_col + 7 * ebw < mi_params->mi_cols)
        CSEGS_RECURSIVE(0, 7 * ebw, ptree->sub_tree[tree_idx++]);
      break;
    case PARTITION_VERT_4B:
      CSEGS_RECURSIVE(0, 0, ptree->sub_tree[tree_idx++]);
      if (mi_col + ebw < mi_params->mi_cols)
        CSEGS_RECURSIVE(0, ebw, ptree->sub_tree[tree_idx++]);
      if (mi_col + 5 * ebw < mi_params->mi_cols)
        CSEGS_RECURSIVE(0, 5 * ebw, ptree->sub_tree[tree_idx++]);
      if (mi_col + 7 * ebw < mi_params->mi_cols)
        CSEGS_RECURSIVE(0, 7 * ebw, ptree->sub_tree[tree_idx++]);
      break;
#else   // CONFIG_EXT_RECUR_PARTITIONS
    case PARTITION_NONE: CSEGS(bs, bs, 0, 0); break;
    case PARTITION_HORZ:
      CSEGS(bs, hbs, 0, 0);
      CSEGS(bs, hbs, hbs, 0);
      break;
    case PARTITION_VERT:
      CSEGS(hbs, bs, 0, 0);
      CSEGS(hbs, bs, 0, hbs);
      break;
    case PARTITION_HORZ_A:
      CSEGS(hbs, hbs, 0, 0);
      CSEGS(hbs, hbs, 0, hbs);
      CSEGS(bs, hbs, hbs, 0);
      break;
    case PARTITION_HORZ_B:
      CSEGS(bs, hbs, 0, 0);
      CSEGS(hbs, hbs, hbs, 0);
      CSEGS(hbs, hbs, hbs, hbs);
      break;
    case PARTITION_VERT_A:
      CSEGS(hbs, hbs, 0, 0);
      CSEGS(hbs, hbs, hbs, 0);
      CSEGS(hbs, bs, 0, hbs);
      break;
    case PARTITION_VERT_B:
      CSEGS(hbs, bs, 0, 0);
      CSEGS(hbs, hbs, 0, hbs);
      CSEGS(hbs, hbs, hbs, hbs);
      break;
    case PARTITION_HORZ_4:
      CSEGS(bs, qbs, 0, 0);
      CSEGS(bs, qbs, qbs, 0);
      CSEGS(bs, qbs, 2 * qbs, 0);
      if (mi_row + 3 * qbs < mi_params->mi_rows) CSEGS(bs, qbs, 3 * qbs, 0);
      break;
    case PARTITION_VERT_4:
      CSEGS(qbs, bs, 0, 0);
      CSEGS(qbs, bs, 0, qbs);
      CSEGS(qbs, bs, 0, 2 * qbs);
      if (mi_col + 3 * qbs < mi_params->mi_cols) CSEGS(qbs, bs, 0, 3 * qbs);
      break;
#endif  // CONFIG_EXT_RECUR_PARTITIONS
    case PARTITION_SPLIT: {
#if !CONFIG_EXT_RECUR_PARTITIONS
      const BLOCK_SIZE subsize = get_partition_subsize(bsize, PARTITION_SPLIT);
      assert(subsize < BLOCK_SIZES_ALL);
#endif  // !CONFIG_EXT_RECUR_PARTITIONS

      for (int n = 0; n < 4; n++) {
#if CONFIG_EXT_RECUR_PARTITIONS
        const int mi_dc = hbw * (n & 1);
        const int mi_dr = hbh * (n >> 1);
        count_segs_sb(cm, xd, tile, &mi[mi_dr * mis + mi_dc], no_pred_segcounts,
                      temporal_predictor_count, t_unpred_seg_counts,
                      mi_row + mi_dr, mi_col + mi_dc, ptree->sub_tree[n]);
#else
        const int mi_dc = hbs * (n & 1);
        const int mi_dr = hbs * (n >> 1);
        count_segs_sb(cm, xd, tile, &mi[mi_dr * mis + mi_dc], no_pred_segcounts,
                      temporal_predictor_count, t_unpred_seg_counts,
                      mi_row + mi_dr, mi_col + mi_dc, subsize,
                      ptree->sub_tree[n]);
#endif  // CONFIG_EXT_RECUR_PARTITIONS
      }
    } break;
    default: assert(0);
  }

#undef CSEGS
}

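// Decides how the segment map of the current frame should be coded. The
// per-block statistics gathered over all tiles are used to compare the
// estimated rate of coding every segment id explicitly against the rate of
// coding temporal prediction flags plus the mispredicted ids;
// seg->temporal_update is enabled only when temporal prediction is cheaper.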
void av1_choose_segmap_coding_method(AV1_COMMON *cm, MACROBLOCKD *xd) {
  struct segmentation *seg = &cm->seg;
  struct segmentation_probs *segp = &cm->fc->seg;
  int no_pred_cost;
  int t_pred_cost = INT_MAX;
  int tile_col, tile_row, mi_row, mi_col;
  unsigned temporal_predictor_count[SEG_TEMPORAL_PRED_CTXS][2] = { { 0 } };
  unsigned no_pred_segcounts[MAX_SEGMENTS] = { 0 };
  unsigned t_unpred_seg_counts[MAX_SEGMENTS] = { 0 };
  (void)xd;
  int scale_up = cm->prev_frame && (cm->width > cm->prev_frame->width ||
                                    cm->height > cm->prev_frame->height);
  // First of all generate stats regarding how well the last segment map
  // predicts this one
  if (!scale_up) {
    for (tile_row = 0; tile_row < cm->tiles.rows; tile_row++) {
      TileInfo tile_info;
      av1_tile_set_row(&tile_info, cm, tile_row);
      for (tile_col = 0; tile_col < cm->tiles.cols; tile_col++) {
        MB_MODE_INFO **mi_ptr;
        av1_tile_set_col(&tile_info, cm, tile_col);
        mi_ptr = cm->mi_params.mi_grid_base +
                 tile_info.mi_row_start * cm->mi_params.mi_stride +
                 tile_info.mi_col_start;
        for (mi_row = tile_info.mi_row_start; mi_row < tile_info.mi_row_end;
             mi_row += cm->mib_size,
             mi_ptr += cm->mib_size * cm->mi_params.mi_stride) {
          MB_MODE_INFO **mi = mi_ptr;
          for (mi_col = tile_info.mi_col_start; mi_col < tile_info.mi_col_end;
               mi_col += cm->mib_size, mi += cm->mib_size) {
            const SB_INFO *sbi = av1_get_sb_info(cm, mi_row, mi_col);
            const PARTITION_TREE *ptree = sbi->ptree_root[AOM_PLANE_Y];
            count_segs_sb(cm, xd, &tile_info, mi, no_pred_segcounts,
                          temporal_predictor_count, t_unpred_seg_counts, mi_row,
                          mi_col,
#if !CONFIG_EXT_RECUR_PARTITIONS
                          cm->sb_size,
#endif  // !CONFIG_EXT_RECUR_PARTITIONS
                          ptree);
          }
        }
      }
    }
  }

  int seg_id_cost[MAX_SEGMENTS];
  av1_cost_tokens_from_cdf(seg_id_cost, segp->tree_cdf, NULL);
  no_pred_cost = 0;
  for (int i = 0; i < MAX_SEGMENTS; ++i)
    no_pred_cost += no_pred_segcounts[i] * seg_id_cost[i];

  // Frames without past dependency cannot use temporal prediction
  if (cm->features.primary_ref_frame != PRIMARY_REF_NONE) {
    int pred_flag_cost[SEG_TEMPORAL_PRED_CTXS][2];
    for (int i = 0; i < SEG_TEMPORAL_PRED_CTXS; ++i)
      av1_cost_tokens_from_cdf(pred_flag_cost[i], segp->pred_cdf[i], NULL);
    t_pred_cost = 0;
    // Cost for signaling the prediction flag.
    for (int i = 0; i < SEG_TEMPORAL_PRED_CTXS; ++i) {
      for (int j = 0; j < 2; ++j)
        t_pred_cost += temporal_predictor_count[i][j] * pred_flag_cost[i][j];
    }
    // Cost for signaling the unpredicted segment id.
    for (int i = 0; i < MAX_SEGMENTS; ++i)
      t_pred_cost += t_unpred_seg_counts[i] * seg_id_cost[i];
  }

  // Now choose which coding method to use.
  if (t_pred_cost < no_pred_cost) {
    assert(!cm->features.error_resilient_mode);
    seg->temporal_update = 1;
  } else {
    seg->temporal_update = 0;
  }
}

void av1_reset_segment_features(AV1_COMMON *cm) {
  struct segmentation *seg = &cm->seg;

  // Set up default state for MB feature flags
  seg->enabled = 0;
  seg->update_map = 0;
  seg->update_data = 0;
  av1_clearall_segfeatures(seg);
}