blob: 19c78291c0c97a1bb90684fff5ea8523c768c6d6 [file] [log] [blame]
Jayasanker Je9ad4752020-06-30 19:30:03 +05301/*
2 * Copyright (c) 2020, Alliance for Open Media. All rights reserved
3 *
4 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
10 */
11
12#include "aom_ports/system_state.h"
13
14#include "av1/common/reconintra.h"
15
16#include "av1/encoder/encoder.h"
17#include "av1/encoder/encodeframe_utils.h"
18#include "av1/encoder/partition_strategy.h"
19#include "av1/encoder/rdopt.h"
20
21static AOM_INLINE int set_deltaq_rdmult(const AV1_COMP *const cpi,
22 const MACROBLOCK *const x) {
23 const AV1_COMMON *const cm = &cpi->common;
24 const CommonQuantParams *quant_params = &cm->quant_params;
25 return av1_compute_rd_mult(cpi, quant_params->base_qindex + x->delta_qindex +
26 quant_params->y_dc_delta_q);
27}
28
29void av1_set_ssim_rdmult(const AV1_COMP *const cpi, MvCosts *const mv_costs,
30 const BLOCK_SIZE bsize, const int mi_row,
31 const int mi_col, int *const rdmult) {
32 const AV1_COMMON *const cm = &cpi->common;
33
34 const int bsize_base = BLOCK_16X16;
35 const int num_mi_w = mi_size_wide[bsize_base];
36 const int num_mi_h = mi_size_high[bsize_base];
37 const int num_cols = (cm->mi_params.mi_cols + num_mi_w - 1) / num_mi_w;
38 const int num_rows = (cm->mi_params.mi_rows + num_mi_h - 1) / num_mi_h;
39 const int num_bcols = (mi_size_wide[bsize] + num_mi_w - 1) / num_mi_w;
40 const int num_brows = (mi_size_high[bsize] + num_mi_h - 1) / num_mi_h;
41 int row, col;
42 double num_of_mi = 0.0;
43 double geom_mean_of_scale = 0.0;
44
Vishesh94a65292020-07-01 15:28:53 +053045 assert(cpi->oxcf.tune_cfg.tuning == AOM_TUNE_SSIM);
Jayasanker Je9ad4752020-06-30 19:30:03 +053046
47 aom_clear_system_state();
48 for (row = mi_row / num_mi_w;
49 row < num_rows && row < mi_row / num_mi_w + num_brows; ++row) {
50 for (col = mi_col / num_mi_h;
51 col < num_cols && col < mi_col / num_mi_h + num_bcols; ++col) {
52 const int index = row * num_cols + col;
53 geom_mean_of_scale += log(cpi->ssim_rdmult_scaling_factors[index]);
54 num_of_mi += 1.0;
55 }
56 }
57 geom_mean_of_scale = exp(geom_mean_of_scale / num_of_mi);
58
59 *rdmult = (int)((double)(*rdmult) * geom_mean_of_scale + 0.5);
60 *rdmult = AOMMAX(*rdmult, 0);
61 av1_set_error_per_bit(mv_costs, *rdmult);
62 aom_clear_system_state();
63}
64
Urvang Joshie198bf12020-10-08 15:37:55 -070065// Return the end column for the current superblock, in unit of TPL blocks.
66static int get_superblock_tpl_column_end(const AV1_COMMON *const cm, int mi_col,
67 int num_mi_w) {
68 // Find the start column of this superblock.
69 const int sb_mi_col_start = (mi_col >> cm->seq_params.mib_size_log2)
70 << cm->seq_params.mib_size_log2;
71 // Same but in superres upscaled dimension.
72 const int sb_mi_col_start_sr =
73 coded_to_superres_mi(sb_mi_col_start, cm->superres_scale_denominator);
74 // Width of this superblock in mi units.
75 const int sb_mi_width = mi_size_wide[cm->seq_params.sb_size];
76 // Same but in superres upscaled dimension.
77 const int sb_mi_width_sr =
78 coded_to_superres_mi(sb_mi_width, cm->superres_scale_denominator);
79 // Superblock end in mi units.
80 const int sb_mi_end = sb_mi_col_start_sr + sb_mi_width_sr;
81 // Superblock end in TPL units.
82 return (sb_mi_end + num_mi_w - 1) / num_mi_w;
83}
84
Jayasanker Je9ad4752020-06-30 19:30:03 +053085int av1_get_hier_tpl_rdmult(const AV1_COMP *const cpi, MACROBLOCK *const x,
86 const BLOCK_SIZE bsize, const int mi_row,
87 const int mi_col, int orig_rdmult) {
88 const AV1_COMMON *const cm = &cpi->common;
89 const GF_GROUP *const gf_group = &cpi->gf_group;
90 assert(IMPLIES(cpi->gf_group.size > 0,
91 cpi->gf_group.index < cpi->gf_group.size));
92 const int tpl_idx = cpi->gf_group.index;
93 const TplDepFrame *tpl_frame = &cpi->tpl_data.tpl_frame[tpl_idx];
94 const int deltaq_rdmult = set_deltaq_rdmult(cpi, x);
95 if (tpl_frame->is_valid == 0) return deltaq_rdmult;
Deepa K G21e5e8e2020-03-28 13:26:09 +053096 if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return deltaq_rdmult;
Jayasanker Je9ad4752020-06-30 19:30:03 +053097 if (tpl_idx >= MAX_TPL_FRAME_IDX) return deltaq_rdmult;
Jayasanker Je9ad4752020-06-30 19:30:03 +053098 if (cpi->oxcf.q_cfg.aq_mode != NO_AQ) return deltaq_rdmult;
99
Urvang Joshie198bf12020-10-08 15:37:55 -0700100 const int mi_col_sr =
101 coded_to_superres_mi(mi_col, cm->superres_scale_denominator);
102 const int mi_cols_sr = av1_pixels_to_mi(cm->superres_upscaled_width);
103 const int block_mi_width_sr =
104 coded_to_superres_mi(mi_size_wide[bsize], cm->superres_scale_denominator);
105
Jayasanker Je9ad4752020-06-30 19:30:03 +0530106 const int bsize_base = BLOCK_16X16;
107 const int num_mi_w = mi_size_wide[bsize_base];
108 const int num_mi_h = mi_size_high[bsize_base];
Urvang Joshie198bf12020-10-08 15:37:55 -0700109 const int num_cols = (mi_cols_sr + num_mi_w - 1) / num_mi_w;
Jayasanker Je9ad4752020-06-30 19:30:03 +0530110 const int num_rows = (cm->mi_params.mi_rows + num_mi_h - 1) / num_mi_h;
Urvang Joshie198bf12020-10-08 15:37:55 -0700111 const int num_bcols = (block_mi_width_sr + num_mi_w - 1) / num_mi_w;
Jayasanker Je9ad4752020-06-30 19:30:03 +0530112 const int num_brows = (mi_size_high[bsize] + num_mi_h - 1) / num_mi_h;
Urvang Joshie198bf12020-10-08 15:37:55 -0700113 // This is required because the end col of superblock may be off by 1 in case
114 // of superres.
115 const int sb_bcol_end = get_superblock_tpl_column_end(cm, mi_col, num_mi_w);
Jayasanker Je9ad4752020-06-30 19:30:03 +0530116 int row, col;
117 double base_block_count = 0.0;
118 double geom_mean_of_scale = 0.0;
119 aom_clear_system_state();
120 for (row = mi_row / num_mi_w;
121 row < num_rows && row < mi_row / num_mi_w + num_brows; ++row) {
Urvang Joshie198bf12020-10-08 15:37:55 -0700122 for (col = mi_col_sr / num_mi_h;
123 col < num_cols && col < mi_col_sr / num_mi_h + num_bcols &&
124 col < sb_bcol_end;
125 ++col) {
Jayasanker Je9ad4752020-06-30 19:30:03 +0530126 const int index = row * num_cols + col;
127 geom_mean_of_scale += log(cpi->tpl_sb_rdmult_scaling_factors[index]);
128 base_block_count += 1.0;
129 }
130 }
131 geom_mean_of_scale = exp(geom_mean_of_scale / base_block_count);
132 int rdmult = (int)((double)orig_rdmult * geom_mean_of_scale + 0.5);
133 rdmult = AOMMAX(rdmult, 0);
134 av1_set_error_per_bit(&x->mv_costs, rdmult);
135 aom_clear_system_state();
136 if (bsize == cm->seq_params.sb_size) {
137 const int rdmult_sb = set_deltaq_rdmult(cpi, x);
138 assert(rdmult_sb == rdmult);
139 (void)rdmult_sb;
140 }
141 return rdmult;
142}
143
// Accumulates the switchable interpolation filter choice of this block into
// the frame counts, per prediction context. With CONFIG_REMOVE_DUAL_FILTER
// a single filter is counted; otherwise one per direction (row/col).
static AOM_INLINE void update_filter_type_count(FRAME_COUNTS *counts,
                                                const MACROBLOCKD *xd,
                                                const MB_MODE_INFO *mbmi) {
#if CONFIG_REMOVE_DUAL_FILTER
  const int ctx = av1_get_pred_context_switchable_interp(xd, 0);
  ++counts->switchable_interp[ctx][mbmi->interp_fltr];
#else
  // Count the horizontal and vertical filters independently.
  for (int dir = 0; dir < 2; ++dir) {
    const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
    InterpFilter filter = av1_extract_interp_filter(mbmi->interp_filters, dir);
    ++counts->switchable_interp[ctx][filter];
  }
#endif  // CONFIG_REMOVE_DUAL_FILTER
}
158
// Resets the block's transform size to the value implied by the current
// tx_mode (TX_4X4 for lossless, the mode-derived size otherwise), clears the
// per-4x4 tx types back to DCT_DCT, and resets the txfm search skip state.
static void reset_tx_size(MACROBLOCK *x, MB_MODE_INFO *mbmi,
                          const TX_MODE tx_mode) {
  MACROBLOCKD *const xd = &x->e_mbd;
  TxfmSearchInfo *txfm_info = &x->txfm_search_info;
#if CONFIG_SDP
  // Under semi-decoupled partitioning, chroma uses its own sb_type entry.
  int plane_index = xd->tree_type == CHROMA_PART;
#endif
  if (xd->lossless[mbmi->segment_id]) {
    // Lossless coding always uses the smallest transform.
    mbmi->tx_size = TX_4X4;
  } else if (tx_mode != TX_MODE_SELECT) {
#if CONFIG_SDP
    mbmi->tx_size = tx_size_from_tx_mode(mbmi->sb_type[plane_index], tx_mode);
#else
    mbmi->tx_size = tx_size_from_tx_mode(mbmi->sb_type, tx_mode);
#endif
  } else {
#if CONFIG_SDP
    BLOCK_SIZE bsize = mbmi->sb_type[plane_index];
#else
    BLOCK_SIZE bsize = mbmi->sb_type;
#endif
    // TX_MODE_SELECT: clamp the current size up to the minimum allowed depth.
    TX_SIZE min_tx_size = depth_to_tx_size(MAX_TX_DEPTH, bsize);
    mbmi->tx_size = (TX_SIZE)TXSIZEMAX(mbmi->tx_size, min_tx_size);
  }
#if CONFIG_SDP
  if (is_inter_block(mbmi, xd->tree_type)) {
#else
  if (is_inter_block(mbmi)) {
#endif
    // Inter blocks carry a per-unit tx size array; make it uniform.
    memset(mbmi->inter_tx_size, mbmi->tx_size, sizeof(mbmi->inter_tx_size));
  }
  const int stride = xd->tx_type_map_stride;
#if CONFIG_SDP
  const int bw = mi_size_wide[mbmi->sb_type[plane_index]];
  for (int row = 0; row < mi_size_high[mbmi->sb_type[plane_index]]; ++row) {
#else
  const int bw = mi_size_wide[mbmi->sb_type];
  for (int row = 0; row < mi_size_high[mbmi->sb_type]; ++row) {
#endif
    // Reset every covered tx-type entry to the default DCT_DCT.
    memset(xd->tx_type_map + row * stride, DCT_DCT,
           bw * sizeof(xd->tx_type_map[0]));
  }
  av1_zero(txfm_info->blk_skip);
  txfm_info->skip_txfm = 0;
}
204
// This function will copy the best reference mode information from
// MB_MODE_INFO_EXT_FRAME to MB_MODE_INFO_EXT.
// Only the row selected by ref_frame_type of the destination's 2-D ref-mv
// stack / weight arrays is filled; mode context, ref-mv count and the global
// MVs are copied wholesale.
static INLINE void copy_mbmi_ext_frame_to_mbmi_ext(
    MB_MODE_INFO_EXT *mbmi_ext,
    const MB_MODE_INFO_EXT_FRAME *const mbmi_ext_best, uint8_t ref_frame_type) {
  // NOTE(review): sizeof's operand is unevaluated, so indexing with
  // USABLE_REF_MV_STACK_SIZE is not an out-of-bounds access — it just yields
  // the size of one row of the 2-D array. Looks odd but appears intentional;
  // sizeof(mbmi_ext->ref_mv_stack[0]) would read more clearly.
  memcpy(mbmi_ext->ref_mv_stack[ref_frame_type], mbmi_ext_best->ref_mv_stack,
         sizeof(mbmi_ext->ref_mv_stack[USABLE_REF_MV_STACK_SIZE]));
  memcpy(mbmi_ext->weight[ref_frame_type], mbmi_ext_best->weight,
         sizeof(mbmi_ext->weight[USABLE_REF_MV_STACK_SIZE]));
  mbmi_ext->mode_context[ref_frame_type] = mbmi_ext_best->mode_context;
  mbmi_ext->ref_mv_count[ref_frame_type] = mbmi_ext_best->ref_mv_count;
  memcpy(mbmi_ext->global_mvs, mbmi_ext_best->global_mvs,
         sizeof(mbmi_ext->global_mvs));
}
219
// Commits the mode decision held in ctx (the winner of the RD search) into
// the frame-level coding state: copies the chosen MB_MODE_INFO into the mi
// grid, restores coefficient/eob buffers, updates segmentation ids, tx
// types, stat counters and frame MVs. When dry_run is set, only the state
// needed to continue the search is updated (no stats, no bitstream-side
// buffers).
void av1_update_state(const AV1_COMP *const cpi, ThreadData *td,
                      const PICK_MODE_CONTEXT *const ctx, int mi_row,
                      int mi_col, BLOCK_SIZE bsize, RUN_TYPE dry_run) {
  int i, x_idx, y;
  const AV1_COMMON *const cm = &cpi->common;
  const CommonModeInfoParams *const mi_params = &cm->mi_params;
  const int num_planes = av1_num_planes(cm);
  RD_COUNTS *const rdc = &td->rd_counts;
  MACROBLOCK *const x = &td->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  struct macroblock_plane *const p = x->plane;
  struct macroblockd_plane *const pd = xd->plane;
  const MB_MODE_INFO *const mi = &ctx->mic;
  MB_MODE_INFO *const mi_addr = xd->mi[0];
  const struct segmentation *const seg = &cm->seg;
  assert(bsize < BLOCK_SIZES_ALL);
#if CONFIG_SDP
  // With semi-decoupled partitioning, block size depends on the tree type.
  const int bw = mi_size_wide[mi->sb_type[xd->tree_type == CHROMA_PART]];
  const int bh = mi_size_high[mi->sb_type[xd->tree_type == CHROMA_PART]];
#else
  const int bw = mi_size_wide[mi->sb_type];
  const int bh = mi_size_high[mi->sb_type];
#endif
  const int mis = mi_params->mi_stride;
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];
  TxfmSearchInfo *txfm_info = &x->txfm_search_info;
#if CONFIG_SDP
  assert(mi->sb_type[xd->tree_type == CHROMA_PART] == bsize);
#else
  assert(mi->sb_type == bsize);
#endif

  // Install the winning mode info at the block's anchor position.
  *mi_addr = *mi;
#if CONFIG_SDP
  if (xd->tree_type != CHROMA_PART)
#endif
    copy_mbmi_ext_frame_to_mbmi_ext(x->mbmi_ext, &ctx->mbmi_ext_best,
                                    av1_ref_frame_type(ctx->mic.ref_frame));

  memcpy(txfm_info->blk_skip, ctx->blk_skip,
         sizeof(txfm_info->blk_skip[0]) * ctx->num_4x4_blk);

  txfm_info->skip_txfm = ctx->rd_stats.skip_txfm;
#if CONFIG_SDP
  if (xd->tree_type != CHROMA_PART) {
#endif
    xd->tx_type_map = ctx->tx_type_map;
    xd->tx_type_map_stride = mi_size_wide[bsize];
    // If not dry_run, copy the transform type data into the frame level
    // buffer. Encoder will fetch tx types when writing bitstream.
    if (!dry_run) {
      const int grid_idx = get_mi_grid_idx(mi_params, mi_row, mi_col);
      uint8_t *const tx_type_map = mi_params->tx_type_map + grid_idx;
      const int mi_stride = mi_params->mi_stride;
      for (int blk_row = 0; blk_row < bh; ++blk_row) {
        av1_copy_array(tx_type_map + blk_row * mi_stride,
                       xd->tx_type_map + blk_row * xd->tx_type_map_stride, bw);
      }
      xd->tx_type_map = tx_type_map;
      xd->tx_type_map_stride = mi_stride;
    }
#if CONFIG_SDP
  }
#endif

  // If segmentation in use
  if (seg->enabled) {
    // For in frame complexity AQ copy the segment id from the segment map.
    if (cpi->oxcf.q_cfg.aq_mode == COMPLEXITY_AQ) {
      const uint8_t *const map =
          seg->update_map ? cpi->enc_seg.map : cm->last_frame_seg_map;
      mi_addr->segment_id =
          map ? get_segment_id(mi_params, map, bsize, mi_row, mi_col) : 0;
      // The segment id change may invalidate the chosen tx size.
      reset_tx_size(x, mi_addr, x->txfm_search_params.tx_mode_search_type);
    }
    // Else for cyclic refresh mode update the segment map, set the segment id
    // and then update the quantizer.
#if CONFIG_SDP
    if (cpi->oxcf.q_cfg.aq_mode == CYCLIC_REFRESH_AQ &&
        xd->tree_type == SHARED_PART) {
#else
    if (cpi->oxcf.q_cfg.aq_mode == CYCLIC_REFRESH_AQ) {
#endif
      av1_cyclic_refresh_update_segment(cpi, mi_addr, mi_row, mi_col, bsize,
                                        ctx->rd_stats.rate, ctx->rd_stats.dist,
                                        txfm_info->skip_txfm);
    }
    // CfL may have become disallowed after the segment id update.
    if (mi_addr->uv_mode == UV_CFL_PRED && !is_cfl_allowed(xd))
      mi_addr->uv_mode = UV_DC_PRED;
  }
#if CONFIG_SDP
  // Chroma-only trees skip plane 0.
  for (i = (xd->tree_type == CHROMA_PART); i < num_planes; ++i) {
#else
  for (i = 0; i < num_planes; ++i) {
#endif
    // Point the working buffers back at the context that won the search.
    p[i].coeff = ctx->coeff[i];
    p[i].qcoeff = ctx->qcoeff[i];
    p[i].dqcoeff = ctx->dqcoeff[i];
    p[i].eobs = ctx->eobs[i];
    p[i].txb_entropy_ctx = ctx->txb_entropy_ctx[i];
  }
  for (i = 0; i < 2; ++i) pd[i].color_index_map = ctx->color_index_map[i];
  // Restore the coding context of the MB to that that was in place
  // when the mode was picked for it
  for (y = 0; y < mi_height; y++) {
    for (x_idx = 0; x_idx < mi_width; x_idx++) {
      // Only touch mi units that lie inside the frame.
      if ((xd->mb_to_right_edge >> (3 + MI_SIZE_LOG2)) + mi_width > x_idx &&
          (xd->mb_to_bottom_edge >> (3 + MI_SIZE_LOG2)) + mi_height > y) {
#if CONFIG_SDP
        const int mi_idx =
            get_alloc_mi_idx(mi_params, mi_row + y, mi_col + x_idx);
        xd->mi[x_idx + y * mis] = &mi_params->mi_alloc[mi_idx];
        if (xd->tree_type == LUMA_PART) {
          // Luma tree owns the full mode info.
          *(xd->mi[x_idx + y * mis]) = *mi_addr;
        } else if (xd->tree_type == CHROMA_PART) {
          // Chroma tree only overwrites the chroma-owned fields, leaving the
          // luma decision in place.
          xd->mi[x_idx + y * mis]->sb_type[PLANE_TYPE_UV] =
              mi_addr->sb_type[PLANE_TYPE_UV];
          xd->mi[x_idx + y * mis]->uv_mode = mi_addr->uv_mode;
          xd->mi[x_idx + y * mis]->angle_delta[PLANE_TYPE_UV] =
              mi_addr->angle_delta[PLANE_TYPE_UV];
          xd->mi[x_idx + y * mis]->cfl_alpha_signs = mi_addr->cfl_alpha_signs;
          xd->mi[x_idx + y * mis]->cfl_alpha_idx = mi_addr->cfl_alpha_idx;
          xd->mi[x_idx + y * mis]->partition = mi_addr->partition;
          xd->mi[x_idx + y * mis]
              ->palette_mode_info.palette_size[PLANE_TYPE_UV] =
              mi_addr->palette_mode_info.palette_size[PLANE_TYPE_UV];
          // UV palette colors occupy the upper 2/3 of the palette array.
          for (i = PALETTE_MAX_SIZE; i < 3 * PALETTE_MAX_SIZE; i++)
            xd->mi[x_idx + y * mis]->palette_mode_info.palette_colors[i] =
                mi_addr->palette_mode_info.palette_colors[i];
        } else {
          xd->mi[x_idx + y * mis] = mi_addr;
        }
#else
        xd->mi[x_idx + y * mis] = mi_addr;
#endif
      }
    }
  }

  if (cpi->oxcf.q_cfg.aq_mode)
    av1_init_plane_quantizers(cpi, x, mi_addr->segment_id);

  // Stats below are only gathered for real (non-dry-run) encodes.
  if (dry_run) return;

#if CONFIG_INTERNAL_STATS
  {
    unsigned int *const mode_chosen_counts =
        (unsigned int *)cpi->mode_chosen_counts;  // Cast const away.
    if (frame_is_intra_only(cm)) {
      static const int kf_mode_index[] = {
        THR_DC /*DC_PRED*/,
        THR_V_PRED /*V_PRED*/,
        THR_H_PRED /*H_PRED*/,
        THR_D45_PRED /*D45_PRED*/,
        THR_D135_PRED /*D135_PRED*/,
        THR_D113_PRED /*D113_PRED*/,
        THR_D157_PRED /*D157_PRED*/,
        THR_D203_PRED /*D203_PRED*/,
        THR_D67_PRED /*D67_PRED*/,
        THR_SMOOTH, /*SMOOTH_PRED*/
        THR_SMOOTH_V, /*SMOOTH_V_PRED*/
        THR_SMOOTH_H, /*SMOOTH_H_PRED*/
        THR_PAETH /*PAETH_PRED*/,
      };
      ++mode_chosen_counts[kf_mode_index[mi_addr->mode]];
    } else {
      // Note how often each mode chosen as best
      ++mode_chosen_counts[ctx->best_mode_index];
    }
  }
#endif
  if (!frame_is_intra_only(cm)) {
#if CONFIG_SDP
    if (is_inter_block(mi_addr, xd->tree_type)) {
#else
    if (is_inter_block(mi_addr)) {
#endif
      // TODO(sarahparker): global motion stats need to be handled per-tile
      // to be compatible with tile-based threading.
      update_global_motion_used(mi_addr->mode, bsize, mi_addr, rdc);
    }

    if (cm->features.interp_filter == SWITCHABLE &&
        mi_addr->motion_mode != WARPED_CAUSAL &&
        !is_nontrans_global_motion(xd, xd->mi[0])) {
      update_filter_type_count(td->counts, xd, mi_addr);
    }

    rdc->comp_pred_diff[SINGLE_REFERENCE] += ctx->single_pred_diff;
    rdc->comp_pred_diff[COMPOUND_REFERENCE] += ctx->comp_pred_diff;
    rdc->comp_pred_diff[REFERENCE_MODE_SELECT] += ctx->hybrid_pred_diff;
  }

  // Propagate the block's MVs into the frame MV buffer, clipped to the frame.
  const int x_mis = AOMMIN(bw, mi_params->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, mi_params->mi_rows - mi_row);
  if (cm->seq_params.order_hint_info.enable_ref_frame_mvs)
    av1_copy_frame_mvs(cm, mi, mi_row, mi_col, x_mis, y_mis);
}
419
420void av1_update_inter_mode_stats(FRAME_CONTEXT *fc, FRAME_COUNTS *counts,
421 PREDICTION_MODE mode, int16_t mode_context) {
422 (void)counts;
423
424 int16_t mode_ctx = mode_context & NEWMV_CTX_MASK;
425 if (mode == NEWMV) {
426#if CONFIG_ENTROPY_STATS
427 ++counts->newmv_mode[mode_ctx][0];
428#endif
429 update_cdf(fc->newmv_cdf[mode_ctx], 0, 2);
430 return;
431 }
432
433#if CONFIG_ENTROPY_STATS
434 ++counts->newmv_mode[mode_ctx][1];
435#endif
436 update_cdf(fc->newmv_cdf[mode_ctx], 1, 2);
437
438 mode_ctx = (mode_context >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
439 if (mode == GLOBALMV) {
440#if CONFIG_ENTROPY_STATS
441 ++counts->zeromv_mode[mode_ctx][0];
442#endif
443 update_cdf(fc->zeromv_cdf[mode_ctx], 0, 2);
444 return;
445 }
446
447#if CONFIG_ENTROPY_STATS
448 ++counts->zeromv_mode[mode_ctx][1];
449#endif
450 update_cdf(fc->zeromv_cdf[mode_ctx], 1, 2);
451
452 mode_ctx = (mode_context >> REFMV_OFFSET) & REFMV_CTX_MASK;
453#if CONFIG_ENTROPY_STATS
454 ++counts->refmv_mode[mode_ctx][mode != NEARESTMV];
455#endif
456 update_cdf(fc->refmv_cdf[mode_ctx], mode != NEARESTMV, 2);
457}
458
// Updates the palette-mode CDFs (and optional counters) for this block:
// whether luma/chroma use a palette and, if so, the palette size.
static void update_palette_cdf(MACROBLOCKD *xd, const MB_MODE_INFO *const mbmi,
                               FRAME_COUNTS *counts) {
  FRAME_CONTEXT *fc = xd->tile_ctx;
#if CONFIG_SDP
  const BLOCK_SIZE bsize = mbmi->sb_type[xd->tree_type == CHROMA_PART];
#else
  const BLOCK_SIZE bsize = mbmi->sb_type;
#endif
  const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
  const int palette_bsize_ctx = av1_get_palette_bsize_ctx(bsize);

  (void)counts;
  // Luma palette is only signalled for DC_PRED.
#if CONFIG_SDP
  if (mbmi->mode == DC_PRED && xd->tree_type != CHROMA_PART) {
#else
  if (mbmi->mode == DC_PRED) {
#endif
    const int n = pmi->palette_size[0];
    const int palette_mode_ctx = av1_get_palette_mode_ctx(xd);

#if CONFIG_ENTROPY_STATS
    ++counts->palette_y_mode[palette_bsize_ctx][palette_mode_ctx][n > 0];
#endif
    update_cdf(fc->palette_y_mode_cdf[palette_bsize_ctx][palette_mode_ctx],
               n > 0, 2);
    if (n > 0) {
#if CONFIG_ENTROPY_STATS
      ++counts->palette_y_size[palette_bsize_ctx][n - PALETTE_MIN_SIZE];
#endif
      update_cdf(fc->palette_y_size_cdf[palette_bsize_ctx],
                 n - PALETTE_MIN_SIZE, PALETTE_SIZES);
    }
  }
  // Chroma palette is only signalled for UV_DC_PRED.
#if CONFIG_SDP
  if (mbmi->uv_mode == UV_DC_PRED && xd->tree_type != LUMA_PART) {
#else
  if (mbmi->uv_mode == UV_DC_PRED) {
#endif
    const int n = pmi->palette_size[1];
    // Context: whether the luma plane also uses a palette.
    const int palette_uv_mode_ctx = (pmi->palette_size[0] > 0);

#if CONFIG_ENTROPY_STATS
    ++counts->palette_uv_mode[palette_uv_mode_ctx][n > 0];
#endif
    update_cdf(fc->palette_uv_mode_cdf[palette_uv_mode_ctx], n > 0, 2);

    if (n > 0) {
#if CONFIG_ENTROPY_STATS
      ++counts->palette_uv_size[palette_bsize_ctx][n - PALETTE_MIN_SIZE];
#endif
      update_cdf(fc->palette_uv_size_cdf[palette_bsize_ctx],
                 n - PALETTE_MIN_SIZE, PALETTE_SIZES);
    }
  }
}
514
// Accumulates the intra-mode symbol statistics of this block into the tile's
// CDFs (and, under CONFIG_ENTROPY_STATS, the raw counters): luma mode,
// filter-intra, angle deltas, chroma mode, CfL signs/indices and palette.
// Under CONFIG_SDP the luma section is skipped on chroma-only trees and the
// chroma section on luma-only trees.
void av1_sum_intra_stats(const AV1_COMMON *const cm, FRAME_COUNTS *counts,
                         MACROBLOCKD *xd, const MB_MODE_INFO *const mbmi,
                         const MB_MODE_INFO *above_mi,
                         const MB_MODE_INFO *left_mi, const int intraonly) {
  FRAME_CONTEXT *fc = xd->tile_ctx;
  const PREDICTION_MODE y_mode = mbmi->mode;
  (void)counts;
#if CONFIG_SDP
  const BLOCK_SIZE bsize = mbmi->sb_type[xd->tree_type == CHROMA_PART];
#else
  const BLOCK_SIZE bsize = mbmi->sb_type;
#endif
#if CONFIG_SDP
  if (xd->tree_type != CHROMA_PART) {
#endif
    if (intraonly) {
      // Key/intra-only frames condition the luma mode on the neighbors.
#if CONFIG_ENTROPY_STATS
      const PREDICTION_MODE above = av1_above_block_mode(above_mi);
      const PREDICTION_MODE left = av1_left_block_mode(left_mi);
      const int above_ctx = intra_mode_context[above];
      const int left_ctx = intra_mode_context[left];
      ++counts->kf_y_mode[above_ctx][left_ctx][y_mode];
#endif  // CONFIG_ENTROPY_STATS
      update_cdf(get_y_mode_cdf(fc, above_mi, left_mi), y_mode, INTRA_MODES);
    } else {
#if CONFIG_ENTROPY_STATS
      ++counts->y_mode[size_group_lookup[bsize]][y_mode];
#endif  // CONFIG_ENTROPY_STATS
      update_cdf(fc->y_mode_cdf[size_group_lookup[bsize]], y_mode, INTRA_MODES);
    }
#if CONFIG_MRLS
    // Multiple-reference-line index, signalled for directional modes only.
    if (cm->seq_params.enable_mrls && av1_is_directional_mode(mbmi->mode)) {
      update_cdf(fc->mrl_index_cdf, mbmi->mrl_index, MRL_LINE_NUMBER);
    }
#endif
    if (av1_filter_intra_allowed(cm, mbmi)) {
      const int use_filter_intra_mode =
          mbmi->filter_intra_mode_info.use_filter_intra;
#if CONFIG_ENTROPY_STATS
#if CONFIG_SDP
      ++counts->filter_intra[mbmi->sb_type[xd->tree_type == CHROMA_PART]]
                            [use_filter_intra_mode];
#else
      ++counts->filter_intra[mbmi->sb_type][use_filter_intra_mode];
#endif
      if (use_filter_intra_mode) {
        ++counts->filter_intra_mode[mbmi->filter_intra_mode_info
                                        .filter_intra_mode];
      }
#endif  // CONFIG_ENTROPY_STATS
#if CONFIG_SDP
      update_cdf(
          fc->filter_intra_cdfs[mbmi->sb_type[xd->tree_type == CHROMA_PART]],
          use_filter_intra_mode, 2);
#else
      update_cdf(fc->filter_intra_cdfs[mbmi->sb_type], use_filter_intra_mode, 2);
#endif
      if (use_filter_intra_mode) {
        update_cdf(fc->filter_intra_mode_cdf,
                   mbmi->filter_intra_mode_info.filter_intra_mode,
                   FILTER_INTRA_MODES);
      }
    }
    // Luma angle delta, signalled for directional modes on eligible sizes.
    if (av1_is_directional_mode(mbmi->mode) && av1_use_angle_delta(bsize)) {
#if CONFIG_ENTROPY_STATS
      ++counts->angle_delta[mbmi->mode - V_PRED]
                           [mbmi->angle_delta[PLANE_TYPE_Y] + MAX_ANGLE_DELTA];
#endif
#if CONFIG_SDP
      update_cdf(fc->angle_delta_cdf[PLANE_TYPE_Y][mbmi->mode - V_PRED],
                 mbmi->angle_delta[PLANE_TYPE_Y] + MAX_ANGLE_DELTA,
                 2 * MAX_ANGLE_DELTA + 1);
#else
      update_cdf(fc->angle_delta_cdf[mbmi->mode - V_PRED],
                 mbmi->angle_delta[PLANE_TYPE_Y] + MAX_ANGLE_DELTA,
                 2 * MAX_ANGLE_DELTA + 1);
#endif
    }
#if CONFIG_SDP
  }
#endif

  // Chroma statistics only apply where this block carries chroma info.
  if (!xd->is_chroma_ref) return;
#if CONFIG_SDP
  if (xd->tree_type != LUMA_PART) {
#endif
    const UV_PREDICTION_MODE uv_mode = mbmi->uv_mode;
    const CFL_ALLOWED_TYPE cfl_allowed = is_cfl_allowed(xd);
#if CONFIG_ENTROPY_STATS
    ++counts->uv_mode[cfl_allowed][y_mode][uv_mode];
#endif  // CONFIG_ENTROPY_STATS
    update_cdf(fc->uv_mode_cdf[cfl_allowed][y_mode], uv_mode,
               UV_INTRA_MODES - !cfl_allowed);
    if (uv_mode == UV_CFL_PRED) {
      const int8_t joint_sign = mbmi->cfl_alpha_signs;
      const uint8_t idx = mbmi->cfl_alpha_idx;

#if CONFIG_ENTROPY_STATS
      ++counts->cfl_sign[joint_sign];
#endif
      update_cdf(fc->cfl_sign_cdf, joint_sign, CFL_JOINT_SIGNS);
      // Alpha magnitude is only coded for planes with a non-zero sign.
      if (CFL_SIGN_U(joint_sign) != CFL_SIGN_ZERO) {
        aom_cdf_prob *cdf_u = fc->cfl_alpha_cdf[CFL_CONTEXT_U(joint_sign)];

#if CONFIG_ENTROPY_STATS
        ++counts->cfl_alpha[CFL_CONTEXT_U(joint_sign)][CFL_IDX_U(idx)];
#endif
        update_cdf(cdf_u, CFL_IDX_U(idx), CFL_ALPHABET_SIZE);
      }
      if (CFL_SIGN_V(joint_sign) != CFL_SIGN_ZERO) {
        aom_cdf_prob *cdf_v = fc->cfl_alpha_cdf[CFL_CONTEXT_V(joint_sign)];

#if CONFIG_ENTROPY_STATS
        ++counts->cfl_alpha[CFL_CONTEXT_V(joint_sign)][CFL_IDX_V(idx)];
#endif
        update_cdf(cdf_v, CFL_IDX_V(idx), CFL_ALPHABET_SIZE);
      }
    }
    // Chroma angle delta for directional chroma modes.
    if (av1_is_directional_mode(get_uv_mode(uv_mode)) &&
        av1_use_angle_delta(bsize)) {
#if CONFIG_ENTROPY_STATS
      ++counts->angle_delta[uv_mode - UV_V_PRED]
                           [mbmi->angle_delta[PLANE_TYPE_UV] + MAX_ANGLE_DELTA];
#endif
#if CONFIG_SDP
      // Without SDP enabled at sequence level, chroma shares the luma CDF.
      if (cm->seq_params.enable_sdp)
        update_cdf(fc->angle_delta_cdf[PLANE_TYPE_UV][uv_mode - UV_V_PRED],
                   mbmi->angle_delta[PLANE_TYPE_UV] + MAX_ANGLE_DELTA,
                   2 * MAX_ANGLE_DELTA + 1);
      else
        update_cdf(fc->angle_delta_cdf[PLANE_TYPE_Y][uv_mode - UV_V_PRED],
                   mbmi->angle_delta[PLANE_TYPE_UV] + MAX_ANGLE_DELTA,
                   2 * MAX_ANGLE_DELTA + 1);
#else
      update_cdf(fc->angle_delta_cdf[uv_mode - UV_V_PRED],
                 mbmi->angle_delta[PLANE_TYPE_UV] + MAX_ANGLE_DELTA,
                 2 * MAX_ANGLE_DELTA + 1);
#endif
    }
#if CONFIG_SDP
  }
#endif
  if (av1_allow_palette(cm->features.allow_screen_content_tools, bsize)) {
    update_palette_cdf(xd, mbmi, counts);
  }
}
661
// Restores the above/left entropy, partition and txfm contexts for the block
// at (mi_row, mi_col) from a snapshot previously taken by av1_save_context.
// Used to rewind context state between candidate partition evaluations.
void av1_restore_context(MACROBLOCK *x, const RD_SEARCH_MACROBLOCK_CONTEXT *ctx,
                         int mi_row, int mi_col, BLOCK_SIZE bsize,
                         const int num_planes) {
  MACROBLOCKD *xd = &x->e_mbd;
  int p;
  const int num_4x4_blocks_wide = mi_size_wide[bsize];
  const int num_4x4_blocks_high = mi_size_high[bsize];
  int mi_width = mi_size_wide[bsize];
  int mi_height = mi_size_high[bsize];
#if CONFIG_SDP
  // Chroma-only trees skip plane 0; each plane keeps its own partition ctx.
  for (p = (xd->tree_type == CHROMA_PART); p < num_planes; p++) {
#else
  for (p = 0; p < num_planes; p++) {
#endif
    int tx_col = mi_col;
    // Left context wraps within a superblock row.
    int tx_row = mi_row & MAX_MIB_MASK;
    // Entropy contexts are stored subsampled per plane, hence the shifts.
    memcpy(
        xd->above_entropy_context[p] + (tx_col >> xd->plane[p].subsampling_x),
        ctx->a + num_4x4_blocks_wide * p,
        (sizeof(ENTROPY_CONTEXT) * num_4x4_blocks_wide) >>
            xd->plane[p].subsampling_x);
    memcpy(xd->left_entropy_context[p] + (tx_row >> xd->plane[p].subsampling_y),
           ctx->l + num_4x4_blocks_high * p,
           (sizeof(ENTROPY_CONTEXT) * num_4x4_blocks_high) >>
               xd->plane[p].subsampling_y);
#if CONFIG_SDP
    memcpy(xd->above_partition_context[p] + mi_col, ctx->sa + mi_width * p,
           sizeof(*xd->above_partition_context[p]) * mi_width);
    memcpy(xd->left_partition_context[p] + (mi_row & MAX_MIB_MASK),
           ctx->sl + mi_height * p,
           sizeof(xd->left_partition_context[p][0]) * mi_height);
#endif
  }
#if !CONFIG_SDP
  // Single shared partition context when SDP is disabled.
  memcpy(xd->above_partition_context + mi_col, ctx->sa,
         sizeof(*xd->above_partition_context) * mi_width);
  memcpy(xd->left_partition_context + (mi_row & MAX_MIB_MASK), ctx->sl,
         sizeof(xd->left_partition_context[0]) * mi_height);
#endif
  xd->above_txfm_context = ctx->p_ta;
  xd->left_txfm_context = ctx->p_tl;
  memcpy(xd->above_txfm_context, ctx->ta,
         sizeof(*xd->above_txfm_context) * mi_width);
  memcpy(xd->left_txfm_context, ctx->tl,
         sizeof(*xd->left_txfm_context) * mi_height);
}
708
// Snapshots the above/left entropy, partition and txfm contexts covering the
// block at (mi_row, mi_col) into `ctx`, so that a partition search can try a
// candidate coding and later restore the exact pre-search state
// (counterpart of the restore function above).
void av1_save_context(const MACROBLOCK *x, RD_SEARCH_MACROBLOCK_CONTEXT *ctx,
                      int mi_row, int mi_col, BLOCK_SIZE bsize,
                      const int num_planes) {
  const MACROBLOCKD *xd = &x->e_mbd;
  int p;
  int mi_width = mi_size_wide[bsize];
  int mi_height = mi_size_high[bsize];

  // buffer the above/left context information of the block in search.
#if CONFIG_SDP
  // Under semi-decoupled partitioning a chroma-only tree skips the luma
  // plane, so start at plane 1 when tree_type == CHROMA_PART.
  for (p = (xd->tree_type == CHROMA_PART); p < num_planes; ++p) {
#else
  for (p = 0; p < num_planes; ++p) {
#endif
    int tx_col = mi_col;
    int tx_row = mi_row & MAX_MIB_MASK;  // left context is per superblock row
    // Entropy contexts are per 4x4 unit; shift by the plane's subsampling to
    // convert luma mi coordinates/counts into this plane's units.
    memcpy(
        ctx->a + mi_width * p,
        xd->above_entropy_context[p] + (tx_col >> xd->plane[p].subsampling_x),
        (sizeof(ENTROPY_CONTEXT) * mi_width) >> xd->plane[p].subsampling_x);
    memcpy(ctx->l + mi_height * p,
           xd->left_entropy_context[p] + (tx_row >> xd->plane[p].subsampling_y),
           (sizeof(ENTROPY_CONTEXT) * mi_height) >> xd->plane[p].subsampling_y);
#if CONFIG_SDP
    // SDP keeps one partition context per plane/partition structure.
    memcpy(ctx->sa + mi_width * p, xd->above_partition_context[p] + mi_col,
           sizeof(*xd->above_partition_context[p]) * mi_width);
    memcpy(ctx->sl + mi_height * p,
           xd->left_partition_context[p] + (mi_row & MAX_MIB_MASK),
           sizeof(xd->left_partition_context[p][0]) * mi_height);
#endif
  }
#if !CONFIG_SDP
  // Single shared partition context when SDP is disabled.
  memcpy(ctx->sa, xd->above_partition_context + mi_col,
         sizeof(*xd->above_partition_context) * mi_width);
  memcpy(ctx->sl, xd->left_partition_context + (mi_row & MAX_MIB_MASK),
         sizeof(xd->left_partition_context[0]) * mi_height);
#endif
  memcpy(ctx->ta, xd->above_txfm_context,
         sizeof(*xd->above_txfm_context) * mi_width);
  memcpy(ctx->tl, xd->left_txfm_context,
         sizeof(*xd->left_txfm_context) * mi_height);
  // Remember the txfm context pointers themselves so restore can reinstate
  // them as well as their contents.
  ctx->p_ta = xd->above_txfm_context;
  ctx->p_tl = xd->left_txfm_context;
}
753
// Fills the mode-info grid of a partial superblock (one straddling the
// right/bottom frame border) with the largest partition sizes that fit the
// remaining rows/cols, stepping through the SB in (bh, bw) mi strides.
static void set_partial_sb_partition(const AV1_COMMON *const cm,
                                     MB_MODE_INFO *mi, int bh_in, int bw_in,
                                     int mi_rows_remaining,
                                     int mi_cols_remaining, BLOCK_SIZE bsize,
                                     MB_MODE_INFO **mib) {
  int bh = bh_in;
  int r, c;
  for (r = 0; r < cm->seq_params.mib_size; r += bh) {
    int bw = bw_in;  // reset the column stride at the start of each block row
    for (c = 0; c < cm->seq_params.mib_size; c += bw) {
      const int grid_index = get_mi_grid_idx(&cm->mi_params, r, c);
      const int mi_index = get_alloc_mi_idx(&cm->mi_params, r, c);
      mib[grid_index] = mi + mi_index;
      // find_partition_size() also updates (bh, bw) to the size that fits
      // the remaining area.
#if CONFIG_SDP
      // With semi-decoupled partitioning, luma and chroma carry separate
      // block sizes; fixed partitioning keeps them identical.
      mib[grid_index]->sb_type[PLANE_TYPE_Y] =
          mib[grid_index]->sb_type[PLANE_TYPE_UV] = find_partition_size(
              bsize, mi_rows_remaining - r, mi_cols_remaining - c, &bh, &bw);
#else
      mib[grid_index]->sb_type = find_partition_size(
          bsize, mi_rows_remaining - r, mi_cols_remaining - c, &bh, &bw);
#endif
    }
  }
}
778
// This function attempts to set all mode info entries in a given superblock
// to the same block partition size.
// However, at the bottom and right borders of the image the requested size
// may not be allowed in which case this code attempts to choose the largest
// allowable partition.
void av1_set_fixed_partitioning(AV1_COMP *cpi, const TileInfo *const tile,
                                MB_MODE_INFO **mib, int mi_row, int mi_col,
                                BLOCK_SIZE bsize) {
  AV1_COMMON *const cm = &cpi->common;
  const CommonModeInfoParams *const mi_params = &cm->mi_params;
  const int mi_rows_remaining = tile->mi_row_end - mi_row;
  const int mi_cols_remaining = tile->mi_col_end - mi_col;
  // First mode-info unit of this superblock in the allocation array.
  MB_MODE_INFO *const mi_upper_left =
      mi_params->mi_alloc + get_alloc_mi_idx(mi_params, mi_row, mi_col);
  int bh = mi_size_high[bsize];
  int bw = mi_size_wide[bsize];

  assert(bsize >= mi_params->mi_alloc_bsize &&
         "Attempted to use bsize < mi_params->mi_alloc_bsize");
  assert((mi_rows_remaining > 0) && (mi_cols_remaining > 0));

  // Apply the requested partition size to the SB if it is all "in image"
  if ((mi_cols_remaining >= cm->seq_params.mib_size) &&
      (mi_rows_remaining >= cm->seq_params.mib_size)) {
    // Point every (bh x bw) cell of the grid at its mi and stamp the size.
    for (int block_row = 0; block_row < cm->seq_params.mib_size;
         block_row += bh) {
      for (int block_col = 0; block_col < cm->seq_params.mib_size;
           block_col += bw) {
        const int grid_index = get_mi_grid_idx(mi_params, block_row, block_col);
        const int mi_index = get_alloc_mi_idx(mi_params, block_row, block_col);
        mib[grid_index] = mi_upper_left + mi_index;
#if CONFIG_SDP
        // Semi-decoupled partitioning stores separate luma/chroma sizes;
        // both get the requested fixed size here.
        mib[grid_index]->sb_type[PLANE_TYPE_Y] = bsize;
        mib[grid_index]->sb_type[PLANE_TYPE_UV] = bsize;
#else
        mib[grid_index]->sb_type = bsize;
#endif
      }
    }
  } else {
    // Else this is a partial SB.
    set_partial_sb_partition(cm, mi_upper_left, bh, bw, mi_rows_remaining,
                             mi_cols_remaining, bsize, mib);
  }
}
leolzhao3db7cca2021-01-26 16:53:07 -0800824#if CONFIG_SDP
825int av1_is_leaf_split_partition(AV1_COMMON *cm, MACROBLOCKD *const xd,
826 int mi_row, int mi_col,
827#else
Jayasanker Je9ad4752020-06-30 19:30:03 +0530828int av1_is_leaf_split_partition(AV1_COMMON *cm, int mi_row, int mi_col,
leolzhao3db7cca2021-01-26 16:53:07 -0800829#endif
Jayasanker Je9ad4752020-06-30 19:30:03 +0530830 BLOCK_SIZE bsize) {
831 const int bs = mi_size_wide[bsize];
832 const int hbs = bs / 2;
833 assert(bsize >= BLOCK_8X8);
834 const BLOCK_SIZE subsize = get_partition_subsize(bsize, PARTITION_SPLIT);
835
836 for (int i = 0; i < 4; i++) {
837 int x_idx = (i & 1) * hbs;
838 int y_idx = (i >> 1) * hbs;
839 if ((mi_row + y_idx >= cm->mi_params.mi_rows) ||
840 (mi_col + x_idx >= cm->mi_params.mi_cols))
841 return 0;
leolzhao3db7cca2021-01-26 16:53:07 -0800842#if CONFIG_SDP
843 if (get_partition(cm, xd->tree_type == CHROMA_PART, mi_row + y_idx,
844 mi_col + x_idx, subsize) !=
845#else
Jayasanker Je9ad4752020-06-30 19:30:03 +0530846 if (get_partition(cm, mi_row + y_idx, mi_col + x_idx, subsize) !=
leolzhao3db7cca2021-01-26 16:53:07 -0800847#endif
Jayasanker Je9ad4752020-06-30 19:30:03 +0530848 PARTITION_NONE &&
849 subsize != BLOCK_8X8)
850 return 0;
851 }
852 return 1;
853}
854
855#if !CONFIG_REALTIME_ONLY
Jayasanker J37596eb2020-08-20 16:39:40 +0530856int av1_get_rdmult_delta(AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row,
857 int mi_col, int orig_rdmult) {
Jayasanker Je9ad4752020-06-30 19:30:03 +0530858 AV1_COMMON *const cm = &cpi->common;
859 const GF_GROUP *const gf_group = &cpi->gf_group;
860 assert(IMPLIES(cpi->gf_group.size > 0,
861 cpi->gf_group.index < cpi->gf_group.size));
862 const int tpl_idx = cpi->gf_group.index;
863 TplParams *const tpl_data = &cpi->tpl_data;
864 TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_idx];
865 TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
866 const uint8_t block_mis_log2 = tpl_data->tpl_stats_block_mis_log2;
867 int tpl_stride = tpl_frame->stride;
868 int64_t intra_cost = 0;
869 int64_t mc_dep_cost = 0;
870 const int mi_wide = mi_size_wide[bsize];
871 const int mi_high = mi_size_high[bsize];
872
873 if (tpl_frame->is_valid == 0) return orig_rdmult;
874
Deepa K G21e5e8e2020-03-28 13:26:09 +0530875 if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return orig_rdmult;
Jayasanker Je9ad4752020-06-30 19:30:03 +0530876
877 if (cpi->gf_group.index >= MAX_TPL_FRAME_IDX) return orig_rdmult;
878
Jayasanker Je9ad4752020-06-30 19:30:03 +0530879 int mi_count = 0;
880 const int mi_col_sr =
881 coded_to_superres_mi(mi_col, cm->superres_scale_denominator);
882 const int mi_col_end_sr =
883 coded_to_superres_mi(mi_col + mi_wide, cm->superres_scale_denominator);
884 const int mi_cols_sr = av1_pixels_to_mi(cm->superres_upscaled_width);
885 const int step = 1 << block_mis_log2;
Urvang Joshie198bf12020-10-08 15:37:55 -0700886 const int row_step = step;
887 const int col_step_sr =
888 coded_to_superres_mi(step, cm->superres_scale_denominator);
889 for (int row = mi_row; row < mi_row + mi_high; row += row_step) {
890 for (int col = mi_col_sr; col < mi_col_end_sr; col += col_step_sr) {
Jayasanker Je9ad4752020-06-30 19:30:03 +0530891 if (row >= cm->mi_params.mi_rows || col >= mi_cols_sr) continue;
892 TplDepStats *this_stats =
893 &tpl_stats[av1_tpl_ptr_pos(row, col, tpl_stride, block_mis_log2)];
894 int64_t mc_dep_delta =
895 RDCOST(tpl_frame->base_rdmult, this_stats->mc_dep_rate,
896 this_stats->mc_dep_dist);
897 intra_cost += this_stats->recrf_dist << RDDIV_BITS;
898 mc_dep_cost += (this_stats->recrf_dist << RDDIV_BITS) + mc_dep_delta;
Jayasanker Je9ad4752020-06-30 19:30:03 +0530899 mi_count++;
900 }
901 }
Urvang Joshie198bf12020-10-08 15:37:55 -0700902 assert(mi_count <= MAX_TPL_BLK_IN_SB * MAX_TPL_BLK_IN_SB);
Jayasanker Je9ad4752020-06-30 19:30:03 +0530903
904 aom_clear_system_state();
905
906 double beta = 1.0;
Jayasanker J37596eb2020-08-20 16:39:40 +0530907 if (mc_dep_cost > 0 && intra_cost > 0) {
908 const double r0 = cpi->rd.r0;
909 const double rk = (double)intra_cost / mc_dep_cost;
910 beta = (r0 / rk);
Jayasanker Je9ad4752020-06-30 19:30:03 +0530911 }
912
913 int rdmult = av1_get_adaptive_rdmult(cpi, beta);
914
915 aom_clear_system_state();
916
917 rdmult = AOMMIN(rdmult, orig_rdmult * 3 / 2);
918 rdmult = AOMMAX(rdmult, orig_rdmult * 1 / 2);
919
920 rdmult = AOMMAX(1, rdmult);
921
922 return rdmult;
923}
924
925// Checks to see if a super block is on a horizontal image edge.
926// In most cases this is the "real" edge unless there are formatting
927// bars embedded in the stream.
928int av1_active_h_edge(const AV1_COMP *cpi, int mi_row, int mi_step) {
929 int top_edge = 0;
930 int bottom_edge = cpi->common.mi_params.mi_rows;
931 int is_active_h_edge = 0;
932
Jayasanker Je9ad4752020-06-30 19:30:03 +0530933 if (((top_edge >= mi_row) && (top_edge < (mi_row + mi_step))) ||
934 ((bottom_edge >= mi_row) && (bottom_edge < (mi_row + mi_step)))) {
935 is_active_h_edge = 1;
936 }
937 return is_active_h_edge;
938}
939
940// Checks to see if a super block is on a vertical image edge.
941// In most cases this is the "real" edge unless there are formatting
942// bars embedded in the stream.
943int av1_active_v_edge(const AV1_COMP *cpi, int mi_col, int mi_step) {
944 int left_edge = 0;
945 int right_edge = cpi->common.mi_params.mi_cols;
946 int is_active_v_edge = 0;
947
Jayasanker Je9ad4752020-06-30 19:30:03 +0530948 if (((left_edge >= mi_col) && (left_edge < (mi_col + mi_step))) ||
949 ((right_edge >= mi_col) && (right_edge < (mi_col + mi_step)))) {
950 is_active_v_edge = 1;
951 }
952 return is_active_v_edge;
953}
954
// Copies the TPL statistics (inter/intra cost and per-reference motion
// vectors) of every motion-estimation-sized unit covering the superblock at
// (mi_row, mi_col) into sb_enc for use by the partition search. Out-of-frame
// units are filled with sentinel values. sb_enc->tpl_data_count stays 0 when
// TPL stats are unavailable for this frame.
void av1_get_tpl_stats_sb(AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row,
                          int mi_col, SuperBlockEnc *sb_enc) {
  sb_enc->tpl_data_count = 0;

  if (!cpi->oxcf.algo_cfg.enable_tpl_model) return;
  if (cpi->common.current_frame.frame_type == KEY_FRAME) return;
  // Overlay updates carry no usable TPL stats.
  const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
  if (update_type == INTNL_OVERLAY_UPDATE || update_type == OVERLAY_UPDATE ||
      update_type == KFFLT_OVERLAY_UPDATE)
    return;
  assert(IMPLIES(cpi->gf_group.size > 0,
                 cpi->gf_group.index < cpi->gf_group.size));

  AV1_COMMON *const cm = &cpi->common;
  const int gf_group_index = cpi->gf_group.index;
  TplParams *const tpl_data = &cpi->tpl_data;
  TplDepFrame *tpl_frame = &tpl_data->tpl_frame[gf_group_index];
  TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
  int tpl_stride = tpl_frame->stride;
  const int mi_wide = mi_size_wide[bsize];
  const int mi_high = mi_size_high[bsize];

  if (tpl_frame->is_valid == 0) return;
  if (gf_group_index >= MAX_TPL_FRAME_IDX) return;

  int mi_count = 0;  // number of valid (in-frame) units copied
  int count = 0;     // number of output slots written, including sentinels
  const int mi_col_sr =
      coded_to_superres_mi(mi_col, cm->superres_scale_denominator);
  const int mi_col_end_sr =
      coded_to_superres_mi(mi_col + mi_wide, cm->superres_scale_denominator);
  // mi_cols_sr is mi_cols at superres case.
  const int mi_cols_sr = av1_pixels_to_mi(cm->superres_upscaled_width);

  // TPL store unit size is not the same as the motion estimation unit size.
  // Here always use motion estimation size to avoid getting repetitive inter/
  // intra cost.
  const BLOCK_SIZE tpl_bsize = convert_length_to_bsize(tpl_data->tpl_bsize_1d);
  assert(mi_size_wide[tpl_bsize] == mi_size_high[tpl_bsize]);
  const int row_step = mi_size_high[tpl_bsize];
  const int col_step_sr = coded_to_superres_mi(mi_size_wide[tpl_bsize],
                                               cm->superres_scale_denominator);

  // Stride is only based on SB size, and we fill in values for every 16x16
  // block in a SB.
  sb_enc->tpl_stride = (mi_col_end_sr - mi_col_sr) / col_step_sr;

  for (int row = mi_row; row < mi_row + mi_high; row += row_step) {
    for (int col = mi_col_sr; col < mi_col_end_sr; col += col_step_sr) {
      assert(count < MAX_TPL_BLK_IN_SB * MAX_TPL_BLK_IN_SB);
      // Handle partial SB, so that no invalid values are used later.
      if (row >= cm->mi_params.mi_rows || col >= mi_cols_sr) {
        sb_enc->tpl_inter_cost[count] = INT64_MAX;
        sb_enc->tpl_intra_cost[count] = INT64_MAX;
        for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
          sb_enc->tpl_mv[count][i].as_int = INVALID_MV;
        }
        count++;
        continue;
      }

      TplDepStats *this_stats = &tpl_stats[av1_tpl_ptr_pos(
          row, col, tpl_stride, tpl_data->tpl_stats_block_mis_log2)];
      sb_enc->tpl_inter_cost[count] = this_stats->inter_cost;
      sb_enc->tpl_intra_cost[count] = this_stats->intra_cost;
      memcpy(sb_enc->tpl_mv[count], this_stats->mv, sizeof(this_stats->mv));
      mi_count++;
      count++;
    }
  }

  assert(mi_count <= MAX_TPL_BLK_IN_SB * MAX_TPL_BLK_IN_SB);
  sb_enc->tpl_data_count = mi_count;
}
1029
// Computes the superblock-level qindex for objective (TPL-driven) delta-q.
// The frame propagation factor r0 divided by this block's local cost ratio
// rk gives beta; av1_get_deltaq_offset() converts beta into a qindex offset,
// which is clamped to strictly within +/- 9 delta_q_res steps, added to the
// frame base qindex, and clamped to the valid qindex range. Falls back to
// the base qindex when TPL stats are unusable for this frame.
int av1_get_q_for_deltaq_objective(AV1_COMP *const cpi, BLOCK_SIZE bsize,
                                   int mi_row, int mi_col) {
  AV1_COMMON *const cm = &cpi->common;
  const GF_GROUP *const gf_group = &cpi->gf_group;
  assert(IMPLIES(cpi->gf_group.size > 0,
                 cpi->gf_group.index < cpi->gf_group.size));
  const int tpl_idx = cpi->gf_group.index;
  TplParams *const tpl_data = &cpi->tpl_data;
  TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_idx];
  TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
  const uint8_t block_mis_log2 = tpl_data->tpl_stats_block_mis_log2;
  int tpl_stride = tpl_frame->stride;
  int64_t intra_cost = 0;
  int64_t mc_dep_cost = 0;
  const int mi_wide = mi_size_wide[bsize];
  const int mi_high = mi_size_high[bsize];
  const int base_qindex = cm->quant_params.base_qindex;

  if (tpl_frame->is_valid == 0) return base_qindex;

  if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return base_qindex;

  if (cpi->gf_group.index >= MAX_TPL_FRAME_IDX) return base_qindex;

  int mi_count = 0;
  // TPL stats are stored in the superres-scaled column domain; rows are
  // unscaled.
  const int mi_col_sr =
      coded_to_superres_mi(mi_col, cm->superres_scale_denominator);
  const int mi_col_end_sr =
      coded_to_superres_mi(mi_col + mi_wide, cm->superres_scale_denominator);
  const int mi_cols_sr = av1_pixels_to_mi(cm->superres_upscaled_width);
  const int step = 1 << block_mis_log2;
  const int row_step = step;
  const int col_step_sr =
      coded_to_superres_mi(step, cm->superres_scale_denominator);
  // Accumulate intra cost and motion-compensated dependency cost over every
  // TPL unit covered by the block; out-of-frame units are skipped.
  for (int row = mi_row; row < mi_row + mi_high; row += row_step) {
    for (int col = mi_col_sr; col < mi_col_end_sr; col += col_step_sr) {
      if (row >= cm->mi_params.mi_rows || col >= mi_cols_sr) continue;
      TplDepStats *this_stats =
          &tpl_stats[av1_tpl_ptr_pos(row, col, tpl_stride, block_mis_log2)];
      int64_t mc_dep_delta =
          RDCOST(tpl_frame->base_rdmult, this_stats->mc_dep_rate,
                 this_stats->mc_dep_dist);
      intra_cost += this_stats->recrf_dist << RDDIV_BITS;
      mc_dep_cost += (this_stats->recrf_dist << RDDIV_BITS) + mc_dep_delta;
      mi_count++;
    }
  }
  assert(mi_count <= MAX_TPL_BLK_IN_SB * MAX_TPL_BLK_IN_SB);

  aom_clear_system_state();

  int offset = 0;
  double beta = 1.0;
  if (mc_dep_cost > 0 && intra_cost > 0) {
    const double r0 = cpi->rd.r0;
    const double rk = (double)intra_cost / mc_dep_cost;
    beta = (r0 / rk);
    assert(beta > 0.0);
  }
  offset = av1_get_deltaq_offset(cpi, base_qindex, beta);
  aom_clear_system_state();

  // Keep the offset strictly inside 9 delta-q resolution steps.
  const DeltaQInfo *const delta_q_info = &cm->delta_q_info;
  offset = AOMMIN(offset, delta_q_info->delta_q_res * 9 - 1);
  offset = AOMMAX(offset, -delta_q_info->delta_q_res * 9 + 1);
  int qindex = cm->quant_params.base_qindex + offset;
#if CONFIG_EXTQUANT
  // Extended quantization: the maximum qindex depends on bit depth.
  qindex = AOMMIN(qindex, cm->seq_params.bit_depth == AOM_BITS_8
                              ? MAXQ_8_BITS
                              : cm->seq_params.bit_depth == AOM_BITS_10
                                    ? MAXQ_10_BITS
                                    : MAXQ);
#else
  qindex = AOMMIN(qindex, MAXQ);
#endif
  qindex = AOMMAX(qindex, MINQ);

  return qindex;
}
1113#endif // !CONFIG_REALTIME_ONLY
1114
1115void av1_reset_simple_motion_tree_partition(SIMPLE_MOTION_DATA_TREE *sms_tree,
1116 BLOCK_SIZE bsize) {
1117 sms_tree->partitioning = PARTITION_NONE;
1118
1119 if (bsize >= BLOCK_8X8) {
1120 BLOCK_SIZE subsize = get_partition_subsize(bsize, PARTITION_SPLIT);
1121 for (int idx = 0; idx < 4; ++idx)
1122 av1_reset_simple_motion_tree_partition(sms_tree->split[idx], subsize);
1123 }
1124}
1125
1126// Record the ref frames that have been selected by square partition blocks.
1127void av1_update_picked_ref_frames_mask(MACROBLOCK *const x, int ref_type,
1128 BLOCK_SIZE bsize, int mib_size,
1129 int mi_row, int mi_col) {
1130 assert(mi_size_wide[bsize] == mi_size_high[bsize]);
1131 const int sb_size_mask = mib_size - 1;
1132 const int mi_row_in_sb = mi_row & sb_size_mask;
1133 const int mi_col_in_sb = mi_col & sb_size_mask;
1134 const int mi_size = mi_size_wide[bsize];
1135 for (int i = mi_row_in_sb; i < mi_row_in_sb + mi_size; ++i) {
1136 for (int j = mi_col_in_sb; j < mi_col_in_sb + mi_size; ++j) {
1137 x->picked_ref_frames_mask[i * 32 + j] |= 1 << ref_type;
1138 }
1139 }
1140}
1141
1142static void avg_cdf_symbol(aom_cdf_prob *cdf_ptr_left, aom_cdf_prob *cdf_ptr_tr,
1143 int num_cdfs, int cdf_stride, int nsymbs,
1144 int wt_left, int wt_tr) {
1145 for (int i = 0; i < num_cdfs; i++) {
1146 for (int j = 0; j <= nsymbs; j++) {
1147 cdf_ptr_left[i * cdf_stride + j] =
1148 (aom_cdf_prob)(((int)cdf_ptr_left[i * cdf_stride + j] * wt_left +
1149 (int)cdf_ptr_tr[i * cdf_stride + j] * wt_tr +
1150 ((wt_left + wt_tr) / 2)) /
1151 (wt_left + wt_tr));
1152 assert(cdf_ptr_left[i * cdf_stride + j] >= 0 &&
1153 cdf_ptr_left[i * cdf_stride + j] < CDF_PROB_TOP);
1154 }
1155 }
1156}
1157
// Averages the named CDF arrays of the left and top-right contexts in place
// (result lands in the left context), assuming the default stride of
// CDF_SIZE(nsymbs).
#define AVERAGE_CDF(cname_left, cname_tr, nsymbs) \
  AVG_CDF_STRIDE(cname_left, cname_tr, nsymbs, CDF_SIZE(nsymbs))

// As AVERAGE_CDF, but with an explicit cdf_stride for arrays whose allocated
// stride exceeds the number of symbols actually used. NOTE: the expansion
// references local variables `wt_left` and `wt_tr`, which must be in scope
// at the call site. `cname_left` must be a true array so sizeof yields the
// full element count.
#define AVG_CDF_STRIDE(cname_left, cname_tr, nsymbs, cdf_stride)           \
  do {                                                                     \
    aom_cdf_prob *cdf_ptr_left = (aom_cdf_prob *)cname_left;               \
    aom_cdf_prob *cdf_ptr_tr = (aom_cdf_prob *)cname_tr;                   \
    int array_size = (int)sizeof(cname_left) / sizeof(aom_cdf_prob);       \
    int num_cdfs = array_size / cdf_stride;                                \
    avg_cdf_symbol(cdf_ptr_left, cdf_ptr_tr, num_cdfs, cdf_stride, nsymbs, \
                   wt_left, wt_tr);                                        \
  } while (0)
1170
// Averages every motion-vector CDF (joints, classes, fractional and
// high-precision bits, signs) of the left and top-right contexts into
// nmv_left. The AVERAGE_CDF macro picks up the wt_left/wt_tr parameters by
// name.
static void avg_nmv(nmv_context *nmv_left, nmv_context *nmv_tr, int wt_left,
                    int wt_tr) {
  AVERAGE_CDF(nmv_left->joints_cdf, nmv_tr->joints_cdf, 4);
  // Per-component (vertical/horizontal) CDFs.
  for (int i = 0; i < 2; i++) {
    AVERAGE_CDF(nmv_left->comps[i].classes_cdf, nmv_tr->comps[i].classes_cdf,
                MV_CLASSES);
    AVERAGE_CDF(nmv_left->comps[i].class0_fp_cdf,
                nmv_tr->comps[i].class0_fp_cdf, MV_FP_SIZE);
    AVERAGE_CDF(nmv_left->comps[i].fp_cdf, nmv_tr->comps[i].fp_cdf, MV_FP_SIZE);
    AVERAGE_CDF(nmv_left->comps[i].sign_cdf, nmv_tr->comps[i].sign_cdf, 2);
    AVERAGE_CDF(nmv_left->comps[i].class0_hp_cdf,
                nmv_tr->comps[i].class0_hp_cdf, 2);
    AVERAGE_CDF(nmv_left->comps[i].hp_cdf, nmv_tr->comps[i].hp_cdf, 2);
    AVERAGE_CDF(nmv_left->comps[i].class0_cdf, nmv_tr->comps[i].class0_cdf,
                CLASS0_SIZE);
    AVERAGE_CDF(nmv_left->comps[i].bits_cdf, nmv_tr->comps[i].bits_cdf, 2);
  }
}
1189
// In case of row-based multi-threading of encoder, since we always
// keep a top - right sync, we can average the top - right SB's CDFs and
// the left SB's CDFs and use the same for current SB's encoding to
// improve the performance. This function facilitates the averaging
// of CDF and used only when row-mt is enabled in encoder.
// The result is written into ctx_left; weights are wt_left : wt_tr.
void av1_avg_cdf_symbols(FRAME_CONTEXT *ctx_left, FRAME_CONTEXT *ctx_tr,
                         int wt_left, int wt_tr) {
  // Transform-coefficient CDFs.
  AVERAGE_CDF(ctx_left->txb_skip_cdf, ctx_tr->txb_skip_cdf, 2);
  AVERAGE_CDF(ctx_left->eob_extra_cdf, ctx_tr->eob_extra_cdf, 2);
  AVERAGE_CDF(ctx_left->dc_sign_cdf, ctx_tr->dc_sign_cdf, 2);
  AVERAGE_CDF(ctx_left->eob_flag_cdf16, ctx_tr->eob_flag_cdf16, 5);
  AVERAGE_CDF(ctx_left->eob_flag_cdf32, ctx_tr->eob_flag_cdf32, 6);
  AVERAGE_CDF(ctx_left->eob_flag_cdf64, ctx_tr->eob_flag_cdf64, 7);
  AVERAGE_CDF(ctx_left->eob_flag_cdf128, ctx_tr->eob_flag_cdf128, 8);
  AVERAGE_CDF(ctx_left->eob_flag_cdf256, ctx_tr->eob_flag_cdf256, 9);
  AVERAGE_CDF(ctx_left->eob_flag_cdf512, ctx_tr->eob_flag_cdf512, 10);
  AVERAGE_CDF(ctx_left->eob_flag_cdf1024, ctx_tr->eob_flag_cdf1024, 11);
  AVERAGE_CDF(ctx_left->coeff_base_eob_cdf, ctx_tr->coeff_base_eob_cdf, 3);
  AVERAGE_CDF(ctx_left->coeff_base_cdf, ctx_tr->coeff_base_cdf, 4);
  AVERAGE_CDF(ctx_left->coeff_br_cdf, ctx_tr->coeff_br_cdf, BR_CDF_SIZE);
  // Inter-mode CDFs.
  AVERAGE_CDF(ctx_left->newmv_cdf, ctx_tr->newmv_cdf, 2);
  AVERAGE_CDF(ctx_left->zeromv_cdf, ctx_tr->zeromv_cdf, 2);
  AVERAGE_CDF(ctx_left->refmv_cdf, ctx_tr->refmv_cdf, 2);
  AVERAGE_CDF(ctx_left->drl_cdf, ctx_tr->drl_cdf, 2);
  AVERAGE_CDF(ctx_left->inter_compound_mode_cdf,
              ctx_tr->inter_compound_mode_cdf, INTER_COMPOUND_MODES);
  AVERAGE_CDF(ctx_left->compound_type_cdf, ctx_tr->compound_type_cdf,
              MASKED_COMPOUND_TYPES);
  AVERAGE_CDF(ctx_left->wedge_idx_cdf, ctx_tr->wedge_idx_cdf, 16);
  AVERAGE_CDF(ctx_left->interintra_cdf, ctx_tr->interintra_cdf, 2);
  AVERAGE_CDF(ctx_left->wedge_interintra_cdf, ctx_tr->wedge_interintra_cdf, 2);
  AVERAGE_CDF(ctx_left->interintra_mode_cdf, ctx_tr->interintra_mode_cdf,
              INTERINTRA_MODES);
  AVERAGE_CDF(ctx_left->motion_mode_cdf, ctx_tr->motion_mode_cdf, MOTION_MODES);
  AVERAGE_CDF(ctx_left->obmc_cdf, ctx_tr->obmc_cdf, 2);
  // Palette CDFs; the color-index CDFs use a symbol count that depends on
  // the palette size, inside a fixed CDF_SIZE(PALETTE_COLORS) stride.
  AVERAGE_CDF(ctx_left->palette_y_size_cdf, ctx_tr->palette_y_size_cdf,
              PALETTE_SIZES);
  AVERAGE_CDF(ctx_left->palette_uv_size_cdf, ctx_tr->palette_uv_size_cdf,
              PALETTE_SIZES);
  for (int j = 0; j < PALETTE_SIZES; j++) {
    int nsymbs = j + PALETTE_MIN_SIZE;
    AVG_CDF_STRIDE(ctx_left->palette_y_color_index_cdf[j],
                   ctx_tr->palette_y_color_index_cdf[j], nsymbs,
                   CDF_SIZE(PALETTE_COLORS));
    AVG_CDF_STRIDE(ctx_left->palette_uv_color_index_cdf[j],
                   ctx_tr->palette_uv_color_index_cdf[j], nsymbs,
                   CDF_SIZE(PALETTE_COLORS));
  }
  AVERAGE_CDF(ctx_left->palette_y_mode_cdf, ctx_tr->palette_y_mode_cdf, 2);
  AVERAGE_CDF(ctx_left->palette_uv_mode_cdf, ctx_tr->palette_uv_mode_cdf, 2);
  // Reference-frame CDFs.
  AVERAGE_CDF(ctx_left->comp_inter_cdf, ctx_tr->comp_inter_cdf, 2);
  AVERAGE_CDF(ctx_left->single_ref_cdf, ctx_tr->single_ref_cdf, 2);
  AVERAGE_CDF(ctx_left->comp_ref_type_cdf, ctx_tr->comp_ref_type_cdf, 2);
  AVERAGE_CDF(ctx_left->uni_comp_ref_cdf, ctx_tr->uni_comp_ref_cdf, 2);
  AVERAGE_CDF(ctx_left->comp_ref_cdf, ctx_tr->comp_ref_cdf, 2);
  AVERAGE_CDF(ctx_left->comp_bwdref_cdf, ctx_tr->comp_bwdref_cdf, 2);
  AVERAGE_CDF(ctx_left->txfm_partition_cdf, ctx_tr->txfm_partition_cdf, 2);
#if !CONFIG_REMOVE_DIST_WTD_COMP
  AVERAGE_CDF(ctx_left->compound_index_cdf, ctx_tr->compound_index_cdf, 2);
#endif  // !CONFIG_REMOVE_DIST_WTD_COMP
  AVERAGE_CDF(ctx_left->comp_group_idx_cdf, ctx_tr->comp_group_idx_cdf, 2);
  AVERAGE_CDF(ctx_left->skip_mode_cdfs, ctx_tr->skip_mode_cdfs, 2);
  AVERAGE_CDF(ctx_left->skip_txfm_cdfs, ctx_tr->skip_txfm_cdfs, 2);
  AVERAGE_CDF(ctx_left->intra_inter_cdf, ctx_tr->intra_inter_cdf, 2);
  // Motion-vector CDFs (regular and intrabc/displacement vectors).
  avg_nmv(&ctx_left->nmvc, &ctx_tr->nmvc, wt_left, wt_tr);
  avg_nmv(&ctx_left->ndvc, &ctx_tr->ndvc, wt_left, wt_tr);
  AVERAGE_CDF(ctx_left->intrabc_cdf, ctx_tr->intrabc_cdf, 2);
  AVERAGE_CDF(ctx_left->seg.tree_cdf, ctx_tr->seg.tree_cdf, MAX_SEGMENTS);
  AVERAGE_CDF(ctx_left->seg.pred_cdf, ctx_tr->seg.pred_cdf, 2);
  AVERAGE_CDF(ctx_left->seg.spatial_pred_seg_cdf,
              ctx_tr->seg.spatial_pred_seg_cdf, MAX_SEGMENTS);
  AVERAGE_CDF(ctx_left->filter_intra_cdfs, ctx_tr->filter_intra_cdfs, 2);
  AVERAGE_CDF(ctx_left->filter_intra_mode_cdf, ctx_tr->filter_intra_mode_cdf,
              FILTER_INTRA_MODES);
  AVERAGE_CDF(ctx_left->switchable_restore_cdf, ctx_tr->switchable_restore_cdf,
              RESTORE_SWITCHABLE_TYPES);
  AVERAGE_CDF(ctx_left->wiener_restore_cdf, ctx_tr->wiener_restore_cdf, 2);
  AVERAGE_CDF(ctx_left->sgrproj_restore_cdf, ctx_tr->sgrproj_restore_cdf, 2);
#if CONFIG_MRLS
  // Multi-reference-line intra prediction line index.
  AVERAGE_CDF(ctx_left->mrl_index_cdf, ctx_tr->mrl_index_cdf, MRL_LINE_NUMBER);
#endif
  AVERAGE_CDF(ctx_left->y_mode_cdf, ctx_tr->y_mode_cdf, INTRA_MODES);
  // uv_mode_cdf[0] averages one fewer symbol than its allocated
  // CDF_SIZE(UV_INTRA_MODES) stride.
  AVG_CDF_STRIDE(ctx_left->uv_mode_cdf[0], ctx_tr->uv_mode_cdf[0],
                 UV_INTRA_MODES - 1, CDF_SIZE(UV_INTRA_MODES));
  AVERAGE_CDF(ctx_left->uv_mode_cdf[1], ctx_tr->uv_mode_cdf[1], UV_INTRA_MODES);
  // Partition CDFs: contexts 0-3 use 4 symbols, 4-15 use all 10, and 16+
  // use 8, all within a CDF_SIZE(10) stride.
#if CONFIG_SDP
  // SDP keeps one partition CDF set per partition structure (luma/chroma).
  for (int plane_index = 0; plane_index < PARTITION_STRUCTURE_NUM;
       plane_index++) {
    for (int i = 0; i < PARTITION_CONTEXTS; i++) {
      if (i < 4) {
        AVG_CDF_STRIDE(ctx_left->partition_cdf[plane_index][i],
                       ctx_tr->partition_cdf[plane_index][i], 4, CDF_SIZE(10));
      } else if (i < 16) {
        AVERAGE_CDF(ctx_left->partition_cdf[plane_index][i],
                    ctx_tr->partition_cdf[plane_index][i], 10);
      } else {
        AVG_CDF_STRIDE(ctx_left->partition_cdf[plane_index][i],
                       ctx_tr->partition_cdf[plane_index][i], 8, CDF_SIZE(10));
      }
    }
  }
#else
  for (int i = 0; i < PARTITION_CONTEXTS; i++) {
    if (i < 4) {
      AVG_CDF_STRIDE(ctx_left->partition_cdf[i], ctx_tr->partition_cdf[i], 4,
                     CDF_SIZE(10));
    } else if (i < 16) {
      AVERAGE_CDF(ctx_left->partition_cdf[i], ctx_tr->partition_cdf[i], 10);
    } else {
      AVG_CDF_STRIDE(ctx_left->partition_cdf[i], ctx_tr->partition_cdf[i], 8,
                     CDF_SIZE(10));
    }
  }
#endif
  AVERAGE_CDF(ctx_left->switchable_interp_cdf, ctx_tr->switchable_interp_cdf,
              SWITCHABLE_FILTERS);
  AVERAGE_CDF(ctx_left->kf_y_cdf, ctx_tr->kf_y_cdf, INTRA_MODES);
  AVERAGE_CDF(ctx_left->angle_delta_cdf, ctx_tr->angle_delta_cdf,
              2 * MAX_ANGLE_DELTA + 1);
  AVG_CDF_STRIDE(ctx_left->tx_size_cdf[0], ctx_tr->tx_size_cdf[0], MAX_TX_DEPTH,
                 CDF_SIZE(MAX_TX_DEPTH + 1));
  AVERAGE_CDF(ctx_left->tx_size_cdf[1], ctx_tr->tx_size_cdf[1],
              MAX_TX_DEPTH + 1);
  AVERAGE_CDF(ctx_left->tx_size_cdf[2], ctx_tr->tx_size_cdf[2],
              MAX_TX_DEPTH + 1);
  AVERAGE_CDF(ctx_left->tx_size_cdf[3], ctx_tr->tx_size_cdf[3],
              MAX_TX_DEPTH + 1);
  AVERAGE_CDF(ctx_left->delta_q_cdf, ctx_tr->delta_q_cdf, DELTA_Q_PROBS + 1);
  AVERAGE_CDF(ctx_left->delta_lf_cdf, ctx_tr->delta_lf_cdf, DELTA_LF_PROBS + 1);
  for (int i = 0; i < FRAME_LF_COUNT; i++) {
    AVERAGE_CDF(ctx_left->delta_lf_multi_cdf[i], ctx_tr->delta_lf_multi_cdf[i],
                DELTA_LF_PROBS + 1);
  }
  // Transform-type CDFs: each ext-tx set index uses a reduced symbol count
  // inside a CDF_SIZE(TX_TYPES) stride.
  AVG_CDF_STRIDE(ctx_left->intra_ext_tx_cdf[1], ctx_tr->intra_ext_tx_cdf[1], 7,
                 CDF_SIZE(TX_TYPES));
  AVG_CDF_STRIDE(ctx_left->intra_ext_tx_cdf[2], ctx_tr->intra_ext_tx_cdf[2], 5,
                 CDF_SIZE(TX_TYPES));
  AVG_CDF_STRIDE(ctx_left->inter_ext_tx_cdf[1], ctx_tr->inter_ext_tx_cdf[1], 16,
                 CDF_SIZE(TX_TYPES));
  AVG_CDF_STRIDE(ctx_left->inter_ext_tx_cdf[2], ctx_tr->inter_ext_tx_cdf[2], 12,
                 CDF_SIZE(TX_TYPES));
  AVG_CDF_STRIDE(ctx_left->inter_ext_tx_cdf[3], ctx_tr->inter_ext_tx_cdf[3], 2,
                 CDF_SIZE(TX_TYPES));
  AVERAGE_CDF(ctx_left->cfl_sign_cdf, ctx_tr->cfl_sign_cdf, CFL_JOINT_SIGNS);
  AVERAGE_CDF(ctx_left->cfl_alpha_cdf, ctx_tr->cfl_alpha_cdf,
              CFL_ALPHABET_SIZE);
}
1337
1338// Grade the temporal variation of the source by comparing the current sb and
1339// its collocated block in the last frame.
1340void av1_source_content_sb(AV1_COMP *cpi, MACROBLOCK *x, int offset) {
1341 unsigned int tmp_sse;
1342 unsigned int tmp_variance;
1343 const BLOCK_SIZE bsize = cpi->common.seq_params.sb_size;
1344 uint8_t *src_y = cpi->source->y_buffer;
1345 int src_ystride = cpi->source->y_stride;
1346 uint8_t *last_src_y = cpi->last_source->y_buffer;
1347 int last_src_ystride = cpi->last_source->y_stride;
1348 uint64_t avg_source_sse_threshold = 100000; // ~5*5*(64*64)
1349 uint64_t avg_source_sse_threshold_high = 1000000; // ~15*15*(64*64)
1350 uint64_t sum_sq_thresh = 10000; // sum = sqrt(thresh / 64*64)) ~1.5
Jayasanker Je9ad4752020-06-30 19:30:03 +05301351 MACROBLOCKD *xd = &x->e_mbd;
1352 if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) return;
Jayasanker Je9ad4752020-06-30 19:30:03 +05301353 src_y += offset;
1354 last_src_y += offset;
1355 tmp_variance = cpi->fn_ptr[bsize].vf(src_y, src_ystride, last_src_y,
1356 last_src_ystride, &tmp_sse);
1357 // Note: tmp_sse - tmp_variance = ((sum * sum) >> 12)
1358 // Detect large lighting change.
1359 if (tmp_variance < (tmp_sse >> 1) && (tmp_sse - tmp_variance) > sum_sq_thresh)
1360 x->content_state_sb = kLowVarHighSumdiff;
1361 else if (tmp_sse < avg_source_sse_threshold)
1362 x->content_state_sb = kLowSad;
1363 else if (tmp_sse > avg_source_sse_threshold_high)
1364 x->content_state_sb = kHighSad;
1365}
1366
1367// Memset the mbmis at the current superblock to 0
1368void av1_reset_mbmi(CommonModeInfoParams *const mi_params, BLOCK_SIZE sb_size,
1369 int mi_row, int mi_col) {
1370 // size of sb in unit of mi (BLOCK_4X4)
1371 const int sb_size_mi = mi_size_wide[sb_size];
1372 const int mi_alloc_size_1d = mi_size_wide[mi_params->mi_alloc_bsize];
1373 // size of sb in unit of allocated mi size
1374 const int sb_size_alloc_mi = mi_size_wide[sb_size] / mi_alloc_size_1d;
1375 assert(mi_params->mi_alloc_stride % sb_size_alloc_mi == 0 &&
1376 "mi is not allocated as a multiple of sb!");
1377 assert(mi_params->mi_stride % sb_size_mi == 0 &&
1378 "mi_grid_base is not allocated as a multiple of sb!");
1379
1380 const int mi_rows = mi_size_high[sb_size];
1381 for (int cur_mi_row = 0; cur_mi_row < mi_rows; cur_mi_row++) {
1382 assert(get_mi_grid_idx(mi_params, 0, mi_col + mi_alloc_size_1d) <
1383 mi_params->mi_stride);
1384 const int mi_grid_idx =
1385 get_mi_grid_idx(mi_params, mi_row + cur_mi_row, mi_col);
1386 const int alloc_mi_idx =
1387 get_alloc_mi_idx(mi_params, mi_row + cur_mi_row, mi_col);
1388 memset(&mi_params->mi_grid_base[mi_grid_idx], 0,
1389 sb_size_mi * sizeof(*mi_params->mi_grid_base));
1390 memset(&mi_params->tx_type_map[mi_grid_idx], 0,
1391 sb_size_mi * sizeof(*mi_params->tx_type_map));
1392 if (cur_mi_row % mi_alloc_size_1d == 0) {
1393 memset(&mi_params->mi_alloc[alloc_mi_idx], 0,
1394 sb_size_alloc_mi * sizeof(*mi_params->mi_alloc));
1395 }
1396 }
1397}
1398
1399void av1_backup_sb_state(SB_FIRST_PASS_STATS *sb_fp_stats, const AV1_COMP *cpi,
1400 ThreadData *td, const TileDataEnc *tile_data,
1401 int mi_row, int mi_col) {
1402 MACROBLOCK *x = &td->mb;
1403 MACROBLOCKD *xd = &x->e_mbd;
1404 const TileInfo *tile_info = &tile_data->tile_info;
1405
1406 const AV1_COMMON *cm = &cpi->common;
1407 const int num_planes = av1_num_planes(cm);
1408 const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
1409
1410 xd->above_txfm_context =
1411 cm->above_contexts.txfm[tile_info->tile_row] + mi_col;
1412 xd->left_txfm_context =
1413 xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
1414 av1_save_context(x, &sb_fp_stats->x_ctx, mi_row, mi_col, sb_size, num_planes);
1415
1416 sb_fp_stats->rd_count = cpi->td.rd_counts;
1417 sb_fp_stats->split_count = x->txfm_search_info.txb_split_count;
1418
1419 sb_fp_stats->fc = *td->counts;
1420
1421 memcpy(sb_fp_stats->inter_mode_rd_models, tile_data->inter_mode_rd_models,
1422 sizeof(sb_fp_stats->inter_mode_rd_models));
1423
1424 memcpy(sb_fp_stats->thresh_freq_fact, x->thresh_freq_fact,
1425 sizeof(sb_fp_stats->thresh_freq_fact));
1426
1427 const int alloc_mi_idx = get_alloc_mi_idx(&cm->mi_params, mi_row, mi_col);
1428 sb_fp_stats->current_qindex =
1429 cm->mi_params.mi_alloc[alloc_mi_idx].current_qindex;
1430
1431#if CONFIG_INTERNAL_STATS
1432 memcpy(sb_fp_stats->mode_chosen_counts, cpi->mode_chosen_counts,
1433 sizeof(sb_fp_stats->mode_chosen_counts));
1434#endif // CONFIG_INTERNAL_STATS
1435}
1436
1437void av1_restore_sb_state(const SB_FIRST_PASS_STATS *sb_fp_stats, AV1_COMP *cpi,
1438 ThreadData *td, TileDataEnc *tile_data, int mi_row,
1439 int mi_col) {
1440 MACROBLOCK *x = &td->mb;
1441
1442 const AV1_COMMON *cm = &cpi->common;
1443 const int num_planes = av1_num_planes(cm);
1444 const BLOCK_SIZE sb_size = cm->seq_params.sb_size;
1445
1446 av1_restore_context(x, &sb_fp_stats->x_ctx, mi_row, mi_col, sb_size,
1447 num_planes);
1448
1449 cpi->td.rd_counts = sb_fp_stats->rd_count;
1450 x->txfm_search_info.txb_split_count = sb_fp_stats->split_count;
1451
1452 *td->counts = sb_fp_stats->fc;
1453
1454 memcpy(tile_data->inter_mode_rd_models, sb_fp_stats->inter_mode_rd_models,
1455 sizeof(sb_fp_stats->inter_mode_rd_models));
1456 memcpy(x->thresh_freq_fact, sb_fp_stats->thresh_freq_fact,
1457 sizeof(sb_fp_stats->thresh_freq_fact));
1458
1459 const int alloc_mi_idx = get_alloc_mi_idx(&cm->mi_params, mi_row, mi_col);
1460 cm->mi_params.mi_alloc[alloc_mi_idx].current_qindex =
1461 sb_fp_stats->current_qindex;
1462
1463#if CONFIG_INTERNAL_STATS
1464 memcpy(cpi->mode_chosen_counts, sb_fp_stats->mode_chosen_counts,
1465 sizeof(sb_fp_stats->mode_chosen_counts));
1466#endif // CONFIG_INTERNAL_STATS
1467}
1468
1469// Update the rate costs of some symbols according to the frequency directed
1470// by speed features
1471void av1_set_cost_upd_freq(AV1_COMP *cpi, ThreadData *td,
1472 const TileInfo *const tile_info, const int mi_row,
1473 const int mi_col) {
1474 AV1_COMMON *const cm = &cpi->common;
1475 const int num_planes = av1_num_planes(cm);
1476 MACROBLOCK *const x = &td->mb;
1477 MACROBLOCKD *const xd = &x->e_mbd;
1478
1479 switch (cpi->oxcf.cost_upd_freq.coeff) {
1480 case COST_UPD_TILE: // Tile level
1481 if (mi_row != tile_info->mi_row_start) break;
1482 AOM_FALLTHROUGH_INTENDED;
1483 case COST_UPD_SBROW: // SB row level in tile
1484 if (mi_col != tile_info->mi_col_start) break;
1485 AOM_FALLTHROUGH_INTENDED;
1486 case COST_UPD_SB: // SB level
1487 if (cpi->sf.inter_sf.disable_sb_level_coeff_cost_upd &&
1488 mi_col != tile_info->mi_col_start)
1489 break;
1490 av1_fill_coeff_costs(&x->coeff_costs, xd->tile_ctx, num_planes);
1491 break;
1492 default: assert(0);
1493 }
1494
1495 switch (cpi->oxcf.cost_upd_freq.mode) {
1496 case COST_UPD_TILE: // Tile level
1497 if (mi_row != tile_info->mi_row_start) break;
1498 AOM_FALLTHROUGH_INTENDED;
1499 case COST_UPD_SBROW: // SB row level in tile
1500 if (mi_col != tile_info->mi_col_start) break;
1501 AOM_FALLTHROUGH_INTENDED;
1502 case COST_UPD_SB: // SB level
leolzhao3db7cca2021-01-26 16:53:07 -08001503#if CONFIG_SDP
1504 av1_fill_mode_rates(cm, xd, &x->mode_costs, xd->tile_ctx);
1505#else
Jayasanker Je9ad4752020-06-30 19:30:03 +05301506 av1_fill_mode_rates(cm, &x->mode_costs, xd->tile_ctx);
leolzhao3db7cca2021-01-26 16:53:07 -08001507#endif
Jayasanker Je9ad4752020-06-30 19:30:03 +05301508 break;
1509 default: assert(0);
1510 }
1511 switch (cpi->oxcf.cost_upd_freq.mv) {
1512 case COST_UPD_OFF: break;
1513 case COST_UPD_TILE: // Tile level
1514 if (mi_row != tile_info->mi_row_start) break;
1515 AOM_FALLTHROUGH_INTENDED;
1516 case COST_UPD_SBROW: // SB row level in tile
1517 if (mi_col != tile_info->mi_col_start) break;
1518 AOM_FALLTHROUGH_INTENDED;
1519 case COST_UPD_SB: // SB level
1520 if (cpi->sf.inter_sf.disable_sb_level_mv_cost_upd &&
1521 mi_col != tile_info->mi_col_start)
1522 break;
1523 av1_fill_mv_costs(xd->tile_ctx, cm->features.cur_frame_force_integer_mv,
1524 cm->features.allow_high_precision_mv, &x->mv_costs);
1525 break;
1526 default: assert(0);
1527 }
1528}