Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (c) 2020, Alliance for Open Media. All rights reserved |
| 3 | * |
| 4 | * This source code is subject to the terms of the BSD 2 Clause License and |
| 5 | * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License |
| 6 | * was not distributed with this source code in the LICENSE file, you can |
| 7 | * obtain it at www.aomedia.org/license/software. If the Alliance for Open |
| 8 | * Media Patent License 1.0 was not distributed with this source code in the |
| 9 | * PATENTS file, you can obtain it at www.aomedia.org/license/patent. |
| 10 | */ |
| 11 | |
| 12 | #include "aom_ports/system_state.h" |
| 13 | |
| 14 | #include "av1/common/reconintra.h" |
| 15 | |
| 16 | #include "av1/encoder/encoder.h" |
| 17 | #include "av1/encoder/encodeframe_utils.h" |
| 18 | #include "av1/encoder/partition_strategy.h" |
| 19 | #include "av1/encoder/rdopt.h" |
| 20 | |
| 21 | static AOM_INLINE int set_deltaq_rdmult(const AV1_COMP *const cpi, |
| 22 | const MACROBLOCK *const x) { |
| 23 | const AV1_COMMON *const cm = &cpi->common; |
| 24 | const CommonQuantParams *quant_params = &cm->quant_params; |
| 25 | return av1_compute_rd_mult(cpi, quant_params->base_qindex + x->delta_qindex + |
| 26 | quant_params->y_dc_delta_q); |
| 27 | } |
| 28 | |
| 29 | void av1_set_ssim_rdmult(const AV1_COMP *const cpi, MvCosts *const mv_costs, |
| 30 | const BLOCK_SIZE bsize, const int mi_row, |
| 31 | const int mi_col, int *const rdmult) { |
| 32 | const AV1_COMMON *const cm = &cpi->common; |
| 33 | |
| 34 | const int bsize_base = BLOCK_16X16; |
| 35 | const int num_mi_w = mi_size_wide[bsize_base]; |
| 36 | const int num_mi_h = mi_size_high[bsize_base]; |
| 37 | const int num_cols = (cm->mi_params.mi_cols + num_mi_w - 1) / num_mi_w; |
| 38 | const int num_rows = (cm->mi_params.mi_rows + num_mi_h - 1) / num_mi_h; |
| 39 | const int num_bcols = (mi_size_wide[bsize] + num_mi_w - 1) / num_mi_w; |
| 40 | const int num_brows = (mi_size_high[bsize] + num_mi_h - 1) / num_mi_h; |
| 41 | int row, col; |
| 42 | double num_of_mi = 0.0; |
| 43 | double geom_mean_of_scale = 0.0; |
| 44 | |
Vishesh | 94a6529 | 2020-07-01 15:28:53 +0530 | [diff] [blame] | 45 | assert(cpi->oxcf.tune_cfg.tuning == AOM_TUNE_SSIM); |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 46 | |
| 47 | aom_clear_system_state(); |
| 48 | for (row = mi_row / num_mi_w; |
| 49 | row < num_rows && row < mi_row / num_mi_w + num_brows; ++row) { |
| 50 | for (col = mi_col / num_mi_h; |
| 51 | col < num_cols && col < mi_col / num_mi_h + num_bcols; ++col) { |
| 52 | const int index = row * num_cols + col; |
| 53 | geom_mean_of_scale += log(cpi->ssim_rdmult_scaling_factors[index]); |
| 54 | num_of_mi += 1.0; |
| 55 | } |
| 56 | } |
| 57 | geom_mean_of_scale = exp(geom_mean_of_scale / num_of_mi); |
| 58 | |
| 59 | *rdmult = (int)((double)(*rdmult) * geom_mean_of_scale + 0.5); |
| 60 | *rdmult = AOMMAX(*rdmult, 0); |
| 61 | av1_set_error_per_bit(mv_costs, *rdmult); |
| 62 | aom_clear_system_state(); |
| 63 | } |
| 64 | |
Urvang Joshi | e198bf1 | 2020-10-08 15:37:55 -0700 | [diff] [blame] | 65 | // Return the end column for the current superblock, in unit of TPL blocks. |
| 66 | static int get_superblock_tpl_column_end(const AV1_COMMON *const cm, int mi_col, |
| 67 | int num_mi_w) { |
| 68 | // Find the start column of this superblock. |
| 69 | const int sb_mi_col_start = (mi_col >> cm->seq_params.mib_size_log2) |
| 70 | << cm->seq_params.mib_size_log2; |
| 71 | // Same but in superres upscaled dimension. |
| 72 | const int sb_mi_col_start_sr = |
| 73 | coded_to_superres_mi(sb_mi_col_start, cm->superres_scale_denominator); |
| 74 | // Width of this superblock in mi units. |
| 75 | const int sb_mi_width = mi_size_wide[cm->seq_params.sb_size]; |
| 76 | // Same but in superres upscaled dimension. |
| 77 | const int sb_mi_width_sr = |
| 78 | coded_to_superres_mi(sb_mi_width, cm->superres_scale_denominator); |
| 79 | // Superblock end in mi units. |
| 80 | const int sb_mi_end = sb_mi_col_start_sr + sb_mi_width_sr; |
| 81 | // Superblock end in TPL units. |
| 82 | return (sb_mi_end + num_mi_w - 1) / num_mi_w; |
| 83 | } |
| 84 | |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 85 | int av1_get_hier_tpl_rdmult(const AV1_COMP *const cpi, MACROBLOCK *const x, |
| 86 | const BLOCK_SIZE bsize, const int mi_row, |
| 87 | const int mi_col, int orig_rdmult) { |
| 88 | const AV1_COMMON *const cm = &cpi->common; |
| 89 | const GF_GROUP *const gf_group = &cpi->gf_group; |
| 90 | assert(IMPLIES(cpi->gf_group.size > 0, |
| 91 | cpi->gf_group.index < cpi->gf_group.size)); |
| 92 | const int tpl_idx = cpi->gf_group.index; |
| 93 | const TplDepFrame *tpl_frame = &cpi->tpl_data.tpl_frame[tpl_idx]; |
| 94 | const int deltaq_rdmult = set_deltaq_rdmult(cpi, x); |
| 95 | if (tpl_frame->is_valid == 0) return deltaq_rdmult; |
Deepa K G | 21e5e8e | 2020-03-28 13:26:09 +0530 | [diff] [blame] | 96 | if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return deltaq_rdmult; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 97 | if (tpl_idx >= MAX_TPL_FRAME_IDX) return deltaq_rdmult; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 98 | if (cpi->oxcf.q_cfg.aq_mode != NO_AQ) return deltaq_rdmult; |
| 99 | |
Urvang Joshi | e198bf1 | 2020-10-08 15:37:55 -0700 | [diff] [blame] | 100 | const int mi_col_sr = |
| 101 | coded_to_superres_mi(mi_col, cm->superres_scale_denominator); |
| 102 | const int mi_cols_sr = av1_pixels_to_mi(cm->superres_upscaled_width); |
| 103 | const int block_mi_width_sr = |
| 104 | coded_to_superres_mi(mi_size_wide[bsize], cm->superres_scale_denominator); |
| 105 | |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 106 | const int bsize_base = BLOCK_16X16; |
| 107 | const int num_mi_w = mi_size_wide[bsize_base]; |
| 108 | const int num_mi_h = mi_size_high[bsize_base]; |
Urvang Joshi | e198bf1 | 2020-10-08 15:37:55 -0700 | [diff] [blame] | 109 | const int num_cols = (mi_cols_sr + num_mi_w - 1) / num_mi_w; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 110 | const int num_rows = (cm->mi_params.mi_rows + num_mi_h - 1) / num_mi_h; |
Urvang Joshi | e198bf1 | 2020-10-08 15:37:55 -0700 | [diff] [blame] | 111 | const int num_bcols = (block_mi_width_sr + num_mi_w - 1) / num_mi_w; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 112 | const int num_brows = (mi_size_high[bsize] + num_mi_h - 1) / num_mi_h; |
Urvang Joshi | e198bf1 | 2020-10-08 15:37:55 -0700 | [diff] [blame] | 113 | // This is required because the end col of superblock may be off by 1 in case |
| 114 | // of superres. |
| 115 | const int sb_bcol_end = get_superblock_tpl_column_end(cm, mi_col, num_mi_w); |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 116 | int row, col; |
| 117 | double base_block_count = 0.0; |
| 118 | double geom_mean_of_scale = 0.0; |
| 119 | aom_clear_system_state(); |
| 120 | for (row = mi_row / num_mi_w; |
| 121 | row < num_rows && row < mi_row / num_mi_w + num_brows; ++row) { |
Urvang Joshi | e198bf1 | 2020-10-08 15:37:55 -0700 | [diff] [blame] | 122 | for (col = mi_col_sr / num_mi_h; |
| 123 | col < num_cols && col < mi_col_sr / num_mi_h + num_bcols && |
| 124 | col < sb_bcol_end; |
| 125 | ++col) { |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 126 | const int index = row * num_cols + col; |
| 127 | geom_mean_of_scale += log(cpi->tpl_sb_rdmult_scaling_factors[index]); |
| 128 | base_block_count += 1.0; |
| 129 | } |
| 130 | } |
| 131 | geom_mean_of_scale = exp(geom_mean_of_scale / base_block_count); |
| 132 | int rdmult = (int)((double)orig_rdmult * geom_mean_of_scale + 0.5); |
| 133 | rdmult = AOMMAX(rdmult, 0); |
| 134 | av1_set_error_per_bit(&x->mv_costs, rdmult); |
| 135 | aom_clear_system_state(); |
| 136 | if (bsize == cm->seq_params.sb_size) { |
| 137 | const int rdmult_sb = set_deltaq_rdmult(cpi, x); |
| 138 | assert(rdmult_sb == rdmult); |
| 139 | (void)rdmult_sb; |
| 140 | } |
| 141 | return rdmult; |
| 142 | } |
| 143 | |
// Accumulate switchable interpolation filter statistics for a block into
// the frame counts.
static AOM_INLINE void update_filter_type_count(FRAME_COUNTS *counts,
                                                const MACROBLOCKD *xd,
                                                const MB_MODE_INFO *mbmi) {
#if CONFIG_REMOVE_DUAL_FILTER
  // Single filter per block: one context lookup, one counter bump.
  const int ctx = av1_get_pred_context_switchable_interp(xd, 0);
  ++counts->switchable_interp[ctx][mbmi->interp_fltr];
#else
  // Dual-filter configuration: count the filter chosen for each of the two
  // directions separately, each with its own prediction context.
  for (int dir = 0; dir < 2; ++dir) {
    const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
    InterpFilter filter = av1_extract_interp_filter(mbmi->interp_filters, dir);
    ++counts->switchable_interp[ctx][filter];
  }
#endif  // CONFIG_REMOVE_DUAL_FILTER
}
| 158 | |
// Reset the transform-size state of a block after its segment id changed:
// re-derive/clamp mbmi->tx_size for the active tx mode, reset the covered
// tx-type map entries to DCT_DCT, and clear the txfm-search skip state.
static void reset_tx_size(MACROBLOCK *x, MB_MODE_INFO *mbmi,
                          const TX_MODE tx_mode) {
  MACROBLOCKD *const xd = &x->e_mbd;
  TxfmSearchInfo *txfm_info = &x->txfm_search_info;
#if CONFIG_SDP
  // With semi-decoupled partitioning, chroma keeps its own sb_type entry.
  int plane_index = xd->tree_type == CHROMA_PART;
#endif
  if (xd->lossless[mbmi->segment_id]) {
    // Lossless segments always use 4x4 transforms.
    mbmi->tx_size = TX_4X4;
  } else if (tx_mode != TX_MODE_SELECT) {
    // Fixed tx mode: the size is fully determined by the block size.
#if CONFIG_SDP
    mbmi->tx_size = tx_size_from_tx_mode(mbmi->sb_type[plane_index], tx_mode);
#else
    mbmi->tx_size = tx_size_from_tx_mode(mbmi->sb_type, tx_mode);
#endif
  } else {
    // TX_MODE_SELECT: clamp the current tx size to at least the size implied
    // by the maximum allowed tx depth for this block size.
#if CONFIG_SDP
    BLOCK_SIZE bsize = mbmi->sb_type[plane_index];
#else
    BLOCK_SIZE bsize = mbmi->sb_type;
#endif
    TX_SIZE min_tx_size = depth_to_tx_size(MAX_TX_DEPTH, bsize);
    mbmi->tx_size = (TX_SIZE)TXSIZEMAX(mbmi->tx_size, min_tx_size);
  }
  if (is_inter_block(mbmi)) {
    // Inter blocks carry a per-unit tx-size array; fill it uniformly.
    memset(mbmi->inter_tx_size, mbmi->tx_size, sizeof(mbmi->inter_tx_size));
  }
  const int stride = xd->tx_type_map_stride;
  // Reset every tx-type entry covered by this block to the default DCT_DCT.
#if CONFIG_SDP
  const int bw = mi_size_wide[mbmi->sb_type[plane_index]];
  for (int row = 0; row < mi_size_high[mbmi->sb_type[plane_index]]; ++row) {
#else
  const int bw = mi_size_wide[mbmi->sb_type];
  for (int row = 0; row < mi_size_high[mbmi->sb_type]; ++row) {
#endif
    memset(xd->tx_type_map + row * stride, DCT_DCT,
           bw * sizeof(xd->tx_type_map[0]));
  }
  av1_zero(txfm_info->blk_skip);
  txfm_info->skip_txfm = 0;
}
| 200 | |
// This function will copy the best reference mode information from
// MB_MODE_INFO_EXT_FRAME to MB_MODE_INFO_EXT.
// Only the row indexed by ref_frame_type is filled in the per-reference
// arrays; mode_context/ref_mv_count are scalars in the frame-level struct.
// NOTE(review): sizeof(...[USABLE_REF_MV_STACK_SIZE]) is unevaluated (C11
// 6.5.3.4), so there is no out-of-bounds access — it measures one row's
// type. Presumably one row equals the frame-level array size; confirm
// against the struct definitions.
static INLINE void copy_mbmi_ext_frame_to_mbmi_ext(
    MB_MODE_INFO_EXT *mbmi_ext,
    const MB_MODE_INFO_EXT_FRAME *const mbmi_ext_best, uint8_t ref_frame_type) {
  memcpy(mbmi_ext->ref_mv_stack[ref_frame_type], mbmi_ext_best->ref_mv_stack,
         sizeof(mbmi_ext->ref_mv_stack[USABLE_REF_MV_STACK_SIZE]));
  memcpy(mbmi_ext->weight[ref_frame_type], mbmi_ext_best->weight,
         sizeof(mbmi_ext->weight[USABLE_REF_MV_STACK_SIZE]));
  mbmi_ext->mode_context[ref_frame_type] = mbmi_ext_best->mode_context;
  mbmi_ext->ref_mv_count[ref_frame_type] = mbmi_ext_best->ref_mv_count;
  memcpy(mbmi_ext->global_mvs, mbmi_ext_best->global_mvs,
         sizeof(mbmi_ext->global_mvs));
}
| 215 | |
// Commit the picked mode (held in ctx) for the block at (mi_row, mi_col):
// copy mode info into the frame's mi grid, restore the coefficient/context
// buffers chosen during mode search, update segmentation-dependent state,
// and — when not a dry run — accumulate encoder statistics and frame MVs.
void av1_update_state(const AV1_COMP *const cpi, ThreadData *td,
                      const PICK_MODE_CONTEXT *const ctx, int mi_row,
                      int mi_col, BLOCK_SIZE bsize, RUN_TYPE dry_run) {
  int i, x_idx, y;
  const AV1_COMMON *const cm = &cpi->common;
  const CommonModeInfoParams *const mi_params = &cm->mi_params;
  const int num_planes = av1_num_planes(cm);
  RD_COUNTS *const rdc = &td->rd_counts;
  MACROBLOCK *const x = &td->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  struct macroblock_plane *const p = x->plane;
  struct macroblockd_plane *const pd = xd->plane;
  const MB_MODE_INFO *const mi = &ctx->mic;
  MB_MODE_INFO *const mi_addr = xd->mi[0];
  const struct segmentation *const seg = &cm->seg;
  assert(bsize < BLOCK_SIZES_ALL);
#if CONFIG_SDP
  // Block dimensions in mi units; SDP selects the luma or chroma sb_type
  // depending on which partition tree is being coded.
  const int bw = mi_size_wide[mi->sb_type[xd->tree_type == CHROMA_PART]];
  const int bh = mi_size_high[mi->sb_type[xd->tree_type == CHROMA_PART]];
#else
  const int bw = mi_size_wide[mi->sb_type];
  const int bh = mi_size_high[mi->sb_type];
#endif
  const int mis = mi_params->mi_stride;
  const int mi_width = mi_size_wide[bsize];
  const int mi_height = mi_size_high[bsize];
  TxfmSearchInfo *txfm_info = &x->txfm_search_info;
#if CONFIG_SDP
  assert(mi->sb_type[xd->tree_type == CHROMA_PART] == bsize);
#else
  assert(mi->sb_type == bsize);
#endif

  // Overwrite the current mi entry with the picked mode info.
  *mi_addr = *mi;
#if CONFIG_SDP
  // Chroma-only passes do not own the reference-MV state.
  if (xd->tree_type != CHROMA_PART)
#endif
    copy_mbmi_ext_frame_to_mbmi_ext(x->mbmi_ext, &ctx->mbmi_ext_best,
                                    av1_ref_frame_type(ctx->mic.ref_frame));

  // Restore the per-4x4 skip decisions recorded during mode search.
  memcpy(txfm_info->blk_skip, ctx->blk_skip,
         sizeof(txfm_info->blk_skip[0]) * ctx->num_4x4_blk);

  txfm_info->skip_txfm = ctx->rd_stats.skip_txfm;
#if CONFIG_SDP
  if (xd->tree_type != CHROMA_PART) {
#endif
    xd->tx_type_map = ctx->tx_type_map;
    xd->tx_type_map_stride = mi_size_wide[bsize];
    // If not dry_run, copy the transform type data into the frame level
    // buffer. Encoder will fetch tx types when writing bitstream.
    if (!dry_run) {
      const int grid_idx = get_mi_grid_idx(mi_params, mi_row, mi_col);
      uint8_t *const tx_type_map = mi_params->tx_type_map + grid_idx;
      const int mi_stride = mi_params->mi_stride;
      for (int blk_row = 0; blk_row < bh; ++blk_row) {
        av1_copy_array(tx_type_map + blk_row * mi_stride,
                       xd->tx_type_map + blk_row * xd->tx_type_map_stride, bw);
      }
      // Point xd at the frame-level buffer from now on.
      xd->tx_type_map = tx_type_map;
      xd->tx_type_map_stride = mi_stride;
    }
#if CONFIG_SDP
  }
#endif

  // If segmentation in use
  if (seg->enabled) {
    // For in frame complexity AQ copy the segment id from the segment map.
    if (cpi->oxcf.q_cfg.aq_mode == COMPLEXITY_AQ) {
      const uint8_t *const map =
          seg->update_map ? cpi->enc_seg.map : cm->last_frame_seg_map;
      mi_addr->segment_id =
          map ? get_segment_id(mi_params, map, bsize, mi_row, mi_col) : 0;
      // Segment id may change the (lossless) tx constraints; reset tx state.
      reset_tx_size(x, mi_addr, x->txfm_search_params.tx_mode_search_type);
    }
    // Else for cyclic refresh mode update the segment map, set the segment id
    // and then update the quantizer.
#if CONFIG_SDP
    if (cpi->oxcf.q_cfg.aq_mode == CYCLIC_REFRESH_AQ &&
        xd->tree_type == SHARED_PART) {
#else
    if (cpi->oxcf.q_cfg.aq_mode == CYCLIC_REFRESH_AQ) {
#endif
      av1_cyclic_refresh_update_segment(cpi, mi_addr, mi_row, mi_col, bsize,
                                        ctx->rd_stats.rate, ctx->rd_stats.dist,
                                        txfm_info->skip_txfm);
    }
    // Segment change may have made CfL illegal for this block; fall back.
    if (mi_addr->uv_mode == UV_CFL_PRED && !is_cfl_allowed(xd))
      mi_addr->uv_mode = UV_DC_PRED;
  }
  // Point the plane buffers at the coding context picked for this block.
#if CONFIG_SDP
  // In a chroma-only pass, skip the luma plane (index 0).
  for (i = (xd->tree_type == CHROMA_PART); i < num_planes; ++i) {
#else
  for (i = 0; i < num_planes; ++i) {
#endif
    p[i].coeff = ctx->coeff[i];
    p[i].qcoeff = ctx->qcoeff[i];
    p[i].dqcoeff = ctx->dqcoeff[i];
    p[i].eobs = ctx->eobs[i];
    p[i].txb_entropy_ctx = ctx->txb_entropy_ctx[i];
  }
  for (i = 0; i < 2; ++i) pd[i].color_index_map = ctx->color_index_map[i];
  // Restore the coding context of the MB to that that was in place
  // when the mode was picked for it
  for (y = 0; y < mi_height; y++) {
    for (x_idx = 0; x_idx < mi_width; x_idx++) {
      // Only touch mi entries that lie inside the frame.
      if ((xd->mb_to_right_edge >> (3 + MI_SIZE_LOG2)) + mi_width > x_idx &&
          (xd->mb_to_bottom_edge >> (3 + MI_SIZE_LOG2)) + mi_height > y) {
#if CONFIG_SDP
        const int mi_idx =
            get_alloc_mi_idx(mi_params, mi_row + y, mi_col + x_idx);
        xd->mi[x_idx + y * mis] = &mi_params->mi_alloc[mi_idx];
        if (xd->tree_type == LUMA_PART) {
          // Luma pass: copy the whole mode info by value.
          *(xd->mi[x_idx + y * mis]) = *mi_addr;
        } else if (xd->tree_type == CHROMA_PART) {
          // Chroma pass: merge only the chroma-owned fields so the luma
          // fields written by the luma pass are preserved.
          xd->mi[x_idx + y * mis]->sb_type[PLANE_TYPE_UV] =
              mi_addr->sb_type[PLANE_TYPE_UV];
          xd->mi[x_idx + y * mis]->uv_mode = mi_addr->uv_mode;
          xd->mi[x_idx + y * mis]->angle_delta[PLANE_TYPE_UV] =
              mi_addr->angle_delta[PLANE_TYPE_UV];
          xd->mi[x_idx + y * mis]->cfl_alpha_signs = mi_addr->cfl_alpha_signs;
          xd->mi[x_idx + y * mis]->cfl_alpha_idx = mi_addr->cfl_alpha_idx;
          xd->mi[x_idx + y * mis]->partition = mi_addr->partition;
          xd->mi[x_idx + y * mis]
              ->palette_mode_info.palette_size[PLANE_TYPE_UV] =
              mi_addr->palette_mode_info.palette_size[PLANE_TYPE_UV];
          // UV palette colors occupy indices [PALETTE_MAX_SIZE, 3*PALETTE_MAX_SIZE).
          for (i = PALETTE_MAX_SIZE; i < 3 * PALETTE_MAX_SIZE; i++)
            xd->mi[x_idx + y * mis]->palette_mode_info.palette_colors[i] =
                mi_addr->palette_mode_info.palette_colors[i];
        } else {
          // Shared tree: all grid entries alias the same mode info.
          xd->mi[x_idx + y * mis] = mi_addr;
        }
#else
        xd->mi[x_idx + y * mis] = mi_addr;
#endif
      }
    }
  }

  if (cpi->oxcf.q_cfg.aq_mode)
    av1_init_plane_quantizers(cpi, x, mi_addr->segment_id);

  // Statistics below are only gathered for real (non-dry-run) encodes.
  if (dry_run) return;

#if CONFIG_INTERNAL_STATS
  {
    unsigned int *const mode_chosen_counts =
        (unsigned int *)cpi->mode_chosen_counts;  // Cast const away.
    if (frame_is_intra_only(cm)) {
      // Map intra prediction modes to their mode-search threshold indices.
      static const int kf_mode_index[] = {
        THR_DC /*DC_PRED*/,
        THR_V_PRED /*V_PRED*/,
        THR_H_PRED /*H_PRED*/,
        THR_D45_PRED /*D45_PRED*/,
        THR_D135_PRED /*D135_PRED*/,
        THR_D113_PRED /*D113_PRED*/,
        THR_D157_PRED /*D157_PRED*/,
        THR_D203_PRED /*D203_PRED*/,
        THR_D67_PRED /*D67_PRED*/,
        THR_SMOOTH, /*SMOOTH_PRED*/
        THR_SMOOTH_V, /*SMOOTH_V_PRED*/
        THR_SMOOTH_H, /*SMOOTH_H_PRED*/
        THR_PAETH /*PAETH_PRED*/,
      };
      ++mode_chosen_counts[kf_mode_index[mi_addr->mode]];
    } else {
      // Note how often each mode chosen as best
      ++mode_chosen_counts[ctx->best_mode_index];
    }
  }
#endif
  if (!frame_is_intra_only(cm)) {
    if (is_inter_block(mi_addr)) {
      // TODO(sarahparker): global motion stats need to be handled per-tile
      // to be compatible with tile-based threading.
      update_global_motion_used(mi_addr->mode, bsize, mi_addr, rdc);
    }

    if (cm->features.interp_filter == SWITCHABLE &&
        mi_addr->motion_mode != WARPED_CAUSAL &&
        !is_nontrans_global_motion(xd, xd->mi[0])) {
      update_filter_type_count(td->counts, xd, mi_addr);
    }

    // Accumulate the single/compound reference prediction cost differences.
    rdc->comp_pred_diff[SINGLE_REFERENCE] += ctx->single_pred_diff;
    rdc->comp_pred_diff[COMPOUND_REFERENCE] += ctx->comp_pred_diff;
    rdc->comp_pred_diff[REFERENCE_MODE_SELECT] += ctx->hybrid_pred_diff;
  }

  // Clamp the copy region to the frame boundary.
  const int x_mis = AOMMIN(bw, mi_params->mi_cols - mi_col);
  const int y_mis = AOMMIN(bh, mi_params->mi_rows - mi_row);
  if (cm->seq_params.order_hint_info.enable_ref_frame_mvs)
    av1_copy_frame_mvs(cm, mi, mi_row, mi_col, x_mis, y_mis);
}
| 411 | |
// Update the entropy-coding CDFs (and, under CONFIG_ENTROPY_STATS, the raw
// counts) for an inter prediction mode. The mode is signalled as a cascade
// of binary decisions — NEWMV? else GLOBALMV? else NEARESTMV/NEARMV — each
// with its own context field extracted from mode_context.
void av1_update_inter_mode_stats(FRAME_CONTEXT *fc, FRAME_COUNTS *counts,
                                 PREDICTION_MODE mode, int16_t mode_context) {
  (void)counts;

  // First binary decision: is this NEWMV?
  int16_t mode_ctx = mode_context & NEWMV_CTX_MASK;
  if (mode == NEWMV) {
#if CONFIG_ENTROPY_STATS
    ++counts->newmv_mode[mode_ctx][0];
#endif
    update_cdf(fc->newmv_cdf[mode_ctx], 0, 2);
    return;
  }

#if CONFIG_ENTROPY_STATS
  ++counts->newmv_mode[mode_ctx][1];
#endif
  update_cdf(fc->newmv_cdf[mode_ctx], 1, 2);

  // Second decision: is this GLOBALMV?
  mode_ctx = (mode_context >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
  if (mode == GLOBALMV) {
#if CONFIG_ENTROPY_STATS
    ++counts->zeromv_mode[mode_ctx][0];
#endif
    update_cdf(fc->zeromv_cdf[mode_ctx], 0, 2);
    return;
  }

#if CONFIG_ENTROPY_STATS
  ++counts->zeromv_mode[mode_ctx][1];
#endif
  update_cdf(fc->zeromv_cdf[mode_ctx], 1, 2);

  // Final decision: NEARESTMV vs NEARMV.
  mode_ctx = (mode_context >> REFMV_OFFSET) & REFMV_CTX_MASK;
#if CONFIG_ENTROPY_STATS
  ++counts->refmv_mode[mode_ctx][mode != NEARESTMV];
#endif
  update_cdf(fc->refmv_cdf[mode_ctx], mode != NEARESTMV, 2);
}
| 450 | |
// Update the palette-mode CDFs (and, under CONFIG_ENTROPY_STATS, the raw
// counts) for a block: whether luma/chroma palette mode is used and, when
// used, the palette sizes.
static void update_palette_cdf(MACROBLOCKD *xd, const MB_MODE_INFO *const mbmi,
                               FRAME_COUNTS *counts) {
  FRAME_CONTEXT *fc = xd->tile_ctx;
#if CONFIG_SDP
  // With semi-decoupled partitioning, pick the sb_type of the active tree.
  const BLOCK_SIZE bsize = mbmi->sb_type[xd->tree_type == CHROMA_PART];
#else
  const BLOCK_SIZE bsize = mbmi->sb_type;
#endif
  const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
  const int palette_bsize_ctx = av1_get_palette_bsize_ctx(bsize);

  (void)counts;
  // Luma palette is only signalled for DC_PRED blocks.
#if CONFIG_SDP
  if (mbmi->mode == DC_PRED && xd->tree_type != CHROMA_PART) {
#else
  if (mbmi->mode == DC_PRED) {
#endif
    const int n = pmi->palette_size[0];
    const int palette_mode_ctx = av1_get_palette_mode_ctx(xd);

#if CONFIG_ENTROPY_STATS
    ++counts->palette_y_mode[palette_bsize_ctx][palette_mode_ctx][n > 0];
#endif
    // n > 0 means the block actually uses a luma palette.
    update_cdf(fc->palette_y_mode_cdf[palette_bsize_ctx][palette_mode_ctx],
               n > 0, 2);
    if (n > 0) {
#if CONFIG_ENTROPY_STATS
      ++counts->palette_y_size[palette_bsize_ctx][n - PALETTE_MIN_SIZE];
#endif
      update_cdf(fc->palette_y_size_cdf[palette_bsize_ctx],
                 n - PALETTE_MIN_SIZE, PALETTE_SIZES);
    }
  }
  // Chroma palette is only signalled for UV_DC_PRED blocks.
#if CONFIG_SDP
  if (mbmi->uv_mode == UV_DC_PRED && xd->tree_type != LUMA_PART) {
#else
  if (mbmi->uv_mode == UV_DC_PRED) {
#endif
    const int n = pmi->palette_size[1];
    // Context: whether the luma plane of this block also uses a palette.
    const int palette_uv_mode_ctx = (pmi->palette_size[0] > 0);

#if CONFIG_ENTROPY_STATS
    ++counts->palette_uv_mode[palette_uv_mode_ctx][n > 0];
#endif
    update_cdf(fc->palette_uv_mode_cdf[palette_uv_mode_ctx], n > 0, 2);

    if (n > 0) {
#if CONFIG_ENTROPY_STATS
      ++counts->palette_uv_size[palette_bsize_ctx][n - PALETTE_MIN_SIZE];
#endif
      update_cdf(fc->palette_uv_size_cdf[palette_bsize_ctx],
                 n - PALETTE_MIN_SIZE, PALETTE_SIZES);
    }
  }
}
| 506 | |
| 507 | void av1_sum_intra_stats(const AV1_COMMON *const cm, FRAME_COUNTS *counts, |
| 508 | MACROBLOCKD *xd, const MB_MODE_INFO *const mbmi, |
| 509 | const MB_MODE_INFO *above_mi, |
| 510 | const MB_MODE_INFO *left_mi, const int intraonly) { |
| 511 | FRAME_CONTEXT *fc = xd->tile_ctx; |
| 512 | const PREDICTION_MODE y_mode = mbmi->mode; |
| 513 | (void)counts; |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 514 | #if CONFIG_SDP |
| 515 | const BLOCK_SIZE bsize = mbmi->sb_type[xd->tree_type == CHROMA_PART]; |
| 516 | #else |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 517 | const BLOCK_SIZE bsize = mbmi->sb_type; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 518 | #endif |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 519 | #if CONFIG_SDP |
| 520 | if (xd->tree_type != CHROMA_PART) { |
| 521 | #endif |
| 522 | if (intraonly) { |
| 523 | #if CONFIG_ENTROPY_STATS |
| 524 | const PREDICTION_MODE above = av1_above_block_mode(above_mi); |
| 525 | const PREDICTION_MODE left = av1_left_block_mode(left_mi); |
| 526 | const int above_ctx = intra_mode_context[above]; |
| 527 | const int left_ctx = intra_mode_context[left]; |
| 528 | ++counts->kf_y_mode[above_ctx][left_ctx][y_mode]; |
| 529 | #endif // CONFIG_ENTROPY_STATS |
| 530 | update_cdf(get_y_mode_cdf(fc, above_mi, left_mi), y_mode, INTRA_MODES); |
| 531 | } else { |
| 532 | #if CONFIG_ENTROPY_STATS |
| 533 | ++counts->y_mode[size_group_lookup[bsize]][y_mode]; |
| 534 | #endif // CONFIG_ENTROPY_STATS |
| 535 | update_cdf(fc->y_mode_cdf[size_group_lookup[bsize]], y_mode, INTRA_MODES); |
| 536 | } |
| 537 | |
| 538 | if (av1_filter_intra_allowed(cm, mbmi)) { |
| 539 | const int use_filter_intra_mode = |
| 540 | mbmi->filter_intra_mode_info.use_filter_intra; |
| 541 | #if CONFIG_ENTROPY_STATS |
| 542 | #if CONFIG_SDP |
| 543 | ++counts->filter_intra[mbmi->sb_type[xd->tree_type == CHROMA_PART]] |
| 544 | [use_filter_intra_mode]; |
| 545 | #else |
| 546 | ++counts->filter_intra[mbmi->sb_type][use_filter_intra_mode]; |
| 547 | #endif |
| 548 | if (use_filter_intra_mode) { |
| 549 | ++counts->filter_intra_mode[mbmi->filter_intra_mode_info |
| 550 | .filter_intra_mode]; |
| 551 | } |
| 552 | #endif // CONFIG_ENTROPY_STATS |
| 553 | #if CONFIG_SDP |
| 554 | update_cdf( |
| 555 | fc->filter_intra_cdfs[mbmi->sb_type[xd->tree_type == CHROMA_PART]], |
| 556 | use_filter_intra_mode, 2); |
| 557 | #else |
| 558 | update_cdf(fc->filter_intra_cdfs[mbmi->sb_type], use_filter_intra_mode, 2); |
| 559 | #endif |
| 560 | if (use_filter_intra_mode) { |
| 561 | update_cdf(fc->filter_intra_mode_cdf, |
| 562 | mbmi->filter_intra_mode_info.filter_intra_mode, |
| 563 | FILTER_INTRA_MODES); |
| 564 | } |
| 565 | } |
| 566 | if (av1_is_directional_mode(mbmi->mode) && av1_use_angle_delta(bsize)) { |
| 567 | #if CONFIG_ENTROPY_STATS |
| 568 | ++counts->angle_delta[mbmi->mode - V_PRED] |
| 569 | [mbmi->angle_delta[PLANE_TYPE_Y] + MAX_ANGLE_DELTA]; |
| 570 | #endif |
| 571 | #if CONFIG_SDP |
| 572 | update_cdf(fc->angle_delta_cdf[PLANE_TYPE_Y][mbmi->mode - V_PRED], |
| 573 | mbmi->angle_delta[PLANE_TYPE_Y] + MAX_ANGLE_DELTA, |
| 574 | 2 * MAX_ANGLE_DELTA + 1); |
| 575 | #else |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 576 | update_cdf(fc->angle_delta_cdf[mbmi->mode - V_PRED], |
| 577 | mbmi->angle_delta[PLANE_TYPE_Y] + MAX_ANGLE_DELTA, |
| 578 | 2 * MAX_ANGLE_DELTA + 1); |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 579 | #endif |
| 580 | } |
| 581 | #if CONFIG_SDP |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 582 | } |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 583 | #endif |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 584 | |
| 585 | if (!xd->is_chroma_ref) return; |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 586 | #if CONFIG_SDP |
| 587 | if (xd->tree_type != LUMA_PART) { |
| 588 | #endif |
| 589 | const UV_PREDICTION_MODE uv_mode = mbmi->uv_mode; |
| 590 | const CFL_ALLOWED_TYPE cfl_allowed = is_cfl_allowed(xd); |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 591 | #if CONFIG_ENTROPY_STATS |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 592 | ++counts->uv_mode[cfl_allowed][y_mode][uv_mode]; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 593 | #endif // CONFIG_ENTROPY_STATS |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 594 | update_cdf(fc->uv_mode_cdf[cfl_allowed][y_mode], uv_mode, |
| 595 | UV_INTRA_MODES - !cfl_allowed); |
| 596 | if (uv_mode == UV_CFL_PRED) { |
| 597 | const int8_t joint_sign = mbmi->cfl_alpha_signs; |
| 598 | const uint8_t idx = mbmi->cfl_alpha_idx; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 599 | |
| 600 | #if CONFIG_ENTROPY_STATS |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 601 | ++counts->cfl_sign[joint_sign]; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 602 | #endif |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 603 | update_cdf(fc->cfl_sign_cdf, joint_sign, CFL_JOINT_SIGNS); |
| 604 | if (CFL_SIGN_U(joint_sign) != CFL_SIGN_ZERO) { |
| 605 | aom_cdf_prob *cdf_u = fc->cfl_alpha_cdf[CFL_CONTEXT_U(joint_sign)]; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 606 | |
| 607 | #if CONFIG_ENTROPY_STATS |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 608 | ++counts->cfl_alpha[CFL_CONTEXT_U(joint_sign)][CFL_IDX_U(idx)]; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 609 | #endif |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 610 | update_cdf(cdf_u, CFL_IDX_U(idx), CFL_ALPHABET_SIZE); |
| 611 | } |
| 612 | if (CFL_SIGN_V(joint_sign) != CFL_SIGN_ZERO) { |
| 613 | aom_cdf_prob *cdf_v = fc->cfl_alpha_cdf[CFL_CONTEXT_V(joint_sign)]; |
| 614 | |
| 615 | #if CONFIG_ENTROPY_STATS |
| 616 | ++counts->cfl_alpha[CFL_CONTEXT_V(joint_sign)][CFL_IDX_V(idx)]; |
| 617 | #endif |
| 618 | update_cdf(cdf_v, CFL_IDX_V(idx), CFL_ALPHABET_SIZE); |
| 619 | } |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 620 | } |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 621 | if (av1_is_directional_mode(get_uv_mode(uv_mode)) && |
| 622 | av1_use_angle_delta(bsize)) { |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 623 | #if CONFIG_ENTROPY_STATS |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 624 | ++counts->angle_delta[uv_mode - UV_V_PRED] |
| 625 | [mbmi->angle_delta[PLANE_TYPE_UV] + MAX_ANGLE_DELTA]; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 626 | #endif |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 627 | #if CONFIG_SDP |
leolzhao | 0214160 | 2021-02-16 15:06:35 -0800 | [diff] [blame^] | 628 | if (cm->seq_params.enable_sdp) |
| 629 | update_cdf(fc->angle_delta_cdf[PLANE_TYPE_UV][uv_mode - UV_V_PRED], |
| 630 | mbmi->angle_delta[PLANE_TYPE_UV] + MAX_ANGLE_DELTA, |
| 631 | 2 * MAX_ANGLE_DELTA + 1); |
| 632 | else |
| 633 | update_cdf(fc->angle_delta_cdf[PLANE_TYPE_Y][uv_mode - UV_V_PRED], |
| 634 | mbmi->angle_delta[PLANE_TYPE_UV] + MAX_ANGLE_DELTA, |
| 635 | 2 * MAX_ANGLE_DELTA + 1); |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 636 | #else |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 637 | update_cdf(fc->angle_delta_cdf[uv_mode - UV_V_PRED], |
| 638 | mbmi->angle_delta[PLANE_TYPE_UV] + MAX_ANGLE_DELTA, |
| 639 | 2 * MAX_ANGLE_DELTA + 1); |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 640 | #endif |
| 641 | } |
| 642 | #if CONFIG_SDP |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 643 | } |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 644 | #endif |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 645 | if (av1_allow_palette(cm->features.allow_screen_content_tools, bsize)) { |
| 646 | update_palette_cdf(xd, mbmi, counts); |
| 647 | } |
| 648 | } |
| 649 | |
// Restores the above/left entropy, partition, and txfm contexts for the
// block at (mi_row, mi_col) from the snapshot previously taken by
// av1_save_context(), undoing context changes made during the RD search.
void av1_restore_context(MACROBLOCK *x, const RD_SEARCH_MACROBLOCK_CONTEXT *ctx,
                         int mi_row, int mi_col, BLOCK_SIZE bsize,
                         const int num_planes) {
  MACROBLOCKD *xd = &x->e_mbd;
  int p;
  const int num_4x4_blocks_wide = mi_size_wide[bsize];
  const int num_4x4_blocks_high = mi_size_high[bsize];
  int mi_width = mi_size_wide[bsize];
  int mi_height = mi_size_high[bsize];
#if CONFIG_SDP
  // With CONFIG_SDP, a chroma-only partition tree starts at plane 1 and
  // skips the luma plane.
  for (p = (xd->tree_type == CHROMA_PART); p < num_planes; p++) {
#else
  for (p = 0; p < num_planes; p++) {
#endif
    int tx_col = mi_col;
    int tx_row = mi_row & MAX_MIB_MASK;  // row offset within the superblock
    // Coefficient entropy contexts; offsets and byte counts are scaled by
    // the plane's chroma subsampling.
    memcpy(
        xd->above_entropy_context[p] + (tx_col >> xd->plane[p].subsampling_x),
        ctx->a + num_4x4_blocks_wide * p,
        (sizeof(ENTROPY_CONTEXT) * num_4x4_blocks_wide) >>
            xd->plane[p].subsampling_x);
    memcpy(xd->left_entropy_context[p] + (tx_row >> xd->plane[p].subsampling_y),
           ctx->l + num_4x4_blocks_high * p,
           (sizeof(ENTROPY_CONTEXT) * num_4x4_blocks_high) >>
               xd->plane[p].subsampling_y);
#if CONFIG_SDP
    // SDP keeps one partition-context array per plane.
    memcpy(xd->above_partition_context[p] + mi_col, ctx->sa + mi_width * p,
           sizeof(*xd->above_partition_context[p]) * mi_width);
    memcpy(xd->left_partition_context[p] + (mi_row & MAX_MIB_MASK),
           ctx->sl + mi_height * p,
           sizeof(xd->left_partition_context[p][0]) * mi_height);
#endif
  }
#if !CONFIG_SDP
  // Without SDP there is a single shared partition context.
  memcpy(xd->above_partition_context + mi_col, ctx->sa,
         sizeof(*xd->above_partition_context) * mi_width);
  memcpy(xd->left_partition_context + (mi_row & MAX_MIB_MASK), ctx->sl,
         sizeof(xd->left_partition_context[0]) * mi_height);
#endif
  // Restore the txfm context pointers first, then their buffered contents.
  xd->above_txfm_context = ctx->p_ta;
  xd->left_txfm_context = ctx->p_tl;
  memcpy(xd->above_txfm_context, ctx->ta,
         sizeof(*xd->above_txfm_context) * mi_width);
  memcpy(xd->left_txfm_context, ctx->tl,
         sizeof(*xd->left_txfm_context) * mi_height);
}
| 696 | |
// Snapshots the above/left entropy, partition, and txfm contexts of the
// block at (mi_row, mi_col) into ctx, so av1_restore_context() can later
// rewind any context mutation done during the RD search.
void av1_save_context(const MACROBLOCK *x, RD_SEARCH_MACROBLOCK_CONTEXT *ctx,
                      int mi_row, int mi_col, BLOCK_SIZE bsize,
                      const int num_planes) {
  const MACROBLOCKD *xd = &x->e_mbd;
  int p;
  int mi_width = mi_size_wide[bsize];
  int mi_height = mi_size_high[bsize];

  // buffer the above/left context information of the block in search.
#if CONFIG_SDP
  // With CONFIG_SDP, a chroma-only partition tree starts at plane 1 and
  // skips the luma plane.
  for (p = (xd->tree_type == CHROMA_PART); p < num_planes; ++p) {
#else
  for (p = 0; p < num_planes; ++p) {
#endif
    int tx_col = mi_col;
    int tx_row = mi_row & MAX_MIB_MASK;  // row offset within the superblock
    // Coefficient entropy contexts, scaled by the plane's chroma subsampling.
    memcpy(
        ctx->a + mi_width * p,
        xd->above_entropy_context[p] + (tx_col >> xd->plane[p].subsampling_x),
        (sizeof(ENTROPY_CONTEXT) * mi_width) >> xd->plane[p].subsampling_x);
    memcpy(ctx->l + mi_height * p,
           xd->left_entropy_context[p] + (tx_row >> xd->plane[p].subsampling_y),
           (sizeof(ENTROPY_CONTEXT) * mi_height) >> xd->plane[p].subsampling_y);
#if CONFIG_SDP
    // SDP keeps one partition-context array per plane.
    memcpy(ctx->sa + mi_width * p, xd->above_partition_context[p] + mi_col,
           sizeof(*xd->above_partition_context[p]) * mi_width);
    memcpy(ctx->sl + mi_height * p,
           xd->left_partition_context[p] + (mi_row & MAX_MIB_MASK),
           sizeof(xd->left_partition_context[p][0]) * mi_height);
#endif
  }
#if !CONFIG_SDP
  // Without SDP there is a single shared partition context.
  memcpy(ctx->sa, xd->above_partition_context + mi_col,
         sizeof(*xd->above_partition_context) * mi_width);
  memcpy(ctx->sl, xd->left_partition_context + (mi_row & MAX_MIB_MASK),
         sizeof(xd->left_partition_context[0]) * mi_height);
#endif
  memcpy(ctx->ta, xd->above_txfm_context,
         sizeof(*xd->above_txfm_context) * mi_width);
  memcpy(ctx->tl, xd->left_txfm_context,
         sizeof(*xd->left_txfm_context) * mi_height);
  // Also remember the txfm context pointer values themselves so the restore
  // can reset them before copying the contents back.
  ctx->p_ta = xd->above_txfm_context;
  ctx->p_tl = xd->left_txfm_context;
}
| 741 | |
| 742 | static void set_partial_sb_partition(const AV1_COMMON *const cm, |
| 743 | MB_MODE_INFO *mi, int bh_in, int bw_in, |
| 744 | int mi_rows_remaining, |
| 745 | int mi_cols_remaining, BLOCK_SIZE bsize, |
| 746 | MB_MODE_INFO **mib) { |
| 747 | int bh = bh_in; |
| 748 | int r, c; |
| 749 | for (r = 0; r < cm->seq_params.mib_size; r += bh) { |
| 750 | int bw = bw_in; |
| 751 | for (c = 0; c < cm->seq_params.mib_size; c += bw) { |
| 752 | const int grid_index = get_mi_grid_idx(&cm->mi_params, r, c); |
| 753 | const int mi_index = get_alloc_mi_idx(&cm->mi_params, r, c); |
| 754 | mib[grid_index] = mi + mi_index; |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 755 | #if CONFIG_SDP |
leolzhao | aa4d769 | 2021-01-28 11:00:33 -0800 | [diff] [blame] | 756 | mib[grid_index]->sb_type[PLANE_TYPE_Y] = |
| 757 | mib[grid_index]->sb_type[PLANE_TYPE_UV] = find_partition_size( |
| 758 | bsize, mi_rows_remaining - r, mi_cols_remaining - c, &bh, &bw); |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 759 | #else |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 760 | mib[grid_index]->sb_type = find_partition_size( |
| 761 | bsize, mi_rows_remaining - r, mi_cols_remaining - c, &bh, &bw); |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 762 | #endif |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 763 | } |
| 764 | } |
| 765 | } |
| 766 | |
| 767 | // This function attempts to set all mode info entries in a given superblock |
| 768 | // to the same block partition size. |
| 769 | // However, at the bottom and right borders of the image the requested size |
| 770 | // may not be allowed in which case this code attempts to choose the largest |
| 771 | // allowable partition. |
| 772 | void av1_set_fixed_partitioning(AV1_COMP *cpi, const TileInfo *const tile, |
| 773 | MB_MODE_INFO **mib, int mi_row, int mi_col, |
| 774 | BLOCK_SIZE bsize) { |
| 775 | AV1_COMMON *const cm = &cpi->common; |
| 776 | const CommonModeInfoParams *const mi_params = &cm->mi_params; |
| 777 | const int mi_rows_remaining = tile->mi_row_end - mi_row; |
| 778 | const int mi_cols_remaining = tile->mi_col_end - mi_col; |
| 779 | MB_MODE_INFO *const mi_upper_left = |
| 780 | mi_params->mi_alloc + get_alloc_mi_idx(mi_params, mi_row, mi_col); |
| 781 | int bh = mi_size_high[bsize]; |
| 782 | int bw = mi_size_wide[bsize]; |
| 783 | |
| 784 | assert(bsize >= mi_params->mi_alloc_bsize && |
| 785 | "Attempted to use bsize < mi_params->mi_alloc_bsize"); |
| 786 | assert((mi_rows_remaining > 0) && (mi_cols_remaining > 0)); |
| 787 | |
| 788 | // Apply the requested partition size to the SB if it is all "in image" |
| 789 | if ((mi_cols_remaining >= cm->seq_params.mib_size) && |
| 790 | (mi_rows_remaining >= cm->seq_params.mib_size)) { |
| 791 | for (int block_row = 0; block_row < cm->seq_params.mib_size; |
| 792 | block_row += bh) { |
| 793 | for (int block_col = 0; block_col < cm->seq_params.mib_size; |
| 794 | block_col += bw) { |
| 795 | const int grid_index = get_mi_grid_idx(mi_params, block_row, block_col); |
| 796 | const int mi_index = get_alloc_mi_idx(mi_params, block_row, block_col); |
| 797 | mib[grid_index] = mi_upper_left + mi_index; |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 798 | #if CONFIG_SDP |
leolzhao | aa4d769 | 2021-01-28 11:00:33 -0800 | [diff] [blame] | 799 | mib[grid_index]->sb_type[PLANE_TYPE_Y] = bsize; |
| 800 | mib[grid_index]->sb_type[PLANE_TYPE_UV] = bsize; |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 801 | #else |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 802 | mib[grid_index]->sb_type = bsize; |
liang zhao | c6f775a | 2020-12-17 11:54:58 -0800 | [diff] [blame] | 803 | #endif |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 804 | } |
| 805 | } |
| 806 | } else { |
| 807 | // Else this is a partial SB. |
| 808 | set_partial_sb_partition(cm, mi_upper_left, bh, bw, mi_rows_remaining, |
| 809 | mi_cols_remaining, bsize, mib); |
| 810 | } |
| 811 | } |
leolzhao | 3db7cca | 2021-01-26 16:53:07 -0800 | [diff] [blame] | 812 | #if CONFIG_SDP |
| 813 | int av1_is_leaf_split_partition(AV1_COMMON *cm, MACROBLOCKD *const xd, |
| 814 | int mi_row, int mi_col, |
| 815 | #else |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 816 | int av1_is_leaf_split_partition(AV1_COMMON *cm, int mi_row, int mi_col, |
leolzhao | 3db7cca | 2021-01-26 16:53:07 -0800 | [diff] [blame] | 817 | #endif |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 818 | BLOCK_SIZE bsize) { |
| 819 | const int bs = mi_size_wide[bsize]; |
| 820 | const int hbs = bs / 2; |
| 821 | assert(bsize >= BLOCK_8X8); |
| 822 | const BLOCK_SIZE subsize = get_partition_subsize(bsize, PARTITION_SPLIT); |
| 823 | |
| 824 | for (int i = 0; i < 4; i++) { |
| 825 | int x_idx = (i & 1) * hbs; |
| 826 | int y_idx = (i >> 1) * hbs; |
| 827 | if ((mi_row + y_idx >= cm->mi_params.mi_rows) || |
| 828 | (mi_col + x_idx >= cm->mi_params.mi_cols)) |
| 829 | return 0; |
leolzhao | 3db7cca | 2021-01-26 16:53:07 -0800 | [diff] [blame] | 830 | #if CONFIG_SDP |
| 831 | if (get_partition(cm, xd->tree_type == CHROMA_PART, mi_row + y_idx, |
| 832 | mi_col + x_idx, subsize) != |
| 833 | #else |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 834 | if (get_partition(cm, mi_row + y_idx, mi_col + x_idx, subsize) != |
leolzhao | 3db7cca | 2021-01-26 16:53:07 -0800 | [diff] [blame] | 835 | #endif |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 836 | PARTITION_NONE && |
| 837 | subsize != BLOCK_8X8) |
| 838 | return 0; |
| 839 | } |
| 840 | return 1; |
| 841 | } |
| 842 | |
| 843 | #if !CONFIG_REALTIME_ONLY |
Jayasanker J | 37596eb | 2020-08-20 16:39:40 +0530 | [diff] [blame] | 844 | int av1_get_rdmult_delta(AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row, |
| 845 | int mi_col, int orig_rdmult) { |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 846 | AV1_COMMON *const cm = &cpi->common; |
| 847 | const GF_GROUP *const gf_group = &cpi->gf_group; |
| 848 | assert(IMPLIES(cpi->gf_group.size > 0, |
| 849 | cpi->gf_group.index < cpi->gf_group.size)); |
| 850 | const int tpl_idx = cpi->gf_group.index; |
| 851 | TplParams *const tpl_data = &cpi->tpl_data; |
| 852 | TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_idx]; |
| 853 | TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr; |
| 854 | const uint8_t block_mis_log2 = tpl_data->tpl_stats_block_mis_log2; |
| 855 | int tpl_stride = tpl_frame->stride; |
| 856 | int64_t intra_cost = 0; |
| 857 | int64_t mc_dep_cost = 0; |
| 858 | const int mi_wide = mi_size_wide[bsize]; |
| 859 | const int mi_high = mi_size_high[bsize]; |
| 860 | |
| 861 | if (tpl_frame->is_valid == 0) return orig_rdmult; |
| 862 | |
Deepa K G | 21e5e8e | 2020-03-28 13:26:09 +0530 | [diff] [blame] | 863 | if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return orig_rdmult; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 864 | |
| 865 | if (cpi->gf_group.index >= MAX_TPL_FRAME_IDX) return orig_rdmult; |
| 866 | |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 867 | int mi_count = 0; |
| 868 | const int mi_col_sr = |
| 869 | coded_to_superres_mi(mi_col, cm->superres_scale_denominator); |
| 870 | const int mi_col_end_sr = |
| 871 | coded_to_superres_mi(mi_col + mi_wide, cm->superres_scale_denominator); |
| 872 | const int mi_cols_sr = av1_pixels_to_mi(cm->superres_upscaled_width); |
| 873 | const int step = 1 << block_mis_log2; |
Urvang Joshi | e198bf1 | 2020-10-08 15:37:55 -0700 | [diff] [blame] | 874 | const int row_step = step; |
| 875 | const int col_step_sr = |
| 876 | coded_to_superres_mi(step, cm->superres_scale_denominator); |
| 877 | for (int row = mi_row; row < mi_row + mi_high; row += row_step) { |
| 878 | for (int col = mi_col_sr; col < mi_col_end_sr; col += col_step_sr) { |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 879 | if (row >= cm->mi_params.mi_rows || col >= mi_cols_sr) continue; |
| 880 | TplDepStats *this_stats = |
| 881 | &tpl_stats[av1_tpl_ptr_pos(row, col, tpl_stride, block_mis_log2)]; |
| 882 | int64_t mc_dep_delta = |
| 883 | RDCOST(tpl_frame->base_rdmult, this_stats->mc_dep_rate, |
| 884 | this_stats->mc_dep_dist); |
| 885 | intra_cost += this_stats->recrf_dist << RDDIV_BITS; |
| 886 | mc_dep_cost += (this_stats->recrf_dist << RDDIV_BITS) + mc_dep_delta; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 887 | mi_count++; |
| 888 | } |
| 889 | } |
Urvang Joshi | e198bf1 | 2020-10-08 15:37:55 -0700 | [diff] [blame] | 890 | assert(mi_count <= MAX_TPL_BLK_IN_SB * MAX_TPL_BLK_IN_SB); |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 891 | |
| 892 | aom_clear_system_state(); |
| 893 | |
| 894 | double beta = 1.0; |
Jayasanker J | 37596eb | 2020-08-20 16:39:40 +0530 | [diff] [blame] | 895 | if (mc_dep_cost > 0 && intra_cost > 0) { |
| 896 | const double r0 = cpi->rd.r0; |
| 897 | const double rk = (double)intra_cost / mc_dep_cost; |
| 898 | beta = (r0 / rk); |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 899 | } |
| 900 | |
| 901 | int rdmult = av1_get_adaptive_rdmult(cpi, beta); |
| 902 | |
| 903 | aom_clear_system_state(); |
| 904 | |
| 905 | rdmult = AOMMIN(rdmult, orig_rdmult * 3 / 2); |
| 906 | rdmult = AOMMAX(rdmult, orig_rdmult * 1 / 2); |
| 907 | |
| 908 | rdmult = AOMMAX(1, rdmult); |
| 909 | |
| 910 | return rdmult; |
| 911 | } |
| 912 | |
| 913 | // Checks to see if a super block is on a horizontal image edge. |
| 914 | // In most cases this is the "real" edge unless there are formatting |
| 915 | // bars embedded in the stream. |
| 916 | int av1_active_h_edge(const AV1_COMP *cpi, int mi_row, int mi_step) { |
| 917 | int top_edge = 0; |
| 918 | int bottom_edge = cpi->common.mi_params.mi_rows; |
| 919 | int is_active_h_edge = 0; |
| 920 | |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 921 | if (((top_edge >= mi_row) && (top_edge < (mi_row + mi_step))) || |
| 922 | ((bottom_edge >= mi_row) && (bottom_edge < (mi_row + mi_step)))) { |
| 923 | is_active_h_edge = 1; |
| 924 | } |
| 925 | return is_active_h_edge; |
| 926 | } |
| 927 | |
| 928 | // Checks to see if a super block is on a vertical image edge. |
| 929 | // In most cases this is the "real" edge unless there are formatting |
| 930 | // bars embedded in the stream. |
| 931 | int av1_active_v_edge(const AV1_COMP *cpi, int mi_col, int mi_step) { |
| 932 | int left_edge = 0; |
| 933 | int right_edge = cpi->common.mi_params.mi_cols; |
| 934 | int is_active_v_edge = 0; |
| 935 | |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 936 | if (((left_edge >= mi_col) && (left_edge < (mi_col + mi_step))) || |
| 937 | ((right_edge >= mi_col) && (right_edge < (mi_col + mi_step)))) { |
| 938 | is_active_v_edge = 1; |
| 939 | } |
| 940 | return is_active_v_edge; |
| 941 | } |
| 942 | |
// Copies the TPL stats covering the superblock at (mi_row, mi_col) into
// sb_enc: per-unit inter/intra costs and motion vectors, plus the row stride
// (sb_enc->tpl_stride) and the count of in-frame units
// (sb_enc->tpl_data_count).  Leaves tpl_data_count at 0 and returns early
// whenever the TPL model is disabled or no valid stats exist for this frame.
void av1_get_tpl_stats_sb(AV1_COMP *cpi, BLOCK_SIZE bsize, int mi_row,
                          int mi_col, SuperBlockEnc *sb_enc) {
  sb_enc->tpl_data_count = 0;

  // Early-outs: TPL disabled, key frame, or an overlay update type.
  if (!cpi->oxcf.algo_cfg.enable_tpl_model) return;
  if (cpi->common.current_frame.frame_type == KEY_FRAME) return;
  const FRAME_UPDATE_TYPE update_type = get_frame_update_type(&cpi->gf_group);
  if (update_type == INTNL_OVERLAY_UPDATE || update_type == OVERLAY_UPDATE ||
      update_type == KFFLT_OVERLAY_UPDATE)
    return;
  assert(IMPLIES(cpi->gf_group.size > 0,
                 cpi->gf_group.index < cpi->gf_group.size));

  AV1_COMMON *const cm = &cpi->common;
  const int gf_group_index = cpi->gf_group.index;
  TplParams *const tpl_data = &cpi->tpl_data;
  TplDepFrame *tpl_frame = &tpl_data->tpl_frame[gf_group_index];
  TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
  int tpl_stride = tpl_frame->stride;
  const int mi_wide = mi_size_wide[bsize];
  const int mi_high = mi_size_high[bsize];

  if (tpl_frame->is_valid == 0) return;
  if (gf_group_index >= MAX_TPL_FRAME_IDX) return;

  // count indexes every walked unit (including out-of-frame ones that get
  // sentinel values); mi_count tallies only in-frame units.
  int mi_count = 0;
  int count = 0;
  const int mi_col_sr =
      coded_to_superres_mi(mi_col, cm->superres_scale_denominator);
  const int mi_col_end_sr =
      coded_to_superres_mi(mi_col + mi_wide, cm->superres_scale_denominator);
  // mi_cols_sr is mi_cols at superres case.
  const int mi_cols_sr = av1_pixels_to_mi(cm->superres_upscaled_width);

  // TPL store unit size is not the same as the motion estimation unit size.
  // Here always use motion estimation size to avoid getting repetitive inter/
  // intra cost.
  const BLOCK_SIZE tpl_bsize = convert_length_to_bsize(tpl_data->tpl_bsize_1d);
  assert(mi_size_wide[tpl_bsize] == mi_size_high[tpl_bsize]);
  const int row_step = mi_size_high[tpl_bsize];
  const int col_step_sr = coded_to_superres_mi(mi_size_wide[tpl_bsize],
                                               cm->superres_scale_denominator);

  // Stride is only based on SB size, and we fill in values for every 16x16
  // block in a SB.
  sb_enc->tpl_stride = (mi_col_end_sr - mi_col_sr) / col_step_sr;

  for (int row = mi_row; row < mi_row + mi_high; row += row_step) {
    for (int col = mi_col_sr; col < mi_col_end_sr; col += col_step_sr) {
      assert(count < MAX_TPL_BLK_IN_SB * MAX_TPL_BLK_IN_SB);
      // Handle partial SB, so that no invalid values are used later.
      if (row >= cm->mi_params.mi_rows || col >= mi_cols_sr) {
        sb_enc->tpl_inter_cost[count] = INT64_MAX;
        sb_enc->tpl_intra_cost[count] = INT64_MAX;
        for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
          sb_enc->tpl_mv[count][i].as_int = INVALID_MV;
        }
        count++;
        continue;
      }

      TplDepStats *this_stats = &tpl_stats[av1_tpl_ptr_pos(
          row, col, tpl_stride, tpl_data->tpl_stats_block_mis_log2)];
      sb_enc->tpl_inter_cost[count] = this_stats->inter_cost;
      sb_enc->tpl_intra_cost[count] = this_stats->intra_cost;
      memcpy(sb_enc->tpl_mv[count], this_stats->mv, sizeof(this_stats->mv));
      mi_count++;
      count++;
    }
  }

  assert(mi_count <= MAX_TPL_BLK_IN_SB * MAX_TPL_BLK_IN_SB);
  sb_enc->tpl_data_count = mi_count;
}
| 1017 | |
// analysis_type 0: Use mc_dep_cost and intra_cost
// analysis_type 1: Use count of best inter predictor chosen
// analysis_type 2: Use cost reduction from intra to inter for best inter
// predictor chosen
//
// Returns a per-block q index for objective delta-q: accumulates the TPL
// intra and motion-compensated dependency costs over the block, derives
// beta = r0 / rk from their ratio, converts beta to a delta-q offset via
// av1_get_deltaq_offset(), clamps the offset to within 9 delta_q_res steps,
// and returns base_qindex + offset clamped to [MINQ, MAXQ].  Returns
// base_qindex unchanged whenever TPL data is missing or inapplicable.
int av1_get_q_for_deltaq_objective(AV1_COMP *const cpi, BLOCK_SIZE bsize,
                                   int mi_row, int mi_col) {
  AV1_COMMON *const cm = &cpi->common;
  const GF_GROUP *const gf_group = &cpi->gf_group;
  assert(IMPLIES(cpi->gf_group.size > 0,
                 cpi->gf_group.index < cpi->gf_group.size));
  const int tpl_idx = cpi->gf_group.index;
  TplParams *const tpl_data = &cpi->tpl_data;
  TplDepFrame *tpl_frame = &tpl_data->tpl_frame[tpl_idx];
  TplDepStats *tpl_stats = tpl_frame->tpl_stats_ptr;
  const uint8_t block_mis_log2 = tpl_data->tpl_stats_block_mis_log2;
  int tpl_stride = tpl_frame->stride;
  int64_t intra_cost = 0;
  int64_t mc_dep_cost = 0;
  const int mi_wide = mi_size_wide[bsize];
  const int mi_high = mi_size_high[bsize];
  const int base_qindex = cm->quant_params.base_qindex;

  // Bail out to the frame's base q index when no usable TPL stats exist.
  if (tpl_frame->is_valid == 0) return base_qindex;

  if (!is_frame_tpl_eligible(gf_group, gf_group->index)) return base_qindex;

  if (cpi->gf_group.index >= MAX_TPL_FRAME_IDX) return base_qindex;

  int mi_count = 0;
  // Column positions are converted to super-resolution MI units; the TPL
  // stats appear to be indexed at the superres (upscaled) resolution.
  const int mi_col_sr =
      coded_to_superres_mi(mi_col, cm->superres_scale_denominator);
  const int mi_col_end_sr =
      coded_to_superres_mi(mi_col + mi_wide, cm->superres_scale_denominator);
  const int mi_cols_sr = av1_pixels_to_mi(cm->superres_upscaled_width);
  const int step = 1 << block_mis_log2;  // TPL stat unit size in MI units
  const int row_step = step;
  const int col_step_sr =
      coded_to_superres_mi(step, cm->superres_scale_denominator);
  for (int row = mi_row; row < mi_row + mi_high; row += row_step) {
    for (int col = mi_col_sr; col < mi_col_end_sr; col += col_step_sr) {
      // Skip TPL units that fall outside the visible frame.
      if (row >= cm->mi_params.mi_rows || col >= mi_cols_sr) continue;
      TplDepStats *this_stats =
          &tpl_stats[av1_tpl_ptr_pos(row, col, tpl_stride, block_mis_log2)];
      int64_t mc_dep_delta =
          RDCOST(tpl_frame->base_rdmult, this_stats->mc_dep_rate,
                 this_stats->mc_dep_dist);
      intra_cost += this_stats->recrf_dist << RDDIV_BITS;
      mc_dep_cost += (this_stats->recrf_dist << RDDIV_BITS) + mc_dep_delta;
      mi_count++;
    }
  }
  assert(mi_count <= MAX_TPL_BLK_IN_SB * MAX_TPL_BLK_IN_SB);

  aom_clear_system_state();

  int offset = 0;
  double beta = 1.0;
  if (mc_dep_cost > 0 && intra_cost > 0) {
    const double r0 = cpi->rd.r0;
    const double rk = (double)intra_cost / mc_dep_cost;
    beta = (r0 / rk);
    assert(beta > 0.0);
  }
  offset = av1_get_deltaq_offset(cpi, base_qindex, beta);
  aom_clear_system_state();

  // Keep the offset within +/- (9 * delta_q_res - 1) of the base q index.
  const DeltaQInfo *const delta_q_info = &cm->delta_q_info;
  offset = AOMMIN(offset, delta_q_info->delta_q_res * 9 - 1);
  offset = AOMMAX(offset, -delta_q_info->delta_q_res * 9 + 1);
  int qindex = cm->quant_params.base_qindex + offset;
  qindex = AOMMIN(qindex, MAXQ);
  qindex = AOMMAX(qindex, MINQ);

  return qindex;
}
| 1093 | #endif // !CONFIG_REALTIME_ONLY |
| 1094 | |
| 1095 | void av1_reset_simple_motion_tree_partition(SIMPLE_MOTION_DATA_TREE *sms_tree, |
| 1096 | BLOCK_SIZE bsize) { |
| 1097 | sms_tree->partitioning = PARTITION_NONE; |
| 1098 | |
| 1099 | if (bsize >= BLOCK_8X8) { |
| 1100 | BLOCK_SIZE subsize = get_partition_subsize(bsize, PARTITION_SPLIT); |
| 1101 | for (int idx = 0; idx < 4; ++idx) |
| 1102 | av1_reset_simple_motion_tree_partition(sms_tree->split[idx], subsize); |
| 1103 | } |
| 1104 | } |
| 1105 | |
| 1106 | // Record the ref frames that have been selected by square partition blocks. |
| 1107 | void av1_update_picked_ref_frames_mask(MACROBLOCK *const x, int ref_type, |
| 1108 | BLOCK_SIZE bsize, int mib_size, |
| 1109 | int mi_row, int mi_col) { |
| 1110 | assert(mi_size_wide[bsize] == mi_size_high[bsize]); |
| 1111 | const int sb_size_mask = mib_size - 1; |
| 1112 | const int mi_row_in_sb = mi_row & sb_size_mask; |
| 1113 | const int mi_col_in_sb = mi_col & sb_size_mask; |
| 1114 | const int mi_size = mi_size_wide[bsize]; |
| 1115 | for (int i = mi_row_in_sb; i < mi_row_in_sb + mi_size; ++i) { |
| 1116 | for (int j = mi_col_in_sb; j < mi_col_in_sb + mi_size; ++j) { |
| 1117 | x->picked_ref_frames_mask[i * 32 + j] |= 1 << ref_type; |
| 1118 | } |
| 1119 | } |
| 1120 | } |
| 1121 | |
| 1122 | static void avg_cdf_symbol(aom_cdf_prob *cdf_ptr_left, aom_cdf_prob *cdf_ptr_tr, |
| 1123 | int num_cdfs, int cdf_stride, int nsymbs, |
| 1124 | int wt_left, int wt_tr) { |
| 1125 | for (int i = 0; i < num_cdfs; i++) { |
| 1126 | for (int j = 0; j <= nsymbs; j++) { |
| 1127 | cdf_ptr_left[i * cdf_stride + j] = |
| 1128 | (aom_cdf_prob)(((int)cdf_ptr_left[i * cdf_stride + j] * wt_left + |
| 1129 | (int)cdf_ptr_tr[i * cdf_stride + j] * wt_tr + |
| 1130 | ((wt_left + wt_tr) / 2)) / |
| 1131 | (wt_left + wt_tr)); |
| 1132 | assert(cdf_ptr_left[i * cdf_stride + j] >= 0 && |
| 1133 | cdf_ptr_left[i * cdf_stride + j] < CDF_PROB_TOP); |
| 1134 | } |
| 1135 | } |
| 1136 | } |
| 1137 | |
// Averages the CDF table `cname_left` with `cname_tr` in place (into
// cname_left), using weights wt_left / wt_tr taken from the enclosing
// scope.  nsymbs is the symbol count of each CDF in the table.
#define AVERAGE_CDF(cname_left, cname_tr, nsymbs) \
  AVG_CDF_STRIDE(cname_left, cname_tr, nsymbs, CDF_SIZE(nsymbs))

// As AVERAGE_CDF, but for tables whose per-CDF storage stride differs from
// CDF_SIZE(nsymbs).  Derives the number of CDFs from the array size (so the
// arguments must be actual arrays, not pointers) and forwards to
// avg_cdf_symbol().
#define AVG_CDF_STRIDE(cname_left, cname_tr, nsymbs, cdf_stride)           \
  do {                                                                     \
    aom_cdf_prob *cdf_ptr_left = (aom_cdf_prob *)cname_left;               \
    aom_cdf_prob *cdf_ptr_tr = (aom_cdf_prob *)cname_tr;                   \
    int array_size = (int)sizeof(cname_left) / sizeof(aom_cdf_prob);       \
    int num_cdfs = array_size / cdf_stride;                                \
    avg_cdf_symbol(cdf_ptr_left, cdf_ptr_tr, num_cdfs, cdf_stride, nsymbs, \
                   wt_left, wt_tr);                                        \
  } while (0)
| 1150 | |
// Weighted in-place blend of two motion-vector (NMV) probability contexts:
// every CDF of nmv_left is averaged with the corresponding CDF of nmv_tr
// using weights wt_left / wt_tr (expanded through AVERAGE_CDF into
// avg_cdf_symbol).
static void avg_nmv(nmv_context *nmv_left, nmv_context *nmv_tr, int wt_left,
                    int wt_tr) {
  AVERAGE_CDF(nmv_left->joints_cdf, nmv_tr->joints_cdf, 4);
  // Blend the CDFs of both MV components.
  for (int i = 0; i < 2; i++) {
    AVERAGE_CDF(nmv_left->comps[i].classes_cdf, nmv_tr->comps[i].classes_cdf,
                MV_CLASSES);
    AVERAGE_CDF(nmv_left->comps[i].class0_fp_cdf,
                nmv_tr->comps[i].class0_fp_cdf, MV_FP_SIZE);
    AVERAGE_CDF(nmv_left->comps[i].fp_cdf, nmv_tr->comps[i].fp_cdf, MV_FP_SIZE);
    AVERAGE_CDF(nmv_left->comps[i].sign_cdf, nmv_tr->comps[i].sign_cdf, 2);
    AVERAGE_CDF(nmv_left->comps[i].class0_hp_cdf,
                nmv_tr->comps[i].class0_hp_cdf, 2);
    AVERAGE_CDF(nmv_left->comps[i].hp_cdf, nmv_tr->comps[i].hp_cdf, 2);
    AVERAGE_CDF(nmv_left->comps[i].class0_cdf, nmv_tr->comps[i].class0_cdf,
                CLASS0_SIZE);
    AVERAGE_CDF(nmv_left->comps[i].bits_cdf, nmv_tr->comps[i].bits_cdf, 2);
  }
}
| 1169 | |
// In case of row-based multi-threading of the encoder, since we always
// keep a top-right sync, we can average the top-right SB's CDFs and
// the left SB's CDFs and use the result for the current SB's encoding to
// improve performance. This function facilitates the averaging of CDFs
// and is used only when row-mt is enabled in the encoder.
void av1_avg_cdf_symbols(FRAME_CONTEXT *ctx_left, FRAME_CONTEXT *ctx_tr,
                         int wt_left, int wt_tr) {
  // 'wt_left'/'wt_tr' are consumed implicitly by the AVERAGE_CDF and
  // AVG_CDF_STRIDE macros below; every call averages into ctx_left.
  // Transform-coefficient CDFs.
  AVERAGE_CDF(ctx_left->txb_skip_cdf, ctx_tr->txb_skip_cdf, 2);
  AVERAGE_CDF(ctx_left->eob_extra_cdf, ctx_tr->eob_extra_cdf, 2);
  AVERAGE_CDF(ctx_left->dc_sign_cdf, ctx_tr->dc_sign_cdf, 2);
  AVERAGE_CDF(ctx_left->eob_flag_cdf16, ctx_tr->eob_flag_cdf16, 5);
  AVERAGE_CDF(ctx_left->eob_flag_cdf32, ctx_tr->eob_flag_cdf32, 6);
  AVERAGE_CDF(ctx_left->eob_flag_cdf64, ctx_tr->eob_flag_cdf64, 7);
  AVERAGE_CDF(ctx_left->eob_flag_cdf128, ctx_tr->eob_flag_cdf128, 8);
  AVERAGE_CDF(ctx_left->eob_flag_cdf256, ctx_tr->eob_flag_cdf256, 9);
  AVERAGE_CDF(ctx_left->eob_flag_cdf512, ctx_tr->eob_flag_cdf512, 10);
  AVERAGE_CDF(ctx_left->eob_flag_cdf1024, ctx_tr->eob_flag_cdf1024, 11);
  AVERAGE_CDF(ctx_left->coeff_base_eob_cdf, ctx_tr->coeff_base_eob_cdf, 3);
  AVERAGE_CDF(ctx_left->coeff_base_cdf, ctx_tr->coeff_base_cdf, 4);
  AVERAGE_CDF(ctx_left->coeff_br_cdf, ctx_tr->coeff_br_cdf, BR_CDF_SIZE);
  // Inter-prediction mode CDFs.
  AVERAGE_CDF(ctx_left->newmv_cdf, ctx_tr->newmv_cdf, 2);
  AVERAGE_CDF(ctx_left->zeromv_cdf, ctx_tr->zeromv_cdf, 2);
  AVERAGE_CDF(ctx_left->refmv_cdf, ctx_tr->refmv_cdf, 2);
  AVERAGE_CDF(ctx_left->drl_cdf, ctx_tr->drl_cdf, 2);
  AVERAGE_CDF(ctx_left->inter_compound_mode_cdf,
              ctx_tr->inter_compound_mode_cdf, INTER_COMPOUND_MODES);
  AVERAGE_CDF(ctx_left->compound_type_cdf, ctx_tr->compound_type_cdf,
              MASKED_COMPOUND_TYPES);
  AVERAGE_CDF(ctx_left->wedge_idx_cdf, ctx_tr->wedge_idx_cdf, 16);
  AVERAGE_CDF(ctx_left->interintra_cdf, ctx_tr->interintra_cdf, 2);
  AVERAGE_CDF(ctx_left->wedge_interintra_cdf, ctx_tr->wedge_interintra_cdf, 2);
  AVERAGE_CDF(ctx_left->interintra_mode_cdf, ctx_tr->interintra_mode_cdf,
              INTERINTRA_MODES);
  AVERAGE_CDF(ctx_left->motion_mode_cdf, ctx_tr->motion_mode_cdf, MOTION_MODES);
  AVERAGE_CDF(ctx_left->obmc_cdf, ctx_tr->obmc_cdf, 2);
  AVERAGE_CDF(ctx_left->palette_y_size_cdf, ctx_tr->palette_y_size_cdf,
              PALETTE_SIZES);
  AVERAGE_CDF(ctx_left->palette_uv_size_cdf, ctx_tr->palette_uv_size_cdf,
              PALETTE_SIZES);
  // Palette color-index CDFs: nsymbs grows with the palette size while the
  // arrays are allocated at the maximum stride CDF_SIZE(PALETTE_COLORS).
  for (int j = 0; j < PALETTE_SIZES; j++) {
    int nsymbs = j + PALETTE_MIN_SIZE;
    AVG_CDF_STRIDE(ctx_left->palette_y_color_index_cdf[j],
                   ctx_tr->palette_y_color_index_cdf[j], nsymbs,
                   CDF_SIZE(PALETTE_COLORS));
    AVG_CDF_STRIDE(ctx_left->palette_uv_color_index_cdf[j],
                   ctx_tr->palette_uv_color_index_cdf[j], nsymbs,
                   CDF_SIZE(PALETTE_COLORS));
  }
  AVERAGE_CDF(ctx_left->palette_y_mode_cdf, ctx_tr->palette_y_mode_cdf, 2);
  AVERAGE_CDF(ctx_left->palette_uv_mode_cdf, ctx_tr->palette_uv_mode_cdf, 2);
  // Reference-frame selection CDFs.
  AVERAGE_CDF(ctx_left->comp_inter_cdf, ctx_tr->comp_inter_cdf, 2);
  AVERAGE_CDF(ctx_left->single_ref_cdf, ctx_tr->single_ref_cdf, 2);
  AVERAGE_CDF(ctx_left->comp_ref_type_cdf, ctx_tr->comp_ref_type_cdf, 2);
  AVERAGE_CDF(ctx_left->uni_comp_ref_cdf, ctx_tr->uni_comp_ref_cdf, 2);
  AVERAGE_CDF(ctx_left->comp_ref_cdf, ctx_tr->comp_ref_cdf, 2);
  AVERAGE_CDF(ctx_left->comp_bwdref_cdf, ctx_tr->comp_bwdref_cdf, 2);
  AVERAGE_CDF(ctx_left->txfm_partition_cdf, ctx_tr->txfm_partition_cdf, 2);
#if !CONFIG_REMOVE_DIST_WTD_COMP
  AVERAGE_CDF(ctx_left->compound_index_cdf, ctx_tr->compound_index_cdf, 2);
#endif  // !CONFIG_REMOVE_DIST_WTD_COMP
  AVERAGE_CDF(ctx_left->comp_group_idx_cdf, ctx_tr->comp_group_idx_cdf, 2);
  AVERAGE_CDF(ctx_left->skip_mode_cdfs, ctx_tr->skip_mode_cdfs, 2);
  AVERAGE_CDF(ctx_left->skip_txfm_cdfs, ctx_tr->skip_txfm_cdfs, 2);
  AVERAGE_CDF(ctx_left->intra_inter_cdf, ctx_tr->intra_inter_cdf, 2);
  // Motion-vector CDFs (regular MVs and intrabc DVs) take the weights
  // explicitly instead of via macro capture.
  avg_nmv(&ctx_left->nmvc, &ctx_tr->nmvc, wt_left, wt_tr);
  avg_nmv(&ctx_left->ndvc, &ctx_tr->ndvc, wt_left, wt_tr);
  AVERAGE_CDF(ctx_left->intrabc_cdf, ctx_tr->intrabc_cdf, 2);
  AVERAGE_CDF(ctx_left->seg.tree_cdf, ctx_tr->seg.tree_cdf, MAX_SEGMENTS);
  AVERAGE_CDF(ctx_left->seg.pred_cdf, ctx_tr->seg.pred_cdf, 2);
  AVERAGE_CDF(ctx_left->seg.spatial_pred_seg_cdf,
              ctx_tr->seg.spatial_pred_seg_cdf, MAX_SEGMENTS);
  AVERAGE_CDF(ctx_left->filter_intra_cdfs, ctx_tr->filter_intra_cdfs, 2);
  AVERAGE_CDF(ctx_left->filter_intra_mode_cdf, ctx_tr->filter_intra_mode_cdf,
              FILTER_INTRA_MODES);
  AVERAGE_CDF(ctx_left->switchable_restore_cdf, ctx_tr->switchable_restore_cdf,
              RESTORE_SWITCHABLE_TYPES);
  AVERAGE_CDF(ctx_left->wiener_restore_cdf, ctx_tr->wiener_restore_cdf, 2);
  AVERAGE_CDF(ctx_left->sgrproj_restore_cdf, ctx_tr->sgrproj_restore_cdf, 2);
  AVERAGE_CDF(ctx_left->y_mode_cdf, ctx_tr->y_mode_cdf, INTRA_MODES);
  // uv_mode_cdf[0] carries one fewer symbol (UV_INTRA_MODES - 1) but is
  // stored at the full CDF_SIZE(UV_INTRA_MODES) stride.
  AVG_CDF_STRIDE(ctx_left->uv_mode_cdf[0], ctx_tr->uv_mode_cdf[0],
                 UV_INTRA_MODES - 1, CDF_SIZE(UV_INTRA_MODES));
  AVERAGE_CDF(ctx_left->uv_mode_cdf[1], ctx_tr->uv_mode_cdf[1], UV_INTRA_MODES);
#if CONFIG_SDP
  // Partition CDFs per partition-structure plane: contexts 0-3 use 4 symbols,
  // 4-15 use 10, the rest use 8; all are stored at stride CDF_SIZE(10).
  for (int plane_index = 0; plane_index < PARTITION_STRUCTURE_NUM;
       plane_index++) {
    for (int i = 0; i < PARTITION_CONTEXTS; i++) {
      if (i < 4) {
        AVG_CDF_STRIDE(ctx_left->partition_cdf[plane_index][i],
                       ctx_tr->partition_cdf[plane_index][i], 4, CDF_SIZE(10));
      } else if (i < 16) {
        AVERAGE_CDF(ctx_left->partition_cdf[plane_index][i],
                    ctx_tr->partition_cdf[plane_index][i], 10);
      } else {
        AVG_CDF_STRIDE(ctx_left->partition_cdf[plane_index][i],
                       ctx_tr->partition_cdf[plane_index][i], 8, CDF_SIZE(10));
      }
    }
  }
#else
  // Partition CDFs: contexts 0-3 use 4 symbols, 4-15 use 10, the rest use 8;
  // all are stored at stride CDF_SIZE(10).
  for (int i = 0; i < PARTITION_CONTEXTS; i++) {
    if (i < 4) {
      AVG_CDF_STRIDE(ctx_left->partition_cdf[i], ctx_tr->partition_cdf[i], 4,
                     CDF_SIZE(10));
    } else if (i < 16) {
      AVERAGE_CDF(ctx_left->partition_cdf[i], ctx_tr->partition_cdf[i], 10);
    } else {
      AVG_CDF_STRIDE(ctx_left->partition_cdf[i], ctx_tr->partition_cdf[i], 8,
                     CDF_SIZE(10));
    }
  }
#endif
  AVERAGE_CDF(ctx_left->switchable_interp_cdf, ctx_tr->switchable_interp_cdf,
              SWITCHABLE_FILTERS);
  AVERAGE_CDF(ctx_left->kf_y_cdf, ctx_tr->kf_y_cdf, INTRA_MODES);
  AVERAGE_CDF(ctx_left->angle_delta_cdf, ctx_tr->angle_delta_cdf,
              2 * MAX_ANGLE_DELTA + 1);
  // tx_size_cdf[0] uses one fewer symbol than the other depth indices.
  AVG_CDF_STRIDE(ctx_left->tx_size_cdf[0], ctx_tr->tx_size_cdf[0], MAX_TX_DEPTH,
                 CDF_SIZE(MAX_TX_DEPTH + 1));
  AVERAGE_CDF(ctx_left->tx_size_cdf[1], ctx_tr->tx_size_cdf[1],
              MAX_TX_DEPTH + 1);
  AVERAGE_CDF(ctx_left->tx_size_cdf[2], ctx_tr->tx_size_cdf[2],
              MAX_TX_DEPTH + 1);
  AVERAGE_CDF(ctx_left->tx_size_cdf[3], ctx_tr->tx_size_cdf[3],
              MAX_TX_DEPTH + 1);
  AVERAGE_CDF(ctx_left->delta_q_cdf, ctx_tr->delta_q_cdf, DELTA_Q_PROBS + 1);
  AVERAGE_CDF(ctx_left->delta_lf_cdf, ctx_tr->delta_lf_cdf, DELTA_LF_PROBS + 1);
  for (int i = 0; i < FRAME_LF_COUNT; i++) {
    AVERAGE_CDF(ctx_left->delta_lf_multi_cdf[i], ctx_tr->delta_lf_multi_cdf[i],
                DELTA_LF_PROBS + 1);
  }
  // Extended-tx CDFs: each tx-set index has its own reduced symbol count but
  // shares the CDF_SIZE(TX_TYPES) stride; index 0 is not averaged here.
  AVG_CDF_STRIDE(ctx_left->intra_ext_tx_cdf[1], ctx_tr->intra_ext_tx_cdf[1], 7,
                 CDF_SIZE(TX_TYPES));
  AVG_CDF_STRIDE(ctx_left->intra_ext_tx_cdf[2], ctx_tr->intra_ext_tx_cdf[2], 5,
                 CDF_SIZE(TX_TYPES));
  AVG_CDF_STRIDE(ctx_left->inter_ext_tx_cdf[1], ctx_tr->inter_ext_tx_cdf[1], 16,
                 CDF_SIZE(TX_TYPES));
  AVG_CDF_STRIDE(ctx_left->inter_ext_tx_cdf[2], ctx_tr->inter_ext_tx_cdf[2], 12,
                 CDF_SIZE(TX_TYPES));
  AVG_CDF_STRIDE(ctx_left->inter_ext_tx_cdf[3], ctx_tr->inter_ext_tx_cdf[3], 2,
                 CDF_SIZE(TX_TYPES));
  AVERAGE_CDF(ctx_left->cfl_sign_cdf, ctx_tr->cfl_sign_cdf, CFL_JOINT_SIGNS);
  AVERAGE_CDF(ctx_left->cfl_alpha_cdf, ctx_tr->cfl_alpha_cdf,
              CFL_ALPHABET_SIZE);
}
| 1314 | |
| 1315 | // Grade the temporal variation of the source by comparing the current sb and |
| 1316 | // its collocated block in the last frame. |
| 1317 | void av1_source_content_sb(AV1_COMP *cpi, MACROBLOCK *x, int offset) { |
| 1318 | unsigned int tmp_sse; |
| 1319 | unsigned int tmp_variance; |
| 1320 | const BLOCK_SIZE bsize = cpi->common.seq_params.sb_size; |
| 1321 | uint8_t *src_y = cpi->source->y_buffer; |
| 1322 | int src_ystride = cpi->source->y_stride; |
| 1323 | uint8_t *last_src_y = cpi->last_source->y_buffer; |
| 1324 | int last_src_ystride = cpi->last_source->y_stride; |
| 1325 | uint64_t avg_source_sse_threshold = 100000; // ~5*5*(64*64) |
| 1326 | uint64_t avg_source_sse_threshold_high = 1000000; // ~15*15*(64*64) |
| 1327 | uint64_t sum_sq_thresh = 10000; // sum = sqrt(thresh / 64*64)) ~1.5 |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 1328 | MACROBLOCKD *xd = &x->e_mbd; |
| 1329 | if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) return; |
Jayasanker J | e9ad475 | 2020-06-30 19:30:03 +0530 | [diff] [blame] | 1330 | src_y += offset; |
| 1331 | last_src_y += offset; |
| 1332 | tmp_variance = cpi->fn_ptr[bsize].vf(src_y, src_ystride, last_src_y, |
| 1333 | last_src_ystride, &tmp_sse); |
| 1334 | // Note: tmp_sse - tmp_variance = ((sum * sum) >> 12) |
| 1335 | // Detect large lighting change. |
| 1336 | if (tmp_variance < (tmp_sse >> 1) && (tmp_sse - tmp_variance) > sum_sq_thresh) |
| 1337 | x->content_state_sb = kLowVarHighSumdiff; |
| 1338 | else if (tmp_sse < avg_source_sse_threshold) |
| 1339 | x->content_state_sb = kLowSad; |
| 1340 | else if (tmp_sse > avg_source_sse_threshold_high) |
| 1341 | x->content_state_sb = kHighSad; |
| 1342 | } |
| 1343 | |
// Memset the mbmis at the current superblock to 0
// Clears both the fine-grained mi grid (pointers + tx types) and the coarser
// allocated mi array for the SB at (mi_row, mi_col).
void av1_reset_mbmi(CommonModeInfoParams *const mi_params, BLOCK_SIZE sb_size,
                    int mi_row, int mi_col) {
  // size of sb in unit of mi (BLOCK_4X4)
  const int sb_size_mi = mi_size_wide[sb_size];
  const int mi_alloc_size_1d = mi_size_wide[mi_params->mi_alloc_bsize];
  // size of sb in unit of allocated mi size
  const int sb_size_alloc_mi = mi_size_wide[sb_size] / mi_alloc_size_1d;
  assert(mi_params->mi_alloc_stride % sb_size_alloc_mi == 0 &&
         "mi is not allocated as a multiple of sb!");
  assert(mi_params->mi_stride % sb_size_mi == 0 &&
         "mi_grid_base is not allocated as a multiple of sb!");

  const int mi_rows = mi_size_high[sb_size];
  for (int cur_mi_row = 0; cur_mi_row < mi_rows; cur_mi_row++) {
    // NOTE(review): this assert checks a fixed column offset of
    // mi_alloc_size_1d rather than the current mi_col; presumably it only
    // guards against a degenerate mi_stride -- confirm.
    assert(get_mi_grid_idx(mi_params, 0, mi_col + mi_alloc_size_1d) <
           mi_params->mi_stride);
    const int mi_grid_idx =
        get_mi_grid_idx(mi_params, mi_row + cur_mi_row, mi_col);
    const int alloc_mi_idx =
        get_alloc_mi_idx(mi_params, mi_row + cur_mi_row, mi_col);
    // Clear one SB-wide row of mi grid pointers and tx types.
    memset(&mi_params->mi_grid_base[mi_grid_idx], 0,
           sb_size_mi * sizeof(*mi_params->mi_grid_base));
    memset(&mi_params->tx_type_map[mi_grid_idx], 0,
           sb_size_mi * sizeof(*mi_params->tx_type_map));
    // The allocated mi array is coarser than the grid; clear it only once
    // per allocated row.
    if (cur_mi_row % mi_alloc_size_1d == 0) {
      memset(&mi_params->mi_alloc[alloc_mi_idx], 0,
             sb_size_alloc_mi * sizeof(*mi_params->mi_alloc));
    }
  }
}
| 1375 | |
| 1376 | void av1_backup_sb_state(SB_FIRST_PASS_STATS *sb_fp_stats, const AV1_COMP *cpi, |
| 1377 | ThreadData *td, const TileDataEnc *tile_data, |
| 1378 | int mi_row, int mi_col) { |
| 1379 | MACROBLOCK *x = &td->mb; |
| 1380 | MACROBLOCKD *xd = &x->e_mbd; |
| 1381 | const TileInfo *tile_info = &tile_data->tile_info; |
| 1382 | |
| 1383 | const AV1_COMMON *cm = &cpi->common; |
| 1384 | const int num_planes = av1_num_planes(cm); |
| 1385 | const BLOCK_SIZE sb_size = cm->seq_params.sb_size; |
| 1386 | |
| 1387 | xd->above_txfm_context = |
| 1388 | cm->above_contexts.txfm[tile_info->tile_row] + mi_col; |
| 1389 | xd->left_txfm_context = |
| 1390 | xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK); |
| 1391 | av1_save_context(x, &sb_fp_stats->x_ctx, mi_row, mi_col, sb_size, num_planes); |
| 1392 | |
| 1393 | sb_fp_stats->rd_count = cpi->td.rd_counts; |
| 1394 | sb_fp_stats->split_count = x->txfm_search_info.txb_split_count; |
| 1395 | |
| 1396 | sb_fp_stats->fc = *td->counts; |
| 1397 | |
| 1398 | memcpy(sb_fp_stats->inter_mode_rd_models, tile_data->inter_mode_rd_models, |
| 1399 | sizeof(sb_fp_stats->inter_mode_rd_models)); |
| 1400 | |
| 1401 | memcpy(sb_fp_stats->thresh_freq_fact, x->thresh_freq_fact, |
| 1402 | sizeof(sb_fp_stats->thresh_freq_fact)); |
| 1403 | |
| 1404 | const int alloc_mi_idx = get_alloc_mi_idx(&cm->mi_params, mi_row, mi_col); |
| 1405 | sb_fp_stats->current_qindex = |
| 1406 | cm->mi_params.mi_alloc[alloc_mi_idx].current_qindex; |
| 1407 | |
| 1408 | #if CONFIG_INTERNAL_STATS |
| 1409 | memcpy(sb_fp_stats->mode_chosen_counts, cpi->mode_chosen_counts, |
| 1410 | sizeof(sb_fp_stats->mode_chosen_counts)); |
| 1411 | #endif // CONFIG_INTERNAL_STATS |
| 1412 | } |
| 1413 | |
| 1414 | void av1_restore_sb_state(const SB_FIRST_PASS_STATS *sb_fp_stats, AV1_COMP *cpi, |
| 1415 | ThreadData *td, TileDataEnc *tile_data, int mi_row, |
| 1416 | int mi_col) { |
| 1417 | MACROBLOCK *x = &td->mb; |
| 1418 | |
| 1419 | const AV1_COMMON *cm = &cpi->common; |
| 1420 | const int num_planes = av1_num_planes(cm); |
| 1421 | const BLOCK_SIZE sb_size = cm->seq_params.sb_size; |
| 1422 | |
| 1423 | av1_restore_context(x, &sb_fp_stats->x_ctx, mi_row, mi_col, sb_size, |
| 1424 | num_planes); |
| 1425 | |
| 1426 | cpi->td.rd_counts = sb_fp_stats->rd_count; |
| 1427 | x->txfm_search_info.txb_split_count = sb_fp_stats->split_count; |
| 1428 | |
| 1429 | *td->counts = sb_fp_stats->fc; |
| 1430 | |
| 1431 | memcpy(tile_data->inter_mode_rd_models, sb_fp_stats->inter_mode_rd_models, |
| 1432 | sizeof(sb_fp_stats->inter_mode_rd_models)); |
| 1433 | memcpy(x->thresh_freq_fact, sb_fp_stats->thresh_freq_fact, |
| 1434 | sizeof(sb_fp_stats->thresh_freq_fact)); |
| 1435 | |
| 1436 | const int alloc_mi_idx = get_alloc_mi_idx(&cm->mi_params, mi_row, mi_col); |
| 1437 | cm->mi_params.mi_alloc[alloc_mi_idx].current_qindex = |
| 1438 | sb_fp_stats->current_qindex; |
| 1439 | |
| 1440 | #if CONFIG_INTERNAL_STATS |
| 1441 | memcpy(cpi->mode_chosen_counts, sb_fp_stats->mode_chosen_counts, |
| 1442 | sizeof(sb_fp_stats->mode_chosen_counts)); |
| 1443 | #endif // CONFIG_INTERNAL_STATS |
| 1444 | } |
| 1445 | |
// Update the rate costs of some symbols according to the frequency directed
// by speed features
// Each switch below cascades from coarse to fine granularity: a case falls
// through to the next level when the current position is the first SB of its
// unit (tile / SB row), and breaks early otherwise.
void av1_set_cost_upd_freq(AV1_COMP *cpi, ThreadData *td,
                           const TileInfo *const tile_info, const int mi_row,
                           const int mi_col) {
  AV1_COMMON *const cm = &cpi->common;
  const int num_planes = av1_num_planes(cm);
  MACROBLOCK *const x = &td->mb;
  MACROBLOCKD *const xd = &x->e_mbd;

  // Coefficient-cost update.
  // NOTE(review): unlike the mv switch below, neither this switch nor the
  // mode switch has a COST_UPD_OFF case, so an OFF setting would hit the
  // assert -- presumably those update frequencies cannot be OFF; confirm.
  switch (cpi->oxcf.cost_upd_freq.coeff) {
    case COST_UPD_TILE:  // Tile level
      if (mi_row != tile_info->mi_row_start) break;
      AOM_FALLTHROUGH_INTENDED;
    case COST_UPD_SBROW:  // SB row level in tile
      if (mi_col != tile_info->mi_col_start) break;
      AOM_FALLTHROUGH_INTENDED;
    case COST_UPD_SB:  // SB level
      // Speed feature: optionally restrict SB-level updates to the first SB
      // of each row.
      if (cpi->sf.inter_sf.disable_sb_level_coeff_cost_upd &&
          mi_col != tile_info->mi_col_start)
        break;
      av1_fill_coeff_costs(&x->coeff_costs, xd->tile_ctx, num_planes);
      break;
    default: assert(0);
  }

  // Mode-cost update, same cascading pattern.
  switch (cpi->oxcf.cost_upd_freq.mode) {
    case COST_UPD_TILE:  // Tile level
      if (mi_row != tile_info->mi_row_start) break;
      AOM_FALLTHROUGH_INTENDED;
    case COST_UPD_SBROW:  // SB row level in tile
      if (mi_col != tile_info->mi_col_start) break;
      AOM_FALLTHROUGH_INTENDED;
    case COST_UPD_SB:  // SB level
#if CONFIG_SDP
      av1_fill_mode_rates(cm, xd, &x->mode_costs, xd->tile_ctx);
#else
      av1_fill_mode_rates(cm, &x->mode_costs, xd->tile_ctx);
#endif
      break;
    default: assert(0);
  }
  // MV-cost update; COST_UPD_OFF is a supported no-op here.
  switch (cpi->oxcf.cost_upd_freq.mv) {
    case COST_UPD_OFF: break;
    case COST_UPD_TILE:  // Tile level
      if (mi_row != tile_info->mi_row_start) break;
      AOM_FALLTHROUGH_INTENDED;
    case COST_UPD_SBROW:  // SB row level in tile
      if (mi_col != tile_info->mi_col_start) break;
      AOM_FALLTHROUGH_INTENDED;
    case COST_UPD_SB:  // SB level
      if (cpi->sf.inter_sf.disable_sb_level_mv_cost_upd &&
          mi_col != tile_info->mi_col_start)
        break;
      av1_fill_mv_costs(xd->tile_ctx, cm->features.cur_frame_force_integer_mv,
                        cm->features.allow_high_precision_mv, &x->mv_costs);
      break;
    default: assert(0);
  }
}