blob: 219784fedfa69fc743d1e3d3e96641c3d1288769 [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <assert.h>
13#include <limits.h>
14#include <stdio.h>
15
Yaowu Xuf883b422016-08-30 14:01:10 -070016#include "aom/aom_encoder.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070017#include "aom_dsp/aom_dsp_common.h"
Debargha Mukherjee47748b52017-03-24 12:20:49 -070018#include "aom_dsp/binary_codes_writer.h"
Cheng Chenc7855b12017-09-05 10:49:08 -070019#include "aom_dsp/bitwriter_buffer.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070020#include "aom_mem/aom_mem.h"
Wan-Teh Changccdd87f2018-08-08 18:40:29 -070021#include "aom_ports/bitops.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070022#include "aom_ports/mem_ops.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070023#if CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070024#include "aom_util/debug_util.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070025#endif // CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070026
Steinar Midtskogena9d41e82017-03-17 12:48:15 +010027#include "av1/common/cdef.h"
Luc Trudeaud183b642017-11-28 11:42:37 -050028#include "av1/common/cfl.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070029#include "av1/common/entropy.h"
30#include "av1/common/entropymode.h"
31#include "av1/common/entropymv.h"
32#include "av1/common/mvref_common.h"
33#include "av1/common/pred_common.h"
34#include "av1/common/reconinter.h"
hui su45dc5972016-12-08 17:42:50 -080035#include "av1/common/reconintra.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070036#include "av1/common/seg_common.h"
37#include "av1/common/tile_common.h"
38
Yaowu Xuc27fc142016-08-22 16:08:15 -070039#include "av1/encoder/bitstream.h"
40#include "av1/encoder/cost.h"
41#include "av1/encoder/encodemv.h"
Hui Suec73b442018-01-04 12:47:53 -080042#include "av1/encoder/encodetxb.h"
Cherma Rajan Ace0c4232021-04-23 21:29:51 +053043#include "av1/encoder/ethread.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070044#include "av1/encoder/mcomp.h"
hui sud13c24a2017-04-07 16:13:07 -070045#include "av1/encoder/palette.h"
Rachel Barkerfe93d332023-07-10 10:01:15 +000046#include "av1/encoder/pickrst.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070047#include "av1/encoder/segmentation.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070048#include "av1/encoder/tokenize.h"
49
Di Chen56586622017-06-09 13:49:44 -070050#define ENC_MISMATCH_DEBUG 0
Cherma Rajan A248e8d32021-06-15 09:06:38 +053051#define SETUP_TIME_OH_CONST 5 // Setup time overhead constant per worker
52#define JOB_DISP_TIME_OH_CONST 1 // Job dispatch time overhead per tile
Zoe Liu85b66462017-04-20 14:28:19 -070053
Yaowu Xuf883b422016-08-30 14:01:10 -070054static INLINE void write_uniform(aom_writer *w, int n, int v) {
hui su37499292017-04-26 09:49:53 -070055 const int l = get_unsigned_bits(n);
56 const int m = (1 << l) - n;
Yaowu Xuc27fc142016-08-22 16:08:15 -070057 if (l == 0) return;
58 if (v < m) {
Yaowu Xuf883b422016-08-30 14:01:10 -070059 aom_write_literal(w, v, l - 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -070060 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -070061 aom_write_literal(w, m + ((v - m) >> 1), l - 1);
62 aom_write_literal(w, (v - m) & 1, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -070063 }
64}
65
Jerome Jiang3dd9df92020-10-29 16:42:33 -070066#if !CONFIG_REALTIME_ONLY
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -070067static AOM_INLINE void loop_restoration_write_sb_coeffs(
Rachel Barkerfe93d332023-07-10 10:01:15 +000068 const AV1_COMMON *const cm, MACROBLOCKD *xd, int runit_idx,
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -070069 aom_writer *const w, int plane, FRAME_COUNTS *counts);
Jerome Jiang3dd9df92020-10-29 16:42:33 -070070#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -070071
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -070072static AOM_INLINE void write_intra_y_mode_kf(FRAME_CONTEXT *frame_ctx,
73 const MB_MODE_INFO *mi,
74 const MB_MODE_INFO *above_mi,
75 const MB_MODE_INFO *left_mi,
76 PREDICTION_MODE mode,
77 aom_writer *w) {
Yue Chen53b53f02018-03-29 14:31:23 -070078 assert(!is_intrabc_block(mi));
Jingning Han9010e202017-12-14 14:48:09 -080079 (void)mi;
Jingning Han9010e202017-12-14 14:48:09 -080080 aom_write_symbol(w, mode, get_y_mode_cdf(frame_ctx, above_mi, left_mi),
Jingning Hanf04254f2017-03-08 10:51:35 -080081 INTRA_MODES);
Jingning Hanf04254f2017-03-08 10:51:35 -080082}
Yaowu Xuc27fc142016-08-22 16:08:15 -070083
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -070084static AOM_INLINE void write_inter_mode(aom_writer *w, PREDICTION_MODE mode,
85 FRAME_CONTEXT *ec_ctx,
86 const int16_t mode_ctx) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070087 const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
Yaowu Xuc27fc142016-08-22 16:08:15 -070088
Thomas Davies149eda52017-06-12 18:11:55 +010089 aom_write_symbol(w, mode != NEWMV, ec_ctx->newmv_cdf[newmv_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -070090
Jingning Hanf2b87bd2017-05-18 16:27:30 -070091 if (mode != NEWMV) {
Sarah Parker2b9ec2e2017-10-30 17:34:08 -070092 const int16_t zeromv_ctx =
93 (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
Sarah Parker2b9ec2e2017-10-30 17:34:08 -070094 aom_write_symbol(w, mode != GLOBALMV, ec_ctx->zeromv_cdf[zeromv_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -070095
Sarah Parker2b9ec2e2017-10-30 17:34:08 -070096 if (mode != GLOBALMV) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070097 int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
Thomas Davies149eda52017-06-12 18:11:55 +010098 aom_write_symbol(w, mode != NEARESTMV, ec_ctx->refmv_cdf[refmv_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -070099 }
100 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700101}
102
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700103static AOM_INLINE void write_drl_idx(
104 FRAME_CONTEXT *ec_ctx, const MB_MODE_INFO *mbmi,
105 const MB_MODE_INFO_EXT_FRAME *mbmi_ext_frame, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700106 assert(mbmi->ref_mv_idx < 3);
107
Sebastien Alaiwan34d55662017-11-15 09:36:03 +0100108 const int new_mv = mbmi->mode == NEWMV || mbmi->mode == NEW_NEWMV;
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000109 if (new_mv) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700110 int idx;
111 for (idx = 0; idx < 2; ++idx) {
Remya0cce44c2019-08-16 11:57:24 +0530112 if (mbmi_ext_frame->ref_mv_count > idx + 1) {
113 uint8_t drl_ctx = av1_drl_ctx(mbmi_ext_frame->weight, idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700114
Thomas Davies149eda52017-06-12 18:11:55 +0100115 aom_write_symbol(w, mbmi->ref_mv_idx != idx, ec_ctx->drl_cdf[drl_ctx],
116 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700117 if (mbmi->ref_mv_idx == idx) return;
118 }
119 }
120 return;
121 }
122
David Barker3dfba992017-04-03 16:10:09 +0100123 if (have_nearmv_in_inter_mode(mbmi->mode)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700124 int idx;
125 // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
126 for (idx = 1; idx < 3; ++idx) {
Remya0cce44c2019-08-16 11:57:24 +0530127 if (mbmi_ext_frame->ref_mv_count > idx + 1) {
128 uint8_t drl_ctx = av1_drl_ctx(mbmi_ext_frame->weight, idx);
Thomas Davies149eda52017-06-12 18:11:55 +0100129 aom_write_symbol(w, mbmi->ref_mv_idx != (idx - 1),
130 ec_ctx->drl_cdf[drl_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700131 if (mbmi->ref_mv_idx == (idx - 1)) return;
132 }
133 }
134 return;
135 }
136}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700137
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700138static AOM_INLINE void write_inter_compound_mode(MACROBLOCKD *xd, aom_writer *w,
139 PREDICTION_MODE mode,
140 const int16_t mode_ctx) {
Thomas Davies8c08a332017-06-26 17:30:34 +0100141 assert(is_inter_compound_mode(mode));
Thomas Davies8c08a332017-06-26 17:30:34 +0100142 aom_write_symbol(w, INTER_COMPOUND_OFFSET(mode),
143 xd->tile_ctx->inter_compound_mode_cdf[mode_ctx],
144 INTER_COMPOUND_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700145}
Zoe Liu85b66462017-04-20 14:28:19 -0700146
// Recursively signals the variable transform-size partition tree for an
// inter block. At each node one binary symbol says "stop at this tx size"
// (0) or "split into the next smaller tx size" (1). Recursion terminates
// without further signaling at MAX_VARTX_DEPTH or when the split size
// reaches TX_4X4. The above/left txfm contexts are updated in lockstep so
// later blocks derive the correct partition contexts.
static AOM_INLINE void write_tx_size_vartx(MACROBLOCKD *xd,
                                           const MB_MODE_INFO *mbmi,
                                           TX_SIZE tx_size, int depth,
                                           int blk_row, int blk_col,
                                           aom_writer *w) {
  FRAME_CONTEXT *const ec_ctx = xd->tile_ctx;
  const int max_blocks_high = max_block_high(xd, mbmi->bsize, 0);
  const int max_blocks_wide = max_block_wide(xd, mbmi->bsize, 0);

  // Units that fall outside the visible frame area carry no symbols.
  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (depth == MAX_VARTX_DEPTH) {
    // Deepest allowed level: no split bit is sent; just record the size.
    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    return;
  }

  const int ctx = txfm_partition_context(xd->above_txfm_context + blk_col,
                                         xd->left_txfm_context + blk_row,
                                         mbmi->bsize, tx_size);
  const int txb_size_index =
      av1_get_txb_size_index(mbmi->bsize, blk_row, blk_col);
  // The encoder's chosen size equals the current node's size => no split.
  const int write_txfm_partition =
      tx_size == mbmi->inter_tx_size[txb_size_index];
  if (write_txfm_partition) {
    aom_write_symbol(w, 0, ec_ctx->txfm_partition_cdf[ctx], 2);

    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    // TODO(yuec): set correct txfm partition update for qttx
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];

    // Signal "split" and recurse (or terminate at TX_4X4 below).
    aom_write_symbol(w, 1, ec_ctx->txfm_partition_cdf[ctx], 2);

    if (sub_txs == TX_4X4) {
      // Smallest size: the split bit fully determines the leaves.
      txfm_partition_update(xd->above_txfm_context + blk_col,
                            xd->left_txfm_context + blk_row, sub_txs, tx_size);
      return;
    }

    assert(bsw > 0 && bsh > 0);
    // Visit each sub-block in raster order; offsets are in 4x4 mi units.
    for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh) {
      const int offsetr = blk_row + row;
      for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
        const int offsetc = blk_col + col;
        write_tx_size_vartx(xd, mbmi, sub_txs, depth + 1, offsetr, offsetc, w);
      }
    }
  }
}
200
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700201static AOM_INLINE void write_selected_tx_size(const MACROBLOCKD *xd,
202 aom_writer *w) {
Yue Chen53b53f02018-03-29 14:31:23 -0700203 const MB_MODE_INFO *const mbmi = xd->mi[0];
chiyotsai0f5cd052020-08-27 14:37:44 -0700204 const BLOCK_SIZE bsize = mbmi->bsize;
Thomas Davies15580c52017-03-09 13:53:42 +0000205 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Rupert Swarbrickfcff0b22017-10-05 09:26:04 +0100206 if (block_signals_txsize(bsize)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700207 const TX_SIZE tx_size = mbmi->tx_size;
Yaowu Xu25ff26a2018-02-26 11:20:10 -0800208 const int tx_size_ctx = get_tx_size_context(xd);
Frederic Barbier4b56b102018-03-30 16:09:34 +0200209 const int depth = tx_size_to_depth(tx_size, bsize);
210 const int max_depths = bsize_to_max_depth(bsize);
211 const int32_t tx_size_cat = bsize_to_tx_size_cat(bsize);
Debargha Mukherjee6147b1b2017-11-08 08:31:09 -0800212
Debargha Mukherjee6147b1b2017-11-08 08:31:09 -0800213 assert(depth >= 0 && depth <= max_depths);
Debargha Mukherjee6147b1b2017-11-08 08:31:09 -0800214 assert(!is_inter_block(mbmi));
Yue Chen49587a72016-09-28 17:09:47 -0700215 assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700216
Thomas Davies15580c52017-03-09 13:53:42 +0000217 aom_write_symbol(w, depth, ec_ctx->tx_size_cdf[tx_size_cat][tx_size_ctx],
Debargha Mukherjee6147b1b2017-11-08 08:31:09 -0800218 max_depths + 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700219 }
220}
221
Yaowu Xuf883b422016-08-30 14:01:10 -0700222static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
chiyotsaid730cef2022-10-26 13:58:20 -0700223 uint8_t segment_id, const MB_MODE_INFO *mi,
224 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700225 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
226 return 1;
227 } else {
chiyotsai8c004e12020-04-17 15:52:08 -0700228 const int skip_txfm = mi->skip_txfm;
229 const int ctx = av1_get_skip_txfm_context(xd);
Thomas Davies61e3e372017-04-04 16:10:23 +0100230 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
chiyotsai8c004e12020-04-17 15:52:08 -0700231 aom_write_symbol(w, skip_txfm, ec_ctx->skip_txfm_cdfs[ctx], 2);
232 return skip_txfm;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700233 }
234}
235
Zoe Liuf40a9572017-10-13 12:37:19 -0700236static int write_skip_mode(const AV1_COMMON *cm, const MACROBLOCKD *xd,
chiyotsaid730cef2022-10-26 13:58:20 -0700237 uint8_t segment_id, const MB_MODE_INFO *mi,
Yue Chen53b53f02018-03-29 14:31:23 -0700238 aom_writer *w) {
David Turnerd2a592e2018-11-16 14:59:31 +0000239 if (!cm->current_frame.skip_mode_info.skip_mode_flag) return 0;
Zoe Liuf40a9572017-10-13 12:37:19 -0700240 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
241 return 0;
242 }
Yue Chen53b53f02018-03-29 14:31:23 -0700243 const int skip_mode = mi->skip_mode;
chiyotsai0f5cd052020-08-27 14:37:44 -0700244 if (!is_comp_ref_allowed(mi->bsize)) {
Zoe Liuf40a9572017-10-13 12:37:19 -0700245 assert(!skip_mode);
246 return 0;
247 }
Urvang Joshi6cd95d22018-05-15 14:14:58 -0400248 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME) ||
249 segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV)) {
250 // These features imply single-reference mode, while skip mode implies
251 // compound reference. Hence, the two are mutually exclusive.
252 // In other words, skip_mode is implicitly 0 here.
Urvang Joshie5ae2832018-05-11 13:47:00 -0700253 assert(!skip_mode);
254 return 0;
255 }
Zoe Liuf40a9572017-10-13 12:37:19 -0700256 const int ctx = av1_get_skip_mode_context(xd);
257 aom_write_symbol(w, skip_mode, xd->tile_ctx->skip_mode_cdfs[ctx], 2);
258 return skip_mode;
259}
Zoe Liuf40a9572017-10-13 12:37:19 -0700260
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700261static AOM_INLINE void write_is_inter(const AV1_COMMON *cm,
chiyotsaid730cef2022-10-26 13:58:20 -0700262 const MACROBLOCKD *xd, uint8_t segment_id,
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700263 aom_writer *w, const int is_inter) {
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100264 if (!segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
Debargha Mukherjee27e55a62018-04-17 07:43:04 -0700265 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV)) {
266 assert(is_inter);
267 return;
268 }
Yue Chen170678a2017-10-17 13:43:10 -0700269 const int ctx = av1_get_intra_inter_context(xd);
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100270 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100271 aom_write_symbol(w, is_inter, ec_ctx->intra_inter_cdf[ctx], 2);
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100272 }
273}
274
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700275static AOM_INLINE void write_motion_mode(const AV1_COMMON *cm, MACROBLOCKD *xd,
276 const MB_MODE_INFO *mbmi,
277 aom_writer *w) {
Sebastien Alaiwan48795802017-10-30 12:07:13 +0100278 MOTION_MODE last_motion_mode_allowed =
Urvang Joshi6237b882020-03-26 15:02:26 -0700279 cm->features.switchable_motion_mode
Yue Chen53b53f02018-03-29 14:31:23 -0700280 ? motion_mode_allowed(cm->global_motion, xd, mbmi,
Urvang Joshib6409e92020-03-23 11:23:27 -0700281 cm->features.allow_warped_motion)
Yue Chen5380cb52018-02-23 15:33:21 -0800282 : SIMPLE_TRANSLATION;
283 assert(mbmi->motion_mode <= last_motion_mode_allowed);
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000284 switch (last_motion_mode_allowed) {
285 case SIMPLE_TRANSLATION: break;
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000286 case OBMC_CAUSAL:
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000287 aom_write_symbol(w, mbmi->motion_mode == OBMC_CAUSAL,
chiyotsai0f5cd052020-08-27 14:37:44 -0700288 xd->tile_ctx->obmc_cdf[mbmi->bsize], 2);
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000289 break;
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000290 default:
291 aom_write_symbol(w, mbmi->motion_mode,
chiyotsai0f5cd052020-08-27 14:37:44 -0700292 xd->tile_ctx->motion_mode_cdf[mbmi->bsize],
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000293 MOTION_MODES);
Yue Chen69f18e12016-09-08 14:48:15 -0700294 }
Yue Chen69f18e12016-09-08 14:48:15 -0700295}
Wei-Ting Lin85a8f702017-06-22 13:55:15 -0700296
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700297static AOM_INLINE void write_delta_qindex(const MACROBLOCKD *xd,
298 int delta_qindex, aom_writer *w) {
Arild Fuldseth07441162016-08-15 15:07:52 +0200299 int sign = delta_qindex < 0;
300 int abs = sign ? -delta_qindex : delta_qindex;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000301 int rem_bits, thr;
Thomas Daviesf6936102016-09-05 16:51:31 +0100302 int smallval = abs < DELTA_Q_SMALL ? 1 : 0;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000303 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Daviesf6936102016-09-05 16:51:31 +0100304
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000305 aom_write_symbol(w, AOMMIN(abs, DELTA_Q_SMALL), ec_ctx->delta_q_cdf,
306 DELTA_Q_PROBS + 1);
Thomas Daviesf6936102016-09-05 16:51:31 +0100307
308 if (!smallval) {
Wan-Teh Changccdd87f2018-08-08 18:40:29 -0700309 rem_bits = get_msb(abs - 1);
Thomas Daviesf6936102016-09-05 16:51:31 +0100310 thr = (1 << rem_bits) + 1;
Thomas Davies3b93e8e2017-09-20 09:59:07 +0100311 aom_write_literal(w, rem_bits - 1, 3);
Thomas Daviesf6936102016-09-05 16:51:31 +0100312 aom_write_literal(w, abs - thr, rem_bits);
Arild Fuldseth07441162016-08-15 15:07:52 +0200313 }
314 if (abs > 0) {
315 aom_write_bit(w, sign);
316 }
317}
Thomas Daviesf6936102016-09-05 16:51:31 +0100318
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700319static AOM_INLINE void write_delta_lflevel(const AV1_COMMON *cm,
320 const MACROBLOCKD *xd, int lf_id,
Cherma Rajan A8ba48242021-04-12 18:13:01 +0530321 int delta_lflevel,
322 int delta_lf_multi, aom_writer *w) {
Fangwen Fu231fe422017-04-24 17:52:29 -0700323 int sign = delta_lflevel < 0;
324 int abs = sign ? -delta_lflevel : delta_lflevel;
325 int rem_bits, thr;
326 int smallval = abs < DELTA_LF_SMALL ? 1 : 0;
Fangwen Fu231fe422017-04-24 17:52:29 -0700327 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Cherma Rajan A8ba48242021-04-12 18:13:01 +0530328 (void)cm;
Fangwen Fu231fe422017-04-24 17:52:29 -0700329
Cherma Rajan A8ba48242021-04-12 18:13:01 +0530330 if (delta_lf_multi) {
Imdad Sardharwallaf74b4ab2018-02-20 17:22:42 +0000331 assert(lf_id >= 0 && lf_id < (av1_num_planes(cm) > 1 ? FRAME_LF_COUNT
332 : FRAME_LF_COUNT - 2));
Cheng Chen880166a2017-10-02 17:48:48 -0700333 aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL),
334 ec_ctx->delta_lf_multi_cdf[lf_id], DELTA_LF_PROBS + 1);
335 } else {
336 aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
337 DELTA_LF_PROBS + 1);
338 }
Fangwen Fu231fe422017-04-24 17:52:29 -0700339
340 if (!smallval) {
Wan-Teh Changccdd87f2018-08-08 18:40:29 -0700341 rem_bits = get_msb(abs - 1);
Fangwen Fu231fe422017-04-24 17:52:29 -0700342 thr = (1 << rem_bits) + 1;
Thomas Davies3b93e8e2017-09-20 09:59:07 +0100343 aom_write_literal(w, rem_bits - 1, 3);
Fangwen Fu231fe422017-04-24 17:52:29 -0700344 aom_write_literal(w, abs - thr, rem_bits);
345 }
346 if (abs > 0) {
347 aom_write_bit(w, sign);
348 }
349}
Arild Fuldseth07441162016-08-15 15:07:52 +0200350
Vishesh686aa772020-04-13 14:40:12 +0530351static AOM_INLINE void pack_map_tokens(aom_writer *w, const TokenExtra **tp,
Jayasanker J4969e722020-09-18 20:36:15 +0530352 int n, int num, MapCdf map_pb_cdf) {
Vishesh686aa772020-04-13 14:40:12 +0530353 const TokenExtra *p = *tp;
Jayasanker J4969e722020-09-18 20:36:15 +0530354 const int palette_size_idx = n - PALETTE_MIN_SIZE;
hui su40b9e7f2017-07-13 18:15:56 -0700355 write_uniform(w, n, p->token); // The first color index.
356 ++p;
357 --num;
358 for (int i = 0; i < num; ++i) {
Aniket Wanare8268a652021-02-22 14:38:08 +0530359 assert((p->color_ctx >= 0) &&
360 (p->color_ctx < PALETTE_COLOR_INDEX_CONTEXTS));
Jayasanker J4969e722020-09-18 20:36:15 +0530361 aom_cdf_prob *color_map_cdf = map_pb_cdf[palette_size_idx][p->color_ctx];
362 aom_write_symbol(w, p->token, color_map_cdf, n);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700363 ++p;
364 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700365 *tp = p;
366}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700367
// Recursively packs the coefficient tokens of one transform-block subtree.
// A node is a leaf (its coefficients are written directly) when the current
// tx size matches the block's chosen inter tx size, or always for chroma
// planes (which do not split); otherwise the node is divided into sub
// transforms and each visible sub-block is visited in raster order.
// tp/tok_end are threaded through the recursion unchanged here; presumably
// consumed elsewhere in the pipeline -- confirm against callers.
static AOM_INLINE void pack_txb_tokens(
    aom_writer *w, AV1_COMMON *cm, MACROBLOCK *const x, const TokenExtra **tp,
    const TokenExtra *const tok_end, MACROBLOCKD *xd, MB_MODE_INFO *mbmi,
    int plane, BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth, int block,
    int blk_row, int blk_col, TX_SIZE tx_size, TOKEN_STATS *token_stats) {
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  // Sub-blocks outside the visible area carry no coefficients.
  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  const struct macroblockd_plane *const pd = &xd->plane[plane];
  // Chroma uses a single derived max tx size; luma reads the per-unit
  // inter tx size chosen by the encoder.
  const TX_SIZE plane_tx_size =
      plane ? av1_get_max_uv_txsize(mbmi->bsize, pd->subsampling_x,
                                    pd->subsampling_y)
            : mbmi->inter_tx_size[av1_get_txb_size_index(plane_bsize, blk_row,
                                                         blk_col)];

  if (tx_size == plane_tx_size || plane) {
    // Leaf: emit this transform block's coefficients.
    av1_write_coeffs_txb(cm, x, w, blk_row, blk_col, plane, block, tx_size);
#if CONFIG_RD_DEBUG
    // NOTE(review): tmp_token_stats is initialized but never populated
    // before its cost is accumulated -- looks vestigial; confirm.
    TOKEN_STATS tmp_token_stats;
    init_token_stats(&tmp_token_stats);
    token_stats->cost += tmp_token_stats.cost;
#endif
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];
    const int step = bsh * bsw;  // coefficient-block advance per sub-block
    const int row_end =
        AOMMIN(tx_size_high_unit[tx_size], max_blocks_high - blk_row);
    const int col_end =
        AOMMIN(tx_size_wide_unit[tx_size], max_blocks_wide - blk_col);

    assert(bsw > 0 && bsh > 0);

    // Recurse over the visible sub-blocks; 'block' indexes coefficients
    // and must advance even when recursion early-returns off-frame.
    for (int r = 0; r < row_end; r += bsh) {
      const int offsetr = blk_row + r;
      for (int c = 0; c < col_end; c += bsw) {
        const int offsetc = blk_col + c;
        pack_txb_tokens(w, cm, x, tp, tok_end, xd, mbmi, plane, plane_bsize,
                        bit_depth, block, offsetr, offsetc, sub_txs,
                        token_stats);
        block += step;
      }
    }
  }
}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700416
Urvang Joshi9dc909d2020-03-23 16:07:02 -0700417static INLINE void set_spatial_segment_id(
418 const CommonModeInfoParams *const mi_params, uint8_t *segment_ids,
chiyotsaid730cef2022-10-26 13:58:20 -0700419 BLOCK_SIZE bsize, int mi_row, int mi_col, uint8_t segment_id) {
Urvang Joshi9dc909d2020-03-23 16:07:02 -0700420 const int mi_offset = mi_row * mi_params->mi_cols + mi_col;
Frederic Barbier0c4a6322018-02-22 10:14:00 +0100421 const int bw = mi_size_wide[bsize];
422 const int bh = mi_size_high[bsize];
Urvang Joshi9dc909d2020-03-23 16:07:02 -0700423 const int xmis = AOMMIN(mi_params->mi_cols - mi_col, bw);
424 const int ymis = AOMMIN(mi_params->mi_rows - mi_row, bh);
Frederic Barbier0c4a6322018-02-22 10:14:00 +0100425
chiyotsaid730cef2022-10-26 13:58:20 -0700426 const int mi_stride = mi_params->mi_cols;
427
chiyotsaiae4339f2022-11-16 10:44:11 -0800428 set_segment_id(segment_ids, mi_offset, xmis, ymis, mi_stride, segment_id);
Frederic Barbier0c4a6322018-02-22 10:14:00 +0100429}
430
// Maps x in [0, max) to a code in [0, max) so that values near ref get the
// smallest codes: distances from ref are interleaved (positive distance d
// -> 2d-1, non-positive -> 2d) while that stays unambiguous, and the
// out-of-range remainder keeps its natural (or mirrored) order. Degenerate
// refs (0 or >= max-1) reduce to identity / reversal.
int av1_neg_interleave(int x, int ref, int max) {
  assert(x < max);
  if (ref == 0) return x;
  if (ref >= max - 1) return max - 1 - x;

  const int diff = x - ref;
  const int dist = abs(diff);
  if (2 * ref < max) {
    // ref lies in the lower half: distances up to ref can be interleaved.
    if (dist <= ref) return diff > 0 ? 2 * dist - 1 : 2 * dist;
    return x;
  }
  // ref lies in the upper half: distances below (max - ref) interleave.
  if (dist < max - ref) return diff > 0 ? 2 * dist - 1 : 2 * dist;
  return max - 1 - x;
}
454
// Writes the segment id of the current block when the segmentation map is
// enabled and being updated.
//
// For skip_txfm blocks no symbol is transmitted: the decoder will infer the
// spatially-predicted id, so both the reconstruction-side map
// (cm->cur_frame->seg_map) and the encoder-side map are overwritten with
// that prediction and mbmi->segment_id is patched to match. Otherwise the
// id is coded as a neg-interleaved offset from the spatial predictor.
static AOM_INLINE void write_segment_id(AV1_COMP *cpi, MACROBLOCKD *const xd,
                                        const MB_MODE_INFO *const mbmi,
                                        aom_writer *w,
                                        const struct segmentation *seg,
                                        struct segmentation_probs *segp,
                                        int skip_txfm) {
  if (!seg->enabled || !seg->update_map) return;

  AV1_COMMON *const cm = &cpi->common;
  int cdf_num;
  // Spatial prediction of the segment id from already-coded neighbors; also
  // selects which prediction CDF (cdf_num) is used below.
  const uint8_t pred = av1_get_spatial_seg_pred(
      cm, xd, &cdf_num, cpi->cyclic_refresh->skip_over4x4);
  const int mi_row = xd->mi_row;
  const int mi_col = xd->mi_col;

  if (skip_txfm) {
    // Still need to transmit tx size for intra blocks even if skip_txfm is
    // true. Changing segment_id may make the tx size become invalid, e.g
    // changing from lossless to lossy.
    assert(is_inter_block(mbmi) || !cpi->enc_seg.has_lossless_segment);

    set_spatial_segment_id(&cm->mi_params, cm->cur_frame->seg_map, mbmi->bsize,
                           mi_row, mi_col, pred);
    set_spatial_segment_id(&cm->mi_params, cpi->enc_seg.map, mbmi->bsize,
                           mi_row, mi_col, pred);
    /* mbmi is read only but we need to update segment_id */
    ((MB_MODE_INFO *)mbmi)->segment_id = pred;
    return;
  }

  // Code the id relative to the spatial predictor so values equal or close
  // to the prediction receive the smallest symbols.
  const int coded_id =
      av1_neg_interleave(mbmi->segment_id, pred, seg->last_active_segid + 1);
  aom_cdf_prob *pred_cdf = segp->spatial_pred_seg_cdf[cdf_num];
  aom_write_symbol(w, coded_id, pred_cdf, MAX_SEGMENTS);
  set_spatial_segment_id(&cm->mi_params, cm->cur_frame->seg_map, mbmi->bsize,
                         mi_row, mi_col, mbmi->segment_id);
}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700492
Thomas Davies315f5782017-06-14 15:14:55 +0100493#define WRITE_REF_BIT(bname, pname) \
Thomas Davies0fbd2b72017-09-12 10:49:45 +0100494 aom_write_symbol(w, bname, av1_get_pred_cdf_##pname(xd), 2)
Thomas Davies315f5782017-06-14 15:14:55 +0100495
Yaowu Xuc27fc142016-08-22 16:08:15 -0700496// This function encodes the reference frame
// Writes the reference frame(s) of the current block to the bitstream.
// The reference choice forms a decision tree: first uni- vs bi-directional
// compound (or single reference), then a series of binary splits over the
// reference set, each coded with its own context via WRITE_REF_BIT.
// When segment features pin the reference, nothing is written.
static AOM_INLINE void write_ref_frames(const AV1_COMMON *cm,
                                        const MACROBLOCKD *xd, aom_writer *w) {
  const MB_MODE_INFO *const mbmi = xd->mi[0];
  const int is_compound = has_second_ref(mbmi);
  const uint8_t segment_id = mbmi->segment_id;

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    // Reference frame is fully determined by the segment data; nothing to
    // signal.
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
           get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
  } else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP) ||
             segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV)) {
    // Skip/global-MV segments imply a single LAST_FRAME reference.
    assert(!is_compound);
    assert(mbmi->ref_frame[0] == LAST_FRAME);
  } else {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->current_frame.reference_mode == REFERENCE_MODE_SELECT) {
      if (is_comp_ref_allowed(mbmi->bsize))
        aom_write_symbol(w, is_compound, av1_get_reference_mode_cdf(xd), 2);
    } else {
      assert((!is_compound) ==
             (cm->current_frame.reference_mode == SINGLE_REFERENCE));
    }

    if (is_compound) {
      // Distinguish same-direction (unidirectional) pairs from
      // forward/backward (bidirectional) pairs.
      const COMP_REFERENCE_TYPE comp_ref_type = has_uni_comp_refs(mbmi)
                                                    ? UNIDIR_COMP_REFERENCE
                                                    : BIDIR_COMP_REFERENCE;
      aom_write_symbol(w, comp_ref_type, av1_get_comp_reference_type_cdf(xd),
                       2);

      if (comp_ref_type == UNIDIR_COMP_REFERENCE) {
        // bit distinguishes the (BWDREF, ALTREF) pair from LAST-based pairs.
        const int bit = mbmi->ref_frame[0] == BWDREF_FRAME;
        WRITE_REF_BIT(bit, uni_comp_ref_p);

        if (!bit) {
          assert(mbmi->ref_frame[0] == LAST_FRAME);
          // Narrow down the second reference: LAST2 vs {LAST3, GOLDEN}.
          const int bit1 = mbmi->ref_frame[1] == LAST3_FRAME ||
                           mbmi->ref_frame[1] == GOLDEN_FRAME;
          WRITE_REF_BIT(bit1, uni_comp_ref_p1);
          if (bit1) {
            const int bit2 = mbmi->ref_frame[1] == GOLDEN_FRAME;
            WRITE_REF_BIT(bit2, uni_comp_ref_p2);
          }
        } else {
          assert(mbmi->ref_frame[1] == ALTREF_FRAME);
        }

        return;
      }

      assert(comp_ref_type == BIDIR_COMP_REFERENCE);

      // Forward reference: split {LAST, LAST2} vs {LAST3, GOLDEN} first.
      const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
                       mbmi->ref_frame[0] == LAST3_FRAME);
      WRITE_REF_BIT(bit, comp_ref_p);

      if (!bit) {
        const int bit1 = mbmi->ref_frame[0] == LAST2_FRAME;
        WRITE_REF_BIT(bit1, comp_ref_p1);
      } else {
        const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
        WRITE_REF_BIT(bit2, comp_ref_p2);
      }

      // Backward reference: ALTREF vs {BWDREF, ALTREF2}.
      const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
      WRITE_REF_BIT(bit_bwd, comp_bwdref_p);

      if (!bit_bwd) {
        WRITE_REF_BIT(mbmi->ref_frame[1] == ALTREF2_FRAME, comp_bwdref_p1);
      }

    } else {
      // Single reference: first split backward refs {BWDREF..ALTREF} from
      // forward refs, then binary-search within the chosen group.
      const int bit0 = (mbmi->ref_frame[0] <= ALTREF_FRAME &&
                        mbmi->ref_frame[0] >= BWDREF_FRAME);
      WRITE_REF_BIT(bit0, single_ref_p1);

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
        WRITE_REF_BIT(bit1, single_ref_p2);

        if (!bit1) {
          WRITE_REF_BIT(mbmi->ref_frame[0] == ALTREF2_FRAME, single_ref_p6);
        }
      } else {
        const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
                          mbmi->ref_frame[0] == GOLDEN_FRAME);
        WRITE_REF_BIT(bit2, single_ref_p3);

        if (!bit2) {
          const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
          WRITE_REF_BIT(bit3, single_ref_p4);
        } else {
          const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
          WRITE_REF_BIT(bit4, single_ref_p5);
        }
      }
    }
  }
}
600
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700601static AOM_INLINE void write_filter_intra_mode_info(
602 const AV1_COMMON *cm, const MACROBLOCKD *xd, const MB_MODE_INFO *const mbmi,
603 aom_writer *w) {
Yue Chena9383622018-03-08 14:37:09 -0800604 if (av1_filter_intra_allowed(cm, mbmi)) {
Yue Chenb0571872017-12-18 18:12:59 -0800605 aom_write_symbol(w, mbmi->filter_intra_mode_info.use_filter_intra,
chiyotsai0f5cd052020-08-27 14:37:44 -0700606 xd->tile_ctx->filter_intra_cdfs[mbmi->bsize], 2);
Yue Chenb0571872017-12-18 18:12:59 -0800607 if (mbmi->filter_intra_mode_info.use_filter_intra) {
hui su5db97432016-10-14 16:10:14 -0700608 const FILTER_INTRA_MODE mode =
Yue Chenb0571872017-12-18 18:12:59 -0800609 mbmi->filter_intra_mode_info.filter_intra_mode;
Yue Chen994dba22017-12-19 15:27:26 -0800610 aom_write_symbol(w, mode, xd->tile_ctx->filter_intra_mode_cdf,
Yue Chen63ce36f2017-10-10 23:37:31 -0700611 FILTER_INTRA_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700612 }
613 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700614}
615
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700616static AOM_INLINE void write_angle_delta(aom_writer *w, int angle_delta,
617 aom_cdf_prob *cdf) {
Luc Trudeau866da792018-02-12 11:13:34 -0500618 aom_write_symbol(w, angle_delta + MAX_ANGLE_DELTA, cdf,
619 2 * MAX_ANGLE_DELTA + 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700620}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700621
Urvang Joshi6237b882020-03-26 15:02:26 -0700622static AOM_INLINE void write_mb_interp_filter(AV1_COMMON *const cm,
Cherma Rajan Ad0f59a72021-04-13 12:43:54 +0530623 ThreadData *td, aom_writer *w) {
624 const MACROBLOCKD *xd = &td->mb.e_mbd;
Yue Chen53b53f02018-03-29 14:31:23 -0700625 const MB_MODE_INFO *const mbmi = xd->mi[0];
Thomas Davies77c7c402017-01-11 17:58:54 +0000626 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Jingning Han203b1d32017-01-12 16:00:13 -0800627
Debargha Mukherjee0df711f2017-05-02 16:00:20 -0700628 if (!av1_is_interp_needed(xd)) {
Urvang Joshi6237b882020-03-26 15:02:26 -0700629 int_interpfilters filters = av1_broadcast_interp_filter(
630 av1_unswitchable_filter(cm->features.interp_filter));
Ravi Chaudhary1e4f94b2019-06-20 16:19:49 +0530631 assert(mbmi->interp_filters.as_int == filters.as_int);
632 (void)filters;
Debargha Mukherjee0df711f2017-05-02 16:00:20 -0700633 return;
634 }
Urvang Joshi6237b882020-03-26 15:02:26 -0700635 if (cm->features.interp_filter == SWITCHABLE) {
Jingning Han203b1d32017-01-12 16:00:13 -0800636 int dir;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700637 for (dir = 0; dir < 2; ++dir) {
Jingning Han4a173352018-03-01 17:54:07 -0800638 const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
639 InterpFilter filter =
640 av1_extract_interp_filter(mbmi->interp_filters, dir);
641 aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
642 SWITCHABLE_FILTERS);
Cherma Rajan Ad0f59a72021-04-13 12:43:54 +0530643 ++td->interp_filter_selected[filter];
Tarundeep Singh4243e622021-04-20 16:10:22 +0530644 if (cm->seq_params->enable_dual_filter == 0) return;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700645 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700646 }
647}
648
hui su33567b22017-04-30 16:40:19 -0700649// Transmit color values with delta encoding. Write the first value as
650// literal, and the deltas between each value and the previous one. "min_val" is
651// the smallest possible value of the deltas.
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700652static AOM_INLINE void delta_encode_palette_colors(const int *colors, int num,
653 int bit_depth, int min_val,
654 aom_writer *w) {
hui su33567b22017-04-30 16:40:19 -0700655 if (num <= 0) return;
hui sufa4ff852017-05-15 12:20:50 -0700656 assert(colors[0] < (1 << bit_depth));
hui su33567b22017-04-30 16:40:19 -0700657 aom_write_literal(w, colors[0], bit_depth);
658 if (num == 1) return;
659 int max_delta = 0;
660 int deltas[PALETTE_MAX_SIZE];
661 memset(deltas, 0, sizeof(deltas));
662 for (int i = 1; i < num; ++i) {
hui sufa4ff852017-05-15 12:20:50 -0700663 assert(colors[i] < (1 << bit_depth));
hui su33567b22017-04-30 16:40:19 -0700664 const int delta = colors[i] - colors[i - 1];
665 deltas[i - 1] = delta;
666 assert(delta >= min_val);
667 if (delta > max_delta) max_delta = delta;
668 }
669 const int min_bits = bit_depth - 3;
670 int bits = AOMMAX(av1_ceil_log2(max_delta + 1 - min_val), min_bits);
hui sufa4ff852017-05-15 12:20:50 -0700671 assert(bits <= bit_depth);
hui su33567b22017-04-30 16:40:19 -0700672 int range = (1 << bit_depth) - colors[0] - min_val;
hui sud13c24a2017-04-07 16:13:07 -0700673 aom_write_literal(w, bits - min_bits, 2);
hui su33567b22017-04-30 16:40:19 -0700674 for (int i = 0; i < num - 1; ++i) {
675 aom_write_literal(w, deltas[i] - min_val, bits);
676 range -= deltas[i];
677 bits = AOMMIN(bits, av1_ceil_log2(range));
hui sud13c24a2017-04-07 16:13:07 -0700678 }
679}
680
hui su33567b22017-04-30 16:40:19 -0700681// Transmit luma palette color values. First signal if each color in the color
682// cache is used. Those colors that are not in the cache are transmitted with
683// delta encoding.
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700684static AOM_INLINE void write_palette_colors_y(
685 const MACROBLOCKD *const xd, const PALETTE_MODE_INFO *const pmi,
686 int bit_depth, aom_writer *w) {
hui su33567b22017-04-30 16:40:19 -0700687 const int n = pmi->palette_size[0];
hui su33567b22017-04-30 16:40:19 -0700688 uint16_t color_cache[2 * PALETTE_MAX_SIZE];
Hui Su3748bc22017-08-23 11:30:41 -0700689 const int n_cache = av1_get_palette_cache(xd, 0, color_cache);
hui su33567b22017-04-30 16:40:19 -0700690 int out_cache_colors[PALETTE_MAX_SIZE];
691 uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
692 const int n_out_cache =
693 av1_index_color_cache(color_cache, n_cache, pmi->palette_colors, n,
694 cache_color_found, out_cache_colors);
695 int n_in_cache = 0;
696 for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
697 const int found = cache_color_found[i];
698 aom_write_bit(w, found);
699 n_in_cache += found;
700 }
701 assert(n_in_cache + n_out_cache == n);
702 delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 1, w);
703}
704
// Write chroma palette color values. U channel is handled similarly to the luma
// channel. For v channel, either use delta encoding or transmit raw values
// directly, whichever costs less.
static AOM_INLINE void write_palette_colors_uv(
    const MACROBLOCKD *const xd, const PALETTE_MODE_INFO *const pmi,
    int bit_depth, aom_writer *w) {
  const int n = pmi->palette_size[1];
  // U and V colors live in consecutive PALETTE_MAX_SIZE slices after luma.
  const uint16_t *colors_u = pmi->palette_colors + PALETTE_MAX_SIZE;
  const uint16_t *colors_v = pmi->palette_colors + 2 * PALETTE_MAX_SIZE;
  // U channel colors.
  uint16_t color_cache[2 * PALETTE_MAX_SIZE];
  const int n_cache = av1_get_palette_cache(xd, 1, color_cache);
  int out_cache_colors[PALETTE_MAX_SIZE];
  uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
  const int n_out_cache = av1_index_color_cache(
      color_cache, n_cache, colors_u, n, cache_color_found, out_cache_colors);
  int n_in_cache = 0;
  // Signal, for each cache entry, whether it is reused by this palette.
  for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
    const int found = cache_color_found[i];
    aom_write_bit(w, found);
    n_in_cache += found;
  }
  delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 0, w);

  // V channel colors. Don't use color cache as the colors are not sorted.
  const int max_val = 1 << bit_depth;
  int zero_count = 0, min_bits_v = 0;
  int bits_v =
      av1_get_palette_delta_bits_v(pmi, bit_depth, &zero_count, &min_bits_v);
  // Estimate the bit cost of both representations and pick the cheaper one.
  const int rate_using_delta =
      2 + bit_depth + (bits_v + 1) * (n - 1) - zero_count;
  const int rate_using_raw = bit_depth * n;
  if (rate_using_delta < rate_using_raw) {  // delta encoding
    assert(colors_v[0] < (1 << bit_depth));
    aom_write_bit(w, 1);
    aom_write_literal(w, bits_v - min_bits_v, 2);
    aom_write_literal(w, colors_v[0], bit_depth);
    for (int i = 1; i < n; ++i) {
      assert(colors_v[i] < (1 << bit_depth));
      if (colors_v[i] == colors_v[i - 1]) {  // No need to signal sign bit.
        aom_write_literal(w, 0, bits_v);
        continue;
      }
      const int delta = abs((int)colors_v[i] - colors_v[i - 1]);
      const int sign_bit = colors_v[i] < colors_v[i - 1];
      // Code whichever of |delta| and max_val - |delta| fits in fewer bits,
      // flipping the sign bit in the wrapped case so the decoder can invert.
      if (delta <= max_val - delta) {
        aom_write_literal(w, delta, bits_v);
        aom_write_bit(w, sign_bit);
      } else {
        aom_write_literal(w, max_val - delta, bits_v);
        aom_write_bit(w, !sign_bit);
      }
    }
  } else {  // Transmit raw values.
    aom_write_bit(w, 0);
    for (int i = 0; i < n; ++i) {
      assert(colors_v[i] < (1 << bit_depth));
      aom_write_literal(w, colors_v[i], bit_depth);
    }
  }
}
hui sud13c24a2017-04-07 16:13:07 -0700766
// Writes the palette mode signaling for one block: whether a palette is used
// for the luma plane (DC_PRED only) and for the chroma planes (UV_DC_PRED
// only), plus, when used, the palette size and the palette colors.
static AOM_INLINE void write_palette_mode_info(const AV1_COMMON *cm,
                                               const MACROBLOCKD *xd,
                                               const MB_MODE_INFO *const mbmi,
                                               aom_writer *w) {
  const int num_planes = av1_num_planes(cm);
  const BLOCK_SIZE bsize = mbmi->bsize;
  assert(av1_allow_palette(cm->features.allow_screen_content_tools, bsize));
  const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
  const int bsize_ctx = av1_get_palette_bsize_ctx(bsize);

  // Luma palette is only possible when the luma mode is DC_PRED.
  if (mbmi->mode == DC_PRED) {
    const int n = pmi->palette_size[0];
    const int palette_y_mode_ctx = av1_get_palette_mode_ctx(xd);
    aom_write_symbol(
        w, n > 0,
        xd->tile_ctx->palette_y_mode_cdf[bsize_ctx][palette_y_mode_ctx], 2);
    if (n > 0) {
      // Palette size is coded as an offset from PALETTE_MIN_SIZE.
      aom_write_symbol(w, n - PALETTE_MIN_SIZE,
                       xd->tile_ctx->palette_y_size_cdf[bsize_ctx],
                       PALETTE_SIZES);
      write_palette_colors_y(xd, pmi, cm->seq_params->bit_depth, w);
    }
  }

  // Chroma palette requires UV_DC_PRED and a block that carries chroma.
  const int uv_dc_pred =
      num_planes > 1 && mbmi->uv_mode == UV_DC_PRED && xd->is_chroma_ref;
  if (uv_dc_pred) {
    const int n = pmi->palette_size[1];
    // Context depends on whether a luma palette was used.
    const int palette_uv_mode_ctx = (pmi->palette_size[0] > 0);
    aom_write_symbol(w, n > 0,
                     xd->tile_ctx->palette_uv_mode_cdf[palette_uv_mode_ctx], 2);
    if (n > 0) {
      aom_write_symbol(w, n - PALETTE_MIN_SIZE,
                       xd->tile_ctx->palette_uv_size_cdf[bsize_ctx],
                       PALETTE_SIZES);
      write_palette_colors_uv(xd, pmi, cm->seq_params->bit_depth, w);
    }
  }
}
806
// Signals the transform type of one transform block. The tx_type is written
// only when the extended-transform set for this size has more than one entry,
// the effective qindex is nonzero, and the block is neither skipped nor in a
// SEG_LVL_SKIP segment; otherwise the decoder infers the type.
void av1_write_tx_type(const AV1_COMMON *const cm, const MACROBLOCKD *xd,
                       TX_TYPE tx_type, TX_SIZE tx_size, aom_writer *w) {
  MB_MODE_INFO *mbmi = xd->mi[0];
  const FeatureFlags *const features = &cm->features;
  const int is_inter = is_inter_block(mbmi);
  if (get_ext_tx_types(tx_size, is_inter, features->reduced_tx_set_used) > 1 &&
      ((!cm->seg.enabled && cm->quant_params.base_qindex > 0) ||
       (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
      !mbmi->skip_txfm &&
      !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
    FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
    const TX_SIZE square_tx_size = txsize_sqr_map[tx_size];
    const TxSetType tx_set_type = av1_get_ext_tx_set_type(
        tx_size, is_inter, features->reduced_tx_set_used);
    const int eset =
        get_ext_tx_set(tx_size, is_inter, features->reduced_tx_set_used);
    // eset == 0 should correspond to a set with only DCT_DCT and there
    // is no need to send the tx_type
    assert(eset > 0);
    assert(av1_ext_tx_used[tx_set_type][tx_type]);
    if (is_inter) {
      aom_write_symbol(w, av1_ext_tx_ind[tx_set_type][tx_type],
                       ec_ctx->inter_ext_tx_cdf[eset][square_tx_size],
                       av1_num_ext_tx_set[tx_set_type]);
    } else {
      // Intra blocks condition the tx_type CDF on the prediction direction,
      // which for filter-intra blocks is derived from the filter-intra mode.
      PREDICTION_MODE intra_dir;
      if (mbmi->filter_intra_mode_info.use_filter_intra)
        intra_dir =
            fimode_to_intradir[mbmi->filter_intra_mode_info.filter_intra_mode];
      else
        intra_dir = mbmi->mode;
      aom_write_symbol(
          w, av1_ext_tx_ind[tx_set_type][tx_type],
          ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][intra_dir],
          av1_num_ext_tx_set[tx_set_type]);
    }
  }
}
845
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700846static AOM_INLINE void write_intra_y_mode_nonkf(FRAME_CONTEXT *frame_ctx,
847 BLOCK_SIZE bsize,
848 PREDICTION_MODE mode,
849 aom_writer *w) {
Hui Su814f41e2017-10-02 12:21:24 -0700850 aom_write_symbol(w, mode, frame_ctx->y_mode_cdf[size_group_lookup[bsize]],
Jingning Hanf04254f2017-03-08 10:51:35 -0800851 INTRA_MODES);
Jingning Hanf04254f2017-03-08 10:51:35 -0800852}
853
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700854static AOM_INLINE void write_intra_uv_mode(FRAME_CONTEXT *frame_ctx,
855 UV_PREDICTION_MODE uv_mode,
856 PREDICTION_MODE y_mode,
857 CFL_ALLOWED_TYPE cfl_allowed,
858 aom_writer *w) {
David Michael Barrcb3a8ef2018-01-06 15:48:49 +0900859 aom_write_symbol(w, uv_mode, frame_ctx->uv_mode_cdf[cfl_allowed][y_mode],
860 UV_INTRA_MODES - !cfl_allowed);
Jingning Hanf04254f2017-03-08 10:51:35 -0800861}
862
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -0700863static AOM_INLINE void write_cfl_alphas(FRAME_CONTEXT *const ec_ctx,
864 uint8_t idx, int8_t joint_sign,
865 aom_writer *w) {
David Michael Barrf6eaa152017-07-19 19:42:28 +0900866 aom_write_symbol(w, joint_sign, ec_ctx->cfl_sign_cdf, CFL_JOINT_SIGNS);
867 // Magnitudes are only signaled for nonzero codes.
868 if (CFL_SIGN_U(joint_sign) != CFL_SIGN_ZERO) {
869 aom_cdf_prob *cdf_u = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_U(joint_sign)];
870 aom_write_symbol(w, CFL_IDX_U(idx), cdf_u, CFL_ALPHABET_SIZE);
871 }
872 if (CFL_SIGN_V(joint_sign) != CFL_SIGN_ZERO) {
873 aom_cdf_prob *cdf_v = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_V(joint_sign)];
874 aom_write_symbol(w, CFL_IDX_V(idx), cdf_v, CFL_ALPHABET_SIZE);
875 }
Luc Trudeauf5334002017-04-25 12:21:26 -0400876}
Luc Trudeauf5334002017-04-25 12:21:26 -0400877
// Signals the CDEF strength index for the 64x64 CDEF unit containing this
// block, at most once per unit, attached to the first non-skip coded block.
static AOM_INLINE void write_cdef(AV1_COMMON *cm, MACROBLOCKD *const xd,
                                  aom_writer *w, int skip) {
  // CDEF is disabled (and therefore never signaled) for lossless frames and
  // frames that allow intra block copy.
  if (cm->features.coded_lossless || cm->features.allow_intrabc) return;

  // At the start of a superblock, mark that we haven't yet written CDEF
  // strengths for any of the CDEF units contained in this superblock.
  const int sb_mask = (cm->seq_params->mib_size - 1);
  const int mi_row_in_sb = (xd->mi_row & sb_mask);
  const int mi_col_in_sb = (xd->mi_col & sb_mask);
  if (mi_row_in_sb == 0 && mi_col_in_sb == 0) {
    xd->cdef_transmitted[0] = xd->cdef_transmitted[1] =
        xd->cdef_transmitted[2] = xd->cdef_transmitted[3] = false;
  }

  // CDEF unit size is 64x64 irrespective of the superblock size.
  const int cdef_size = 1 << (6 - MI_SIZE_LOG2);

  // Find index of this CDEF unit in this superblock.
  // A 128x128 superblock holds four 64x64 units; otherwise there is one.
  const int index_mask = cdef_size;
  const int cdef_unit_row_in_sb = ((xd->mi_row & index_mask) != 0);
  const int cdef_unit_col_in_sb = ((xd->mi_col & index_mask) != 0);
  const int index = (cm->seq_params->sb_size == BLOCK_128X128)
                        ? cdef_unit_col_in_sb + 2 * cdef_unit_row_in_sb
                        : 0;

  // Write CDEF strength to the first non-skip coding block in this CDEF unit.
  if (!xd->cdef_transmitted[index] && !skip) {
    // CDEF strength for this CDEF unit needs to be stored in the MB_MODE_INFO
    // of the 1st block in this CDEF unit.
    const int first_block_mask = ~(cdef_size - 1);
    const CommonModeInfoParams *const mi_params = &cm->mi_params;
    const int grid_idx =
        get_mi_grid_idx(mi_params, xd->mi_row & first_block_mask,
                        xd->mi_col & first_block_mask);
    const MB_MODE_INFO *const mbmi = mi_params->mi_grid_base[grid_idx];
    aom_write_literal(w, mbmi->cdef_strength, cm->cdef_info.cdef_bits);
    xd->cdef_transmitted[index] = true;
  }
}
917
// Writes the segment id for an inter block. Depending on the segmentation
// configuration the id is coded before ("preskip") or after the skip flag,
// and may be predicted temporally from the previous frame's segment map.
static AOM_INLINE void write_inter_segment_id(
    AV1_COMP *cpi, MACROBLOCKD *const xd, aom_writer *w,
    const struct segmentation *const seg, struct segmentation_probs *const segp,
    int skip, int preskip) {
  MB_MODE_INFO *const mbmi = xd->mi[0];
  AV1_COMMON *const cm = &cpi->common;
  const int mi_row = xd->mi_row;
  const int mi_col = xd->mi_col;

  if (seg->update_map) {
    // Only act in the pass (pre- or post-skip) that matches the frame-level
    // segid_preskip setting.
    if (preskip) {
      if (!seg->segid_preskip) return;
    } else {
      if (seg->segid_preskip) return;
      if (skip) {
        // Skipped blocks code the id directly and clear the temporal
        // prediction flag so later state stays consistent.
        write_segment_id(cpi, xd, mbmi, w, seg, segp, 1);
        if (seg->temporal_update) mbmi->seg_id_predicted = 0;
        return;
      }
    }
    if (seg->temporal_update) {
      // Signal whether the id is predicted from the previous frame; code it
      // explicitly only when prediction fails.
      const int pred_flag = mbmi->seg_id_predicted;
      aom_cdf_prob *pred_cdf = av1_get_pred_cdf_seg_id(segp, xd);
      aom_write_symbol(w, pred_flag, pred_cdf, 2);
      if (!pred_flag) {
        write_segment_id(cpi, xd, mbmi, w, seg, segp, 0);
      }
      if (pred_flag) {
        // Propagate the predicted id into the current frame's segment map
        // (write_segment_id does this itself in the explicit path).
        set_spatial_segment_id(&cm->mi_params, cm->cur_frame->seg_map,
                               mbmi->bsize, mi_row, mi_col, mbmi->segment_id);
      }
    } else {
      write_segment_id(cpi, xd, mbmi, w, seg, segp, 0);
    }
  }
}
954
// If delta q is present, writes delta_q index.
// Also writes delta_q loop filter levels, if present.
static AOM_INLINE void write_delta_q_params(AV1_COMMON *const cm,
                                            MACROBLOCKD *const xd, int skip,
                                            aom_writer *w) {
  const DeltaQInfo *const delta_q_info = &cm->delta_q_info;

  if (delta_q_info->delta_q_present_flag) {
    const MB_MODE_INFO *const mbmi = xd->mi[0];
    const BLOCK_SIZE bsize = mbmi->bsize;
    // Deltas are signaled at most once per superblock, by its first coded
    // block (the block at the superblock's top-left corner).
    const int super_block_upper_left =
        ((xd->mi_row & (cm->seq_params->mib_size - 1)) == 0) &&
        ((xd->mi_col & (cm->seq_params->mib_size - 1)) == 0);

    // A superblock-sized block that is skipped carries no delta information.
    if ((bsize != cm->seq_params->sb_size || skip == 0) &&
        super_block_upper_left) {
      assert(mbmi->current_qindex > 0);
      // Deltas are transmitted scaled down by the configured resolution,
      // relative to the running base tracked in xd.
      const int reduced_delta_qindex =
          (mbmi->current_qindex - xd->current_base_qindex) /
          delta_q_info->delta_q_res;
      write_delta_qindex(xd, reduced_delta_qindex, w);
      xd->current_base_qindex = mbmi->current_qindex;
      if (delta_q_info->delta_lf_present_flag) {
        if (delta_q_info->delta_lf_multi) {
          // One delta per loop-filter level; chroma levels are omitted for
          // monochrome content.
          const int frame_lf_count =
              av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
          for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id) {
            int reduced_delta_lflevel =
                (mbmi->delta_lf[lf_id] - xd->delta_lf[lf_id]) /
                delta_q_info->delta_lf_res;
            write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, 1, w);
            xd->delta_lf[lf_id] = mbmi->delta_lf[lf_id];
          }
        } else {
          // A single delta shared by all loop-filter levels.
          int reduced_delta_lflevel =
              (mbmi->delta_lf_from_base - xd->delta_lf_from_base) /
              delta_q_info->delta_lf_res;
          write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, 0, w);
          xd->delta_lf_from_base = mbmi->delta_lf_from_base;
        }
      }
    }
  }
}
999
// Writes all intra prediction signaling for one block, in bitstream order:
// luma mode, luma angle delta, chroma mode (with CfL parameters when used),
// chroma angle delta, palette information, and filter-intra information.
static AOM_INLINE void write_intra_prediction_modes(const AV1_COMMON *cm,
                                                    MACROBLOCKD *const xd,
                                                    int is_keyframe,
                                                    aom_writer *w) {
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  const MB_MODE_INFO *const mbmi = xd->mi[0];
  const PREDICTION_MODE mode = mbmi->mode;
  const BLOCK_SIZE bsize = mbmi->bsize;

  // Y mode.
  if (is_keyframe) {
    // Keyframes condition the luma-mode CDF on the above/left neighbors.
    const MB_MODE_INFO *const above_mi = xd->above_mbmi;
    const MB_MODE_INFO *const left_mi = xd->left_mbmi;
    write_intra_y_mode_kf(ec_ctx, mbmi, above_mi, left_mi, mode, w);
  } else {
    write_intra_y_mode_nonkf(ec_ctx, bsize, mode, w);
  }

  // Y angle delta.
  const int use_angle_delta = av1_use_angle_delta(bsize);
  if (use_angle_delta && av1_is_directional_mode(mode)) {
    write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_Y],
                      ec_ctx->angle_delta_cdf[mode - V_PRED]);
  }

  // UV mode and UV angle delta.
  if (!cm->seq_params->monochrome && xd->is_chroma_ref) {
    const UV_PREDICTION_MODE uv_mode = mbmi->uv_mode;
    write_intra_uv_mode(ec_ctx, uv_mode, mode, is_cfl_allowed(xd), w);
    if (uv_mode == UV_CFL_PRED)
      write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
    // The chroma angle delta is keyed off the equivalent luma-domain mode.
    const PREDICTION_MODE intra_mode = get_uv_mode(uv_mode);
    if (use_angle_delta && av1_is_directional_mode(intra_mode)) {
      write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_UV],
                        ec_ctx->angle_delta_cdf[intra_mode - V_PRED]);
    }
  }

  // Palette.
  if (av1_allow_palette(cm->features.allow_screen_content_tools, bsize)) {
    write_palette_mode_info(cm, xd, mbmi, w);
  }

  // Filter intra.
  write_filter_intra_mode_info(cm, xd, mbmi, w);
}
1046
Remya73ae2ba2019-08-12 19:19:40 +05301047static INLINE int16_t mode_context_analyzer(
Remya0cce44c2019-08-16 11:57:24 +05301048 const int16_t mode_context, const MV_REFERENCE_FRAME *const rf) {
1049 if (rf[1] <= INTRA_FRAME) return mode_context;
Remya73ae2ba2019-08-12 19:19:40 +05301050
Remya0cce44c2019-08-16 11:57:24 +05301051 const int16_t newmv_ctx = mode_context & NEWMV_CTX_MASK;
1052 const int16_t refmv_ctx = (mode_context >> REFMV_OFFSET) & REFMV_CTX_MASK;
Remya73ae2ba2019-08-12 19:19:40 +05301053
1054 const int16_t comp_ctx = compound_mode_ctx_map[refmv_ctx >> 1][AOMMIN(
1055 newmv_ctx, COMP_NEWMV_CTXS - 1)];
1056 return comp_ctx;
1057}
1058
Remya0cce44c2019-08-16 11:57:24 +05301059static INLINE int_mv get_ref_mv_from_stack(
1060 int ref_idx, const MV_REFERENCE_FRAME *ref_frame, int ref_mv_idx,
1061 const MB_MODE_INFO_EXT_FRAME *mbmi_ext_frame) {
Remya73ae2ba2019-08-12 19:19:40 +05301062 const int8_t ref_frame_type = av1_ref_frame_type(ref_frame);
Remya0cce44c2019-08-16 11:57:24 +05301063 const CANDIDATE_MV *curr_ref_mv_stack = mbmi_ext_frame->ref_mv_stack;
Remya73ae2ba2019-08-12 19:19:40 +05301064
1065 if (ref_frame[1] > INTRA_FRAME) {
1066 assert(ref_idx == 0 || ref_idx == 1);
1067 return ref_idx ? curr_ref_mv_stack[ref_mv_idx].comp_mv
1068 : curr_ref_mv_stack[ref_mv_idx].this_mv;
1069 }
1070
1071 assert(ref_idx == 0);
Remya0cce44c2019-08-16 11:57:24 +05301072 return ref_mv_idx < mbmi_ext_frame->ref_mv_count
Remya73ae2ba2019-08-12 19:19:40 +05301073 ? curr_ref_mv_stack[ref_mv_idx].this_mv
Remya0cce44c2019-08-16 11:57:24 +05301074 : mbmi_ext_frame->global_mvs[ref_frame_type];
Remya73ae2ba2019-08-12 19:19:40 +05301075}
1076
1077static INLINE int_mv get_ref_mv(const MACROBLOCK *x, int ref_idx) {
1078 const MACROBLOCKD *xd = &x->e_mbd;
1079 const MB_MODE_INFO *mbmi = xd->mi[0];
1080 int ref_mv_idx = mbmi->ref_mv_idx;
1081 if (mbmi->mode == NEAR_NEWMV || mbmi->mode == NEW_NEARMV) {
1082 assert(has_second_ref(mbmi));
1083 ref_mv_idx += 1;
1084 }
1085 return get_ref_mv_from_stack(ref_idx, mbmi->ref_frame, ref_mv_idx,
Remya0cce44c2019-08-16 11:57:24 +05301086 x->mbmi_ext_frame);
Remya73ae2ba2019-08-12 19:19:40 +05301087}
1088
// Packs all mode information for one block of an inter frame into the
// bitstream: segment id, skip flags, CDEF / delta-q side info, the
// intra-vs-inter flag and, for inter blocks, reference frames, prediction
// mode, DRL index, motion vectors, inter-intra and compound-prediction
// parameters, and finally the interpolation filter. The write order here
// must mirror the decoder's read order exactly — do not reorder calls.
static AOM_INLINE void pack_inter_mode_mvs(AV1_COMP *cpi, ThreadData *const td,
                                           aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCK *const x = &td->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  const struct segmentation *const seg = &cm->seg;
  struct segmentation_probs *const segp = &ec_ctx->seg;
  const MB_MODE_INFO *const mbmi = xd->mi[0];
  const MB_MODE_INFO_EXT_FRAME *const mbmi_ext_frame = x->mbmi_ext_frame;
  const PREDICTION_MODE mode = mbmi->mode;
  const uint8_t segment_id = mbmi->segment_id;
  const BLOCK_SIZE bsize = mbmi->bsize;
  const int allow_hp = cm->features.allow_high_precision_mv;
  const int is_inter = is_inter_block(mbmi);
  const int is_compound = has_second_ref(mbmi);
  int ref;

  // Pre-skip segment id (written only when the segmentation config says so).
  write_inter_segment_id(cpi, xd, w, seg, segp, 0, 1);

  write_skip_mode(cm, xd, segment_id, mbmi, w);

  assert(IMPLIES(mbmi->skip_mode, mbmi->skip_txfm));
  // skip_mode implies skip, so the skip bit is coded explicitly only when
  // skip_mode is off.
  const int skip =
      mbmi->skip_mode ? 1 : write_skip(cm, xd, segment_id, mbmi, w);

  // Post-skip segment id (the skip value can influence its coding).
  write_inter_segment_id(cpi, xd, w, seg, segp, skip, 0);

  write_cdef(cm, xd, w, skip);

  write_delta_q_params(cm, xd, skip, w);

  if (!mbmi->skip_mode) write_is_inter(cm, xd, mbmi->segment_id, w, is_inter);

  // skip_mode blocks carry no further mode info; the decoder derives it.
  if (mbmi->skip_mode) return;

  if (!is_inter) {
    write_intra_prediction_modes(cm, xd, 0, w);
  } else {
    int16_t mode_ctx;

    // Neighbor reference counts feed the reference-frame signaling contexts.
    av1_collect_neighbors_ref_counts(xd);

    write_ref_frames(cm, xd, w);

    mode_ctx =
        mode_context_analyzer(mbmi_ext_frame->mode_context, mbmi->ref_frame);

    // If segment skip is not enabled code the mode.
    if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (is_inter_compound_mode(mode))
        write_inter_compound_mode(xd, w, mode, mode_ctx);
      else if (is_inter_singleref_mode(mode))
        write_inter_mode(w, mode, ec_ctx, mode_ctx);

      // Modes that pick among multiple stack candidates signal a DRL index.
      if (mode == NEWMV || mode == NEW_NEWMV || have_nearmv_in_inter_mode(mode))
        write_drl_idx(ec_ctx, mbmi, mbmi_ext_frame, w);
      else
        assert(mbmi->ref_mv_idx == 0);
    }

    // Encode the MV residual(s) for each side that carries a NEW component.
    if (mode == NEWMV || mode == NEW_NEWMV) {
      for (ref = 0; ref < 1 + is_compound; ++ref) {
        nmv_context *nmvc = &ec_ctx->nmvc;
        const int_mv ref_mv = get_ref_mv(x, ref);
        av1_encode_mv(cpi, w, td, &mbmi->mv[ref].as_mv, &ref_mv.as_mv, nmvc,
                      allow_hp);
      }
    } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
      nmv_context *nmvc = &ec_ctx->nmvc;
      const int_mv ref_mv = get_ref_mv(x, 1);
      av1_encode_mv(cpi, w, td, &mbmi->mv[1].as_mv, &ref_mv.as_mv, nmvc,
                    allow_hp);
    } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
      nmv_context *nmvc = &ec_ctx->nmvc;
      const int_mv ref_mv = get_ref_mv(x, 0);
      av1_encode_mv(cpi, w, td, &mbmi->mv[0].as_mv, &ref_mv.as_mv, nmvc,
                    allow_hp);
    }

    // Inter-intra signaling (only when the sequence/frame config allows it).
    if (cpi->common.current_frame.reference_mode != COMPOUND_REFERENCE &&
        cpi->common.seq_params->enable_interintra_compound &&
        is_interintra_allowed(mbmi)) {
      const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
      const int bsize_group = size_group_lookup[bsize];
      aom_write_symbol(w, interintra, ec_ctx->interintra_cdf[bsize_group], 2);
      if (interintra) {
        aom_write_symbol(w, mbmi->interintra_mode,
                         ec_ctx->interintra_mode_cdf[bsize_group],
                         INTERINTRA_MODES);
        if (av1_is_wedge_used(bsize)) {
          aom_write_symbol(w, mbmi->use_wedge_interintra,
                           ec_ctx->wedge_interintra_cdf[bsize], 2);
          if (mbmi->use_wedge_interintra) {
            aom_write_symbol(w, mbmi->interintra_wedge_index,
                             ec_ctx->wedge_idx_cdf[bsize], MAX_WEDGE_TYPES);
          }
        }
      }
    }

    if (mbmi->ref_frame[1] != INTRA_FRAME) write_motion_mode(cm, xd, mbmi, w);

    // First write idx to indicate current compound inter prediction mode group
    // Group A (0): dist_wtd_comp, compound_average
    // Group B (1): interintra, compound_diffwtd, wedge
    if (has_second_ref(mbmi)) {
      const int masked_compound_used = is_any_masked_compound_used(bsize) &&
                                       cm->seq_params->enable_masked_compound;

      if (masked_compound_used) {
        const int ctx_comp_group_idx = get_comp_group_idx_context(xd);
        aom_write_symbol(w, mbmi->comp_group_idx,
                         ec_ctx->comp_group_idx_cdf[ctx_comp_group_idx], 2);
      } else {
        assert(mbmi->comp_group_idx == 0);
      }

      if (mbmi->comp_group_idx == 0) {
        if (mbmi->compound_idx)
          assert(mbmi->interinter_comp.type == COMPOUND_AVERAGE);

        // compound_idx selects dist-weighted vs. averaged compound; it is
        // signaled only when the sequence enables dist-weighted compound.
        if (cm->seq_params->order_hint_info.enable_dist_wtd_comp) {
          const int comp_index_ctx = get_comp_index_context(cm, xd);
          aom_write_symbol(w, mbmi->compound_idx,
                           ec_ctx->compound_index_cdf[comp_index_ctx], 2);
        } else {
          assert(mbmi->compound_idx == 1);
        }
      } else {
        assert(cpi->common.current_frame.reference_mode != SINGLE_REFERENCE &&
               is_inter_compound_mode(mbmi->mode) &&
               mbmi->motion_mode == SIMPLE_TRANSLATION);
        assert(masked_compound_used);
        // compound_diffwtd, wedge
        assert(mbmi->interinter_comp.type == COMPOUND_WEDGE ||
               mbmi->interinter_comp.type == COMPOUND_DIFFWTD);

        if (is_interinter_compound_used(COMPOUND_WEDGE, bsize))
          aom_write_symbol(w, mbmi->interinter_comp.type - COMPOUND_WEDGE,
                           ec_ctx->compound_type_cdf[bsize],
                           MASKED_COMPOUND_TYPES);

        if (mbmi->interinter_comp.type == COMPOUND_WEDGE) {
          assert(is_interinter_compound_used(COMPOUND_WEDGE, bsize));
          aom_write_symbol(w, mbmi->interinter_comp.wedge_index,
                           ec_ctx->wedge_idx_cdf[bsize], MAX_WEDGE_TYPES);
          aom_write_bit(w, mbmi->interinter_comp.wedge_sign);
        } else {
          assert(mbmi->interinter_comp.type == COMPOUND_DIFFWTD);
          aom_write_literal(w, mbmi->interinter_comp.mask_type,
                            MAX_DIFFWTD_MASK_BITS);
        }
      }
    }
    write_mb_interp_filter(cm, td, w);
  }
}
1247
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07001248static AOM_INLINE void write_intrabc_info(
1249 MACROBLOCKD *xd, const MB_MODE_INFO_EXT_FRAME *mbmi_ext_frame,
1250 aom_writer *w) {
Yue Chen53b53f02018-03-29 14:31:23 -07001251 const MB_MODE_INFO *const mbmi = xd->mi[0];
Hui Suc2232cf2017-10-11 17:32:56 -07001252 int use_intrabc = is_intrabc_block(mbmi);
1253 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1254 aom_write_symbol(w, use_intrabc, ec_ctx->intrabc_cdf, 2);
1255 if (use_intrabc) {
1256 assert(mbmi->mode == DC_PRED);
1257 assert(mbmi->uv_mode == UV_DC_PRED);
Hui Su1fbe32a2018-02-26 21:44:54 -08001258 assert(mbmi->motion_mode == SIMPLE_TRANSLATION);
Remya0cce44c2019-08-16 11:57:24 +05301259 int_mv dv_ref = mbmi_ext_frame->ref_mv_stack[0].this_mv;
Hui Suc2232cf2017-10-11 17:32:56 -07001260 av1_encode_dv(w, &mbmi->mv[0].as_mv, &dv_ref.as_mv, &ec_ctx->ndvc);
Hui Suc2232cf2017-10-11 17:32:56 -07001261 }
1262}
Hui Suc2232cf2017-10-11 17:32:56 -07001263
// Packs the mode info for one block of an intra-only (key) frame:
// segment id (pre- or post-skip depending on config), skip flag, CDEF and
// delta-q side info, optional IntraBC info, then the intra prediction
// modes. Write order must match the decoder's read order.
static AOM_INLINE void write_mb_modes_kf(
    AV1_COMP *cpi, MACROBLOCKD *xd,
    const MB_MODE_INFO_EXT_FRAME *mbmi_ext_frame, aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  const struct segmentation *const seg = &cm->seg;
  struct segmentation_probs *const segp = &ec_ctx->seg;
  const MB_MODE_INFO *const mbmi = xd->mi[0];

  // Segment id before the skip flag when segid_preskip is configured.
  if (seg->segid_preskip && seg->update_map)
    write_segment_id(cpi, xd, mbmi, w, seg, segp, 0);

  const int skip = write_skip(cm, xd, mbmi->segment_id, mbmi, w);

  // Otherwise the segment id follows skip, so its coding can use the
  // skip value.
  if (!seg->segid_preskip && seg->update_map)
    write_segment_id(cpi, xd, mbmi, w, seg, segp, skip);

  write_cdef(cm, xd, w, skip);

  write_delta_q_params(cm, xd, skip, w);

  if (av1_allow_intrabc(cm)) {
    write_intrabc_info(xd, mbmi_ext_frame, w);
    // IntraBC blocks signal a displacement vector instead of intra modes.
    if (is_intrabc_block(mbmi)) return;
  }

  write_intra_prediction_modes(cm, xd, 1, w);
}
1292
Angie Chiangd4022822016-11-02 18:30:25 -07001293#if CONFIG_RD_DEBUG
// Debug helper (CONFIG_RD_DEBUG): prints the identifying fields of a mode
// info struct in a "mi->field == value" form that can be pasted directly
// into a conditional breakpoint when chasing RD cost mismatches.
static AOM_INLINE void dump_mode_info(MB_MODE_INFO *mi) {
  printf("\nmi->mi_row == %d\n", mi->mi_row);
  printf("&& mi->mi_col == %d\n", mi->mi_col);
  printf("&& mi->bsize == %d\n", mi->bsize);
  printf("&& mi->tx_size == %d\n", mi->tx_size);
  printf("&& mi->mode == %d\n", mi->mode);
}
Debargha Mukherjee514b52b2019-01-02 16:34:28 -08001301
Angie Chiangd02001d2016-11-06 15:31:49 -08001302static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats,
1303 int plane) {
1304 if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) {
Angie Chiangd02001d2016-11-06 15:31:49 -08001305 printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n",
1306 plane, rd_stats->txb_coeff_cost[plane], token_stats->cost);
Angie Chiangd02001d2016-11-06 15:31:49 -08001307 return 1;
1308 }
1309 return 0;
1310}
Angie Chiangd4022822016-11-02 18:30:25 -07001311#endif
1312
Di Chen56586622017-06-09 13:49:44 -07001313#if ENC_MISMATCH_DEBUG
// Debug helper (ENC_MISMATCH_DEBUG): for inter blocks of one hard-coded
// frame (FRAME_TO_CHECK), prints the encoder-side mode decision state so it
// can be diffed line-by-line against the matching decoder-side dump when
// hunting encode/decode mismatches.
static AOM_INLINE void enc_dump_logs(
    const AV1_COMMON *const cm,
    const MBMIExtFrameBufferInfo *const mbmi_ext_info, int mi_row, int mi_col) {
  const MB_MODE_INFO *const mbmi = *(
      cm->mi_params.mi_grid_base + (mi_row * cm->mi_params.mi_stride + mi_col));
  const MB_MODE_INFO_EXT_FRAME *const mbmi_ext_frame =
      mbmi_ext_info->frame_base + get_mi_ext_idx(mi_row, mi_col,
                                                 cm->mi_params.mi_alloc_bsize,
                                                 mbmi_ext_info->stride);
  if (is_inter_block(mbmi)) {
// Frame number to dump; edit locally when debugging a different frame.
#define FRAME_TO_CHECK 11
    if (cm->current_frame.frame_number == FRAME_TO_CHECK &&
        cm->show_frame == 1) {
      const BLOCK_SIZE bsize = mbmi->bsize;

      int_mv mv[2] = { 0 };
      const int is_comp_ref = has_second_ref(mbmi);

      for (int ref = 0; ref < 1 + is_comp_ref; ++ref)
        mv[ref].as_mv = mbmi->mv[ref].as_mv;

      // Zero the unused second MV so the printout is deterministic.
      if (!is_comp_ref) {
        mv[1].as_int = 0;
      }

      const int16_t mode_ctx =
          is_comp_ref ? 0
                      : mode_context_analyzer(mbmi_ext_frame->mode_context,
                                              mbmi->ref_frame);

      const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
      // -1 marks sub-contexts that are not meaningful for this mode.
      int16_t zeromv_ctx = -1;
      int16_t refmv_ctx = -1;

      if (mbmi->mode != NEWMV) {
        zeromv_ctx = (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
        if (mbmi->mode != GLOBALMV)
          refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
      }

      printf(
          "=== ENCODER ===: "
          "Frame=%d, (mi_row,mi_col)=(%d,%d), skip_mode=%d, mode=%d, bsize=%d, "
          "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
          "ref[1]=%d, motion_mode=%d, mode_ctx=%d, "
          "newmv_ctx=%d, zeromv_ctx=%d, refmv_ctx=%d, tx_size=%d\n",
          cm->current_frame.frame_number, mi_row, mi_col, mbmi->skip_mode,
          mbmi->mode, bsize, cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col,
          mv[1].as_mv.row, mv[1].as_mv.col, mbmi->ref_frame[0],
          mbmi->ref_frame[1], mbmi->motion_mode, mode_ctx, newmv_ctx,
          zeromv_ctx, refmv_ctx, mbmi->tx_size);
    }
  }
}
1368#endif // ENC_MISMATCH_DEBUG
1369
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301370static AOM_INLINE void write_mbmi_b(AV1_COMP *cpi, ThreadData *const td,
1371 aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001372 AV1_COMMON *const cm = &cpi->common;
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301373 MACROBLOCKD *const xd = &td->mb.e_mbd;
Yue Chen53b53f02018-03-29 14:31:23 -07001374 MB_MODE_INFO *m = xd->mi[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001375
Yaowu Xuc27fc142016-08-22 16:08:15 -07001376 if (frame_is_intra_only(cm)) {
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301377 write_mb_modes_kf(cpi, xd, td->mb.mbmi_ext_frame, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001378 } else {
Angie Chiang38edf682017-02-21 15:13:09 -08001379 // has_subpel_mv_component needs the ref frame buffers set up to look
1380 // up if they are scaled. has_subpel_mv_component is in turn needed by
Yaowu Xuc27fc142016-08-22 16:08:15 -07001381 // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
Yue Chen53b53f02018-03-29 14:31:23 -07001382 set_ref_ptrs(cm, xd, m->ref_frame[0], m->ref_frame[1]);
Zoe Liu85b66462017-04-20 14:28:19 -07001383
Di Chen56586622017-06-09 13:49:44 -07001384#if ENC_MISMATCH_DEBUG
Visheshd1317912020-04-07 14:39:44 +05301385 enc_dump_logs(cm, &cpi->mbmi_ext_info, xd->mi_row, xd->mi_col);
Di Chen56586622017-06-09 13:49:44 -07001386#endif // ENC_MISMATCH_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07001387
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301388 pack_inter_mode_mvs(cpi, td, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001389 }
Yue Chen64550b62017-01-12 12:18:22 -08001390}
Yaowu Xuc27fc142016-08-22 16:08:15 -07001391
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07001392static AOM_INLINE void write_inter_txb_coeff(
1393 AV1_COMMON *const cm, MACROBLOCK *const x, MB_MODE_INFO *const mbmi,
Vishesh686aa772020-04-13 14:40:12 +05301394 aom_writer *w, const TokenExtra **tok, const TokenExtra *const tok_end,
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07001395 TOKEN_STATS *token_stats, const int row, const int col, int *block,
1396 const int plane) {
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001397 MACROBLOCKD *const xd = &x->e_mbd;
1398 const struct macroblockd_plane *const pd = &xd->plane[plane];
chiyotsai0f5cd052020-08-27 14:37:44 -07001399 const BLOCK_SIZE bsize = mbmi->bsize;
kyslove7ff3b62019-04-05 14:15:03 -07001400 assert(bsize < BLOCK_SIZES_ALL);
Hui Su56e838f2019-12-04 14:48:30 -08001401 const int ss_x = pd->subsampling_x;
1402 const int ss_y = pd->subsampling_y;
Hui Suaadb0b42019-12-06 15:56:47 -08001403 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, ss_x, ss_y);
Hui Su56e838f2019-12-04 14:48:30 -08001404 assert(plane_bsize < BLOCK_SIZES_ALL);
Urvang Joshi49c57d62018-05-03 11:37:38 -07001405 const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, plane_bsize, plane);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001406 const int step =
1407 tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
1408 const int bkw = tx_size_wide_unit[max_tx_size];
1409 const int bkh = tx_size_high_unit[max_tx_size];
Cheng Chen8ab1f442018-04-27 18:01:52 -07001410 const BLOCK_SIZE max_unit_bsize =
Hui Su56e838f2019-12-04 14:48:30 -08001411 get_plane_block_size(BLOCK_64X64, ss_x, ss_y);
Hui Suadda5872019-12-09 10:08:49 -08001412 const int num_4x4_w = mi_size_wide[plane_bsize];
1413 const int num_4x4_h = mi_size_high[plane_bsize];
1414 const int mu_blocks_wide = mi_size_wide[max_unit_bsize];
1415 const int mu_blocks_high = mi_size_high[max_unit_bsize];
Hui Su56e838f2019-12-04 14:48:30 -08001416 const int unit_height = AOMMIN(mu_blocks_high + (row >> ss_y), num_4x4_h);
1417 const int unit_width = AOMMIN(mu_blocks_wide + (col >> ss_x), num_4x4_w);
1418 for (int blk_row = row >> ss_y; blk_row < unit_height; blk_row += bkh) {
1419 for (int blk_col = col >> ss_x; blk_col < unit_width; blk_col += bkw) {
Sebastien Alaiwancad5ebc2018-02-20 16:18:20 +01001420 pack_txb_tokens(w, cm, x, tok, tok_end, xd, mbmi, plane, plane_bsize,
Tarundeep Singh4243e622021-04-20 16:10:22 +05301421 cm->seq_params->bit_depth, *block, blk_row, blk_col,
Urvang Joshi20cf30e2018-07-19 02:33:58 -07001422 max_tx_size, token_stats);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001423 *block += step;
1424 }
1425 }
1426}
1427
// Writes the coefficient tokens for one (non-skip) block. Intra blocks are
// handled by av1_write_intra_coeffs_mb; inter blocks are walked in 64x64
// processing units, plane by plane, through write_inter_txb_coeff. Under
// CONFIG_RD_DEBUG the packed token cost is cross-checked against the cost
// recorded during RD search.
static AOM_INLINE void write_tokens_b(AV1_COMP *cpi, MACROBLOCK *const x,
                                      aom_writer *w, const TokenExtra **tok,
                                      const TokenExtra *const tok_end) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *const mbmi = xd->mi[0];
  const BLOCK_SIZE bsize = mbmi->bsize;

  // Callers only reach here for blocks with coded coefficients.
  assert(!mbmi->skip_txfm);

  const int is_inter = is_inter_block(mbmi);
  if (!is_inter) {
    av1_write_intra_coeffs_mb(cm, x, w, bsize);
  } else {
    // Per-plane running block index into the token stream.
    int block[MAX_MB_PLANE] = { 0 };
    assert(bsize == get_plane_block_size(bsize, xd->plane[0].subsampling_x,
                                         xd->plane[0].subsampling_y));
    const int num_4x4_w = mi_size_wide[bsize];
    const int num_4x4_h = mi_size_high[bsize];
    TOKEN_STATS token_stats;
    init_token_stats(&token_stats);

    const BLOCK_SIZE max_unit_bsize = BLOCK_64X64;
    assert(max_unit_bsize == get_plane_block_size(BLOCK_64X64,
                                                  xd->plane[0].subsampling_x,
                                                  xd->plane[0].subsampling_y));
    // Processing-unit dimensions, clamped to the block's own size.
    int mu_blocks_wide = mi_size_wide[max_unit_bsize];
    int mu_blocks_high = mi_size_high[max_unit_bsize];
    mu_blocks_wide = AOMMIN(num_4x4_w, mu_blocks_wide);
    mu_blocks_high = AOMMIN(num_4x4_h, mu_blocks_high);

    const int num_planes = av1_num_planes(cm);
    for (int row = 0; row < num_4x4_h; row += mu_blocks_high) {
      for (int col = 0; col < num_4x4_w; col += mu_blocks_wide) {
        for (int plane = 0; plane < num_planes; ++plane) {
          // Chroma planes are coded only for chroma-reference blocks.
          if (plane && !xd->is_chroma_ref) break;
          write_inter_txb_coeff(cm, x, mbmi, w, tok, tok_end, &token_stats, row,
                                col, &block[plane], plane);
        }
      }
    }
#if CONFIG_RD_DEBUG
    for (int plane = 0; plane < num_planes; ++plane) {
      if (mbmi->bsize >= BLOCK_8X8 &&
          rd_token_stats_mismatch(&mbmi->rd_stats, &token_stats, plane)) {
        dump_mode_info(mbmi);
        assert(0);
      }
    }
#endif  // CONFIG_RD_DEBUG
  }
}
1480
// Writes one complete block at (mi_row, mi_col): points xd/mb at the
// block's mode info, writes the mode info, palette color-index tokens,
// transform-size signaling (with txfm context updates that later blocks'
// contexts depend on), and finally the coefficient tokens for non-skip
// blocks, accounting their size in td->coefficient_size.
static AOM_INLINE void write_modes_b(AV1_COMP *cpi, ThreadData *const td,
                                     const TileInfo *const tile, aom_writer *w,
                                     const TokenExtra **tok,
                                     const TokenExtra *const tok_end,
                                     int mi_row, int mi_col) {
  const AV1_COMMON *cm = &cpi->common;
  const CommonModeInfoParams *const mi_params = &cm->mi_params;
  MACROBLOCKD *xd = &td->mb.e_mbd;
  FRAME_CONTEXT *tile_ctx = xd->tile_ctx;
  // Point the macroblockd at this block's mode info and tx-type map.
  const int grid_idx = mi_row * mi_params->mi_stride + mi_col;
  xd->mi = mi_params->mi_grid_base + grid_idx;
  td->mb.mbmi_ext_frame =
      cpi->mbmi_ext_info.frame_base +
      get_mi_ext_idx(mi_row, mi_col, cm->mi_params.mi_alloc_bsize,
                     cpi->mbmi_ext_info.stride);
  xd->tx_type_map = mi_params->tx_type_map + grid_idx;
  xd->tx_type_map_stride = mi_params->mi_stride;

  const MB_MODE_INFO *mbmi = xd->mi[0];
  const BLOCK_SIZE bsize = mbmi->bsize;
  assert(bsize <= cm->seq_params->sb_size ||
         (bsize >= BLOCK_SIZES && bsize < BLOCK_SIZES_ALL));

  const int bh = mi_size_high[bsize];
  const int bw = mi_size_wide[bsize];
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, mi_params->mi_rows,
                 mi_params->mi_cols);

  xd->above_txfm_context = cm->above_contexts.txfm[tile->tile_row] + mi_col;
  xd->left_txfm_context =
      xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);

  write_mbmi_b(cpi, td, w);

  // Palette color-index tokens for luma and (when present) chroma.
  for (int plane = 0; plane < AOMMIN(2, av1_num_planes(cm)); ++plane) {
    const uint8_t palette_size_plane =
        mbmi->palette_mode_info.palette_size[plane];
    assert(!mbmi->skip_mode || !palette_size_plane);
    if (palette_size_plane > 0) {
      assert(mbmi->use_intrabc == 0);
      assert(av1_allow_palette(cm->features.allow_screen_content_tools,
                               mbmi->bsize));
      assert(!plane || xd->is_chroma_ref);
      int rows, cols;
      av1_get_block_dimensions(mbmi->bsize, plane, xd, NULL, NULL, &rows,
                               &cols);
      assert(*tok < tok_end);
      MapCdf map_pb_cdf = plane ? tile_ctx->palette_uv_color_index_cdf
                                : tile_ctx->palette_y_color_index_cdf;
      pack_map_tokens(w, tok, palette_size_plane, rows * cols, map_pb_cdf);
    }
  }

  // Transform size: explicitly signaled only under TX_MODE_SELECT for
  // non-lossless, non-(inter+skip) blocks; otherwise only the txfm
  // contexts are updated so neighboring blocks see consistent state.
  const int is_inter_tx = is_inter_block(mbmi);
  const int skip_txfm = mbmi->skip_txfm;
  const uint8_t segment_id = mbmi->segment_id;
  if (cm->features.tx_mode == TX_MODE_SELECT && block_signals_txsize(bsize) &&
      !(is_inter_tx && skip_txfm) && !xd->lossless[segment_id]) {
    if (is_inter_tx) {  // This implies skip flag is 0.
      const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, bsize, 0);
      const int txbh = tx_size_high_unit[max_tx_size];
      const int txbw = tx_size_wide_unit[max_tx_size];
      const int width = mi_size_wide[bsize];
      const int height = mi_size_high[bsize];
      // Inter blocks signal a recursive tx partition per max-size unit.
      for (int idy = 0; idy < height; idy += txbh) {
        for (int idx = 0; idx < width; idx += txbw) {
          write_tx_size_vartx(xd, mbmi, max_tx_size, 0, idy, idx, w);
        }
      }
    } else {
      write_selected_tx_size(xd, w);
      set_txfm_ctxs(mbmi->tx_size, xd->width, xd->height, 0, xd);
    }
  } else {
    set_txfm_ctxs(mbmi->tx_size, xd->width, xd->height,
                  skip_txfm && is_inter_tx, xd);
  }

  if (!mbmi->skip_txfm) {
    // Track how many bytes the coefficient tokens of this block consume.
    int start = aom_tell_size(w);

    write_tokens_b(cpi, &td->mb, w, tok, tok_end);

    const int end = aom_tell_size(w);
    td->coefficient_size += end - start;
  }
}
1568
// Signals the partition type of a block at (mi_row, mi_col) to the bitstream.
// The amount of signaling depends on how much of the block lies inside the
// frame:
//  - both the bottom and right halves are inside: the full partition symbol
//    is coded against the per-context partition CDF;
//  - only one direction fits: a single split/no-split bit is coded against a
//    binary CDF gathered from the full partition CDF (the other choices are
//    ruled out by the frame boundary);
//  - neither fits: nothing is written, PARTITION_SPLIT is implied.
static AOM_INLINE void write_partition(const AV1_COMMON *const cm,
                                       const MACROBLOCKD *const xd, int hbs,
                                       int mi_row, int mi_col, PARTITION_TYPE p,
                                       BLOCK_SIZE bsize, aom_writer *w) {
  // Blocks below 8x8 are never split, so no partition symbol exists for them.
  const int is_partition_point = bsize >= BLOCK_8X8;

  if (!is_partition_point) return;

  // A half-block step that stays inside the frame means the corresponding
  // bottom/right portion of this block is actually coded.
  const int has_rows = (mi_row + hbs) < cm->mi_params.mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_params.mi_cols;
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;

  if (!has_rows && !has_cols) {
    // Fully outside in both directions: split is forced, nothing to signal.
    assert(p == PARTITION_SPLIT);
    return;
  }

  if (has_rows && has_cols) {
    // Normal case: full partition symbol.
    aom_write_symbol(w, p, ec_ctx->partition_cdf[ctx],
                     partition_cdf_length(bsize));
  } else if (!has_rows && has_cols) {
    // Bottom half is outside the frame: only SPLIT vs HORZ remain possible.
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    assert(bsize > BLOCK_8X8);
    aom_cdf_prob cdf[2];
    partition_gather_vert_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
    aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
  } else {
    // Right half is outside the frame: only SPLIT vs VERT remain possible.
    assert(has_rows && !has_cols);
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    assert(bsize > BLOCK_8X8);
    aom_cdf_prob cdf[2];
    partition_gather_horz_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
    aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
  }
}
1605
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07001606static AOM_INLINE void write_modes_sb(
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301607 AV1_COMP *const cpi, ThreadData *const td, const TileInfo *const tile,
1608 aom_writer *const w, const TokenExtra **tok,
1609 const TokenExtra *const tok_end, int mi_row, int mi_col, BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001610 const AV1_COMMON *const cm = &cpi->common;
Urvang Joshi9dc909d2020-03-23 16:07:02 -07001611 const CommonModeInfoParams *const mi_params = &cm->mi_params;
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301612 MACROBLOCKD *const xd = &td->mb.e_mbd;
kyslov5859dca2019-04-08 12:13:11 -07001613 assert(bsize < BLOCK_SIZES_ALL);
Jingning Hanc709e1f2016-12-06 14:48:09 -08001614 const int hbs = mi_size_wide[bsize] / 2;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001615 const int quarter_step = mi_size_wide[bsize] / 4;
1616 int i;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001617 const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
Cheng Chen82b4fa12018-05-02 18:43:17 -07001618 const BLOCK_SIZE subsize = get_partition_subsize(bsize, partition);
Jingning Han52261842016-12-14 12:17:49 -08001619
Urvang Joshi9dc909d2020-03-23 16:07:02 -07001620 if (mi_row >= mi_params->mi_rows || mi_col >= mi_params->mi_cols) return;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001621
Jerome Jiang3dd9df92020-10-29 16:42:33 -07001622#if !CONFIG_REALTIME_ONLY
Debargha Mukherjeea78c8f52018-01-31 11:14:38 -08001623 const int num_planes = av1_num_planes(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00001624 for (int plane = 0; plane < num_planes; ++plane) {
Cheng Chen55c44ce2018-06-26 12:34:24 -07001625 int rcol0, rcol1, rrow0, rrow1;
Rachel Barker2b98f1b2023-07-06 19:39:45 +00001626
1627 // Skip some unnecessary work if loop restoration is disabled
1628 if (cm->rst_info[plane].frame_restoration_type == RESTORE_NONE) continue;
1629
Imdad Sardharwalla7d2e5c92018-01-05 18:41:00 +00001630 if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
Cheng Chen55c44ce2018-06-26 12:34:24 -07001631 &rcol0, &rcol1, &rrow0, &rrow1)) {
Rachel Barker0483cbe2023-07-05 23:31:38 +00001632 const int rstride = cm->rst_info[plane].horz_units;
Imdad Sardharwalla7d2e5c92018-01-05 18:41:00 +00001633 for (int rrow = rrow0; rrow < rrow1; ++rrow) {
1634 for (int rcol = rcol0; rcol < rcol1; ++rcol) {
Cheng Chen55c44ce2018-06-26 12:34:24 -07001635 const int runit_idx = rcol + rrow * rstride;
Rachel Barkerfe93d332023-07-10 10:01:15 +00001636 loop_restoration_write_sb_coeffs(cm, xd, runit_idx, w, plane,
1637 td->counts);
Imdad Sardharwalla7d2e5c92018-01-05 18:41:00 +00001638 }
1639 }
1640 }
1641 }
Jerome Jiang3dd9df92020-10-29 16:42:33 -07001642#endif
Imdad Sardharwalla7d2e5c92018-01-05 18:41:00 +00001643
Yaowu Xuc27fc142016-08-22 16:08:15 -07001644 write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001645 switch (partition) {
1646 case PARTITION_NONE:
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301647 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001648 break;
1649 case PARTITION_HORZ:
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301650 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
Urvang Joshi9dc909d2020-03-23 16:07:02 -07001651 if (mi_row + hbs < mi_params->mi_rows)
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301652 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001653 break;
1654 case PARTITION_VERT:
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301655 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
Urvang Joshi9dc909d2020-03-23 16:07:02 -07001656 if (mi_col + hbs < mi_params->mi_cols)
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301657 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col + hbs);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001658 break;
1659 case PARTITION_SPLIT:
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301660 write_modes_sb(cpi, td, tile, w, tok, tok_end, mi_row, mi_col, subsize);
1661 write_modes_sb(cpi, td, tile, w, tok, tok_end, mi_row, mi_col + hbs,
1662 subsize);
1663 write_modes_sb(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col,
1664 subsize);
1665 write_modes_sb(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs,
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001666 subsize);
1667 break;
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001668 case PARTITION_HORZ_A:
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301669 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
1670 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col + hbs);
1671 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001672 break;
1673 case PARTITION_HORZ_B:
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301674 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
1675 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col);
1676 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001677 break;
1678 case PARTITION_VERT_A:
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301679 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
1680 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col);
1681 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col + hbs);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001682 break;
1683 case PARTITION_VERT_B:
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301684 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col);
1685 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, mi_col + hbs);
1686 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001687 break;
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001688 case PARTITION_HORZ_4:
1689 for (i = 0; i < 4; ++i) {
1690 int this_mi_row = mi_row + i * quarter_step;
Urvang Joshi9dc909d2020-03-23 16:07:02 -07001691 if (i > 0 && this_mi_row >= mi_params->mi_rows) break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001692
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301693 write_modes_b(cpi, td, tile, w, tok, tok_end, this_mi_row, mi_col);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001694 }
1695 break;
1696 case PARTITION_VERT_4:
1697 for (i = 0; i < 4; ++i) {
1698 int this_mi_col = mi_col + i * quarter_step;
Urvang Joshi9dc909d2020-03-23 16:07:02 -07001699 if (i > 0 && this_mi_col >= mi_params->mi_cols) break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001700
Cherma Rajan A8ba48242021-04-12 18:13:01 +05301701 write_modes_b(cpi, td, tile, w, tok, tok_end, mi_row, this_mi_col);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001702 }
1703 break;
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001704 default: assert(0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001705 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001706
Debargha Mukherjee16870852018-02-28 10:00:17 -08001707 // update partition context
Yaowu Xuc27fc142016-08-22 16:08:15 -07001708 update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001709}
1710
Jayasanker J92cca7a2021-10-27 15:05:52 +05301711// Populate token pointers appropriately based on token_info.
1712static AOM_INLINE void get_token_pointers(const TokenInfo *token_info,
1713 const int tile_row, int tile_col,
1714 const int sb_row_in_tile,
1715 const TokenExtra **tok,
1716 const TokenExtra **tok_end) {
1717 if (!is_token_info_allocated(token_info)) {
1718 *tok = NULL;
1719 *tok_end = NULL;
1720 return;
1721 }
1722 *tok = token_info->tplist[tile_row][tile_col][sb_row_in_tile].start;
1723 *tok_end =
1724 *tok + token_info->tplist[tile_row][tile_col][sb_row_in_tile].count;
1725}
1726
// Writes all mode/partition/token data for one tile: iterates the tile's
// superblock rows and, within each row, its superblocks, delegating each
// superblock to write_modes_sb(). Also resets the above/left entropy
// contexts and the delta-q / delta-lf tracking state so in-loop prediction
// of those values starts fresh for this tile.
static AOM_INLINE void write_modes(AV1_COMP *const cpi, ThreadData *const td,
                                   const TileInfo *const tile,
                                   aom_writer *const w, int tile_row,
                                   int tile_col) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &td->mb.e_mbd;
  const int mi_row_start = tile->mi_row_start;
  const int mi_row_end = tile->mi_row_end;
  const int mi_col_start = tile->mi_col_start;
  const int mi_col_end = tile->mi_col_end;
  const int num_planes = av1_num_planes(cm);

  // Fresh entropy contexts at the tile boundary (tiles decode independently).
  av1_zero_above_context(cm, xd, mi_col_start, mi_col_end, tile->tile_row);
  av1_init_above_context(&cm->above_contexts, num_planes, tile->tile_row, xd);

  if (cpi->common.delta_q_info.delta_q_present_flag) {
    // Delta-q is coded relative to the running qindex; start from base.
    xd->current_base_qindex = cpi->common.quant_params.base_qindex;
    if (cpi->common.delta_q_info.delta_lf_present_flag) {
      av1_reset_loop_filter_delta(xd, num_planes);
    }
  }

  for (int mi_row = mi_row_start; mi_row < mi_row_end;
       mi_row += cm->seq_params->mib_size) {
    const int sb_row_in_tile =
        (mi_row - tile->mi_row_start) >> cm->seq_params->mib_size_log2;
    const TokenInfo *token_info = &cpi->token_info;
    const TokenExtra *tok;
    const TokenExtra *tok_end;
    // Token buffers are stored per (tile, superblock row).
    get_token_pointers(token_info, tile_row, tile_col, sb_row_in_tile, &tok,
                       &tok_end);

    av1_zero_left_context(xd);

    for (int mi_col = mi_col_start; mi_col < mi_col_end;
         mi_col += cm->seq_params->mib_size) {
      td->mb.cb_coef_buff = av1_get_cb_coeff_buffer(cpi, mi_row, mi_col);
      write_modes_sb(cpi, td, tile, w, &tok, tok_end, mi_row, mi_col,
                     cm->seq_params->sb_size);
    }
    // Every token produced for this SB row must have been consumed.
    assert(tok == tok_end);
  }
}
1770
// Writes the frame-level loop-restoration header: for each plane a 2-bit
// code for the frame restoration type, then (if any plane uses restoration)
// the luma restoration-unit size, and finally whether chroma uses a smaller
// unit size than luma. Nothing is written when restoration is disabled for
// the sequence or the frame uses intra block copy.
static AOM_INLINE void encode_restoration_mode(
    AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
  assert(!cm->features.all_lossless);
  if (!cm->seq_params->enable_restoration) return;
  if (cm->features.allow_intrabc) return;
  const int num_planes = av1_num_planes(cm);
  // all_none: no plane uses restoration; chroma_none: no chroma plane does.
  int all_none = 1, chroma_none = 1;
  for (int p = 0; p < num_planes; ++p) {
    RestorationInfo *rsi = &cm->rst_info[p];
    if (rsi->frame_restoration_type != RESTORE_NONE) {
      all_none = 0;
      chroma_none &= p == 0;
    }
    // Two bits per plane; the (bit0, bit1) pairs below match the AV1 spec's
    // lr_type coding.
    switch (rsi->frame_restoration_type) {
      case RESTORE_NONE:
        aom_wb_write_bit(wb, 0);
        aom_wb_write_bit(wb, 0);
        break;
      case RESTORE_WIENER:
        aom_wb_write_bit(wb, 1);
        aom_wb_write_bit(wb, 0);
        break;
      case RESTORE_SGRPROJ:
        aom_wb_write_bit(wb, 1);
        aom_wb_write_bit(wb, 1);
        break;
      case RESTORE_SWITCHABLE:
        aom_wb_write_bit(wb, 0);
        aom_wb_write_bit(wb, 1);
        break;
      default: assert(0);
    }
  }
  if (!all_none) {
    assert(cm->seq_params->sb_size == BLOCK_64X64 ||
           cm->seq_params->sb_size == BLOCK_128X128);
    const int sb_size = cm->seq_params->sb_size == BLOCK_128X128 ? 128 : 64;

    RestorationInfo *rsi = &cm->rst_info[0];

    assert(rsi->restoration_unit_size >= sb_size);
    assert(RESTORATION_UNITSIZE_MAX == 256);

    // Unit size is signaled as up-to-two "larger than" bits: 64 -> 128 -> 256.
    // The first bit is implied when the superblock itself is 128x128.
    if (sb_size == 64) {
      aom_wb_write_bit(wb, rsi->restoration_unit_size > 64);
    }
    if (rsi->restoration_unit_size > 64) {
      aom_wb_write_bit(wb, rsi->restoration_unit_size > 128);
    }
  }

  if (num_planes > 1) {
    int s =
        AOMMIN(cm->seq_params->subsampling_x, cm->seq_params->subsampling_y);
    if (s && !chroma_none) {
      // One bit: does chroma use a unit size scaled down by the subsampling?
      aom_wb_write_bit(wb, cm->rst_info[1].restoration_unit_size !=
                               cm->rst_info[0].restoration_unit_size);
      assert(cm->rst_info[1].restoration_unit_size ==
                 cm->rst_info[0].restoration_unit_size ||
             cm->rst_info[1].restoration_unit_size ==
                 (cm->rst_info[0].restoration_unit_size >> s));
      assert(cm->rst_info[2].restoration_unit_size ==
             cm->rst_info[1].restoration_unit_size);
    } else if (!s) {
      // 4:4:4 (no subsampling): chroma must match luma, nothing is signaled.
      assert(cm->rst_info[1].restoration_unit_size ==
             cm->rst_info[0].restoration_unit_size);
      assert(cm->rst_info[2].restoration_unit_size ==
             cm->rst_info[1].restoration_unit_size);
    }
  }
}
1842
Jerome Jiang3dd9df92020-10-29 16:42:33 -07001843#if !CONFIG_REALTIME_ONLY
// Writes one restoration unit's Wiener filter taps. Each tap is coded as a
// finite subexponential difference from the previously-signaled filter
// (*ref_wiener_info), which is then updated so the next unit predicts from
// this one. Only taps 0..2 are coded per direction; the remaining taps
// follow from symmetry/normalization. For the reduced (chroma) window the
// outermost tap pair is forced to zero and not signaled.
static AOM_INLINE void write_wiener_filter(int wiener_win,
                                           const WienerInfo *wiener_info,
                                           WienerInfo *ref_wiener_info,
                                           aom_writer *wb) {
  // Vertical filter taps.
  if (wiener_win == WIENER_WIN)
    aom_write_primitive_refsubexpfin(
        wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
        WIENER_FILT_TAP0_SUBEXP_K,
        ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV,
        wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV);
  else
    assert(wiener_info->vfilter[0] == 0 &&
           wiener_info->vfilter[WIENER_WIN - 1] == 0);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
      WIENER_FILT_TAP1_SUBEXP_K,
      ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV,
      wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
      WIENER_FILT_TAP2_SUBEXP_K,
      ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV,
      wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV);
  // Horizontal filter taps, same scheme.
  if (wiener_win == WIENER_WIN)
    aom_write_primitive_refsubexpfin(
        wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
        WIENER_FILT_TAP0_SUBEXP_K,
        ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV,
        wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV);
  else
    assert(wiener_info->hfilter[0] == 0 &&
           wiener_info->hfilter[WIENER_WIN - 1] == 0);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
      WIENER_FILT_TAP1_SUBEXP_K,
      ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV,
      wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
      WIENER_FILT_TAP2_SUBEXP_K,
      ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV,
      wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV);
  // This unit becomes the prediction reference for the next one.
  memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
}
1888
// Writes one restoration unit's self-guided (sgrproj) parameters: the
// parameter-set index `ep` as a literal, then the projection gains xqd[0..1]
// as subexponential differences from the previously-signaled values. When
// the selected parameter set disables one of the two guided filters
// (r[0] == 0 or r[1] == 0) the corresponding gain is implied and skipped.
// *ref_sgrproj_info is updated to predict the next unit.
static AOM_INLINE void write_sgrproj_filter(const SgrprojInfo *sgrproj_info,
                                            SgrprojInfo *ref_sgrproj_info,
                                            aom_writer *wb) {
  aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS);
  const sgr_params_type *params = &av1_sgr_params[sgrproj_info->ep];

  if (params->r[0] == 0) {
    // First guided filter disabled: xqd[0] must be zero, code only xqd[1].
    assert(sgrproj_info->xqd[0] == 0);
    aom_write_primitive_refsubexpfin(
        wb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
        ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
        sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1);
  } else if (params->r[1] == 0) {
    // Second guided filter disabled: code only xqd[0].
    aom_write_primitive_refsubexpfin(
        wb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
        ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
        sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0);
  } else {
    // Both filters active: code both gains.
    aom_write_primitive_refsubexpfin(
        wb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
        ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
        sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0);
    aom_write_primitive_refsubexpfin(
        wb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
        ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
        sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1);
  }

  memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
}
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001919
// Writes the restoration type and filter coefficients for a single
// restoration unit. The signaling scheme depends on the plane's frame-level
// restoration type:
//  - RESTORE_SWITCHABLE: a full type symbol, then the matching coefficients;
//  - RESTORE_WIENER / RESTORE_SGRPROJ: one on/off bit, then coefficients.
// Filter coefficients are coded predictively against xd->wiener_info /
// xd->sgrproj_info, which the write_* helpers update in place.
static AOM_INLINE void loop_restoration_write_sb_coeffs(
    const AV1_COMMON *const cm, MACROBLOCKD *xd, int runit_idx,
    aom_writer *const w, int plane, FRAME_COUNTS *counts) {
  const RestorationUnitInfo *rui = &cm->rst_info[plane].unit_info[runit_idx];
  const RestorationInfo *rsi = cm->rst_info + plane;
  RestorationType frame_rtype = rsi->frame_restoration_type;
  assert(frame_rtype != RESTORE_NONE);

  // counts is only consumed when CONFIG_ENTROPY_STATS is enabled.
  (void)counts;
  assert(!cm->features.all_lossless);

  // Chroma planes use the reduced 5x5 Wiener window.
  const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
  WienerInfo *ref_wiener_info = &xd->wiener_info[plane];
  SgrprojInfo *ref_sgrproj_info = &xd->sgrproj_info[plane];
  RestorationType unit_rtype = rui->restoration_type;

  if (frame_rtype == RESTORE_SWITCHABLE) {
    aom_write_symbol(w, unit_rtype, xd->tile_ctx->switchable_restore_cdf,
                     RESTORE_SWITCHABLE_TYPES);
#if CONFIG_ENTROPY_STATS
    ++counts->switchable_restore[unit_rtype];
#endif
    switch (unit_rtype) {
      case RESTORE_WIENER:
#if DEBUG_LR_COSTING
        // Cross-check that RD costing used the same predictor state.
        assert(!memcmp(
            ref_wiener_info,
            &lr_ref_params[RESTORE_SWITCHABLE][plane][runit_idx].wiener_info,
            sizeof(*ref_wiener_info)));
#endif
        write_wiener_filter(wiener_win, &rui->wiener_info, ref_wiener_info, w);
        break;
      case RESTORE_SGRPROJ:
#if DEBUG_LR_COSTING
        assert(!memcmp(&ref_sgrproj_info->xqd,
                       &lr_ref_params[RESTORE_SWITCHABLE][plane][runit_idx]
                            .sgrproj_info.xqd,
                       sizeof(ref_sgrproj_info->xqd)));
#endif
        write_sgrproj_filter(&rui->sgrproj_info, ref_sgrproj_info, w);
        break;
      default: assert(unit_rtype == RESTORE_NONE); break;
    }
  } else if (frame_rtype == RESTORE_WIENER) {
    // One bit: is this unit restored at all?
    aom_write_symbol(w, unit_rtype != RESTORE_NONE,
                     xd->tile_ctx->wiener_restore_cdf, 2);
#if CONFIG_ENTROPY_STATS
    ++counts->wiener_restore[unit_rtype != RESTORE_NONE];
#endif
    if (unit_rtype != RESTORE_NONE) {
#if DEBUG_LR_COSTING
      assert(
          !memcmp(ref_wiener_info,
                  &lr_ref_params[RESTORE_WIENER][plane][runit_idx].wiener_info,
                  sizeof(*ref_wiener_info)));
#endif
      write_wiener_filter(wiener_win, &rui->wiener_info, ref_wiener_info, w);
    }
  } else if (frame_rtype == RESTORE_SGRPROJ) {
    aom_write_symbol(w, unit_rtype != RESTORE_NONE,
                     xd->tile_ctx->sgrproj_restore_cdf, 2);
#if CONFIG_ENTROPY_STATS
    ++counts->sgrproj_restore[unit_rtype != RESTORE_NONE];
#endif
    if (unit_rtype != RESTORE_NONE) {
#if DEBUG_LR_COSTING
      assert(!memcmp(
          &ref_sgrproj_info->xqd,
          &lr_ref_params[RESTORE_SGRPROJ][plane][runit_idx].sgrproj_info.xqd,
          sizeof(ref_sgrproj_info->xqd)));
#endif
      write_sgrproj_filter(&rui->sgrproj_info, ref_sgrproj_info, w);
    }
  }
}
Jerome Jiang3dd9df92020-10-29 16:42:33 -07001995#endif // !CONFIG_REALTIME_ONLY
Yaowu Xuc27fc142016-08-22 16:08:15 -07001996
Elliott Karpilovsky00ca4a32019-10-31 17:06:50 -07001997// Only write out the ref delta section if any of the elements
1998// will signal a delta.
1999static bool is_mode_ref_delta_meaningful(AV1_COMMON *cm) {
2000 struct loopfilter *lf = &cm->lf;
2001 if (!lf->mode_ref_delta_update) {
2002 return 0;
2003 }
2004 const RefCntBuffer *buf = get_primary_ref_frame_buf(cm);
2005 int8_t last_ref_deltas[REF_FRAMES];
2006 int8_t last_mode_deltas[MAX_MODE_LF_DELTAS];
2007 if (buf == NULL) {
2008 av1_set_default_ref_deltas(last_ref_deltas);
2009 av1_set_default_mode_deltas(last_mode_deltas);
2010 } else {
2011 memcpy(last_ref_deltas, buf->ref_deltas, REF_FRAMES);
2012 memcpy(last_mode_deltas, buf->mode_deltas, MAX_MODE_LF_DELTAS);
2013 }
2014 for (int i = 0; i < REF_FRAMES; i++) {
2015 if (lf->ref_deltas[i] != last_ref_deltas[i]) {
2016 return true;
2017 }
2018 }
2019 for (int i = 0; i < MAX_MODE_LF_DELTAS; i++) {
2020 if (lf->mode_deltas[i] != last_mode_deltas[i]) {
2021 return true;
2022 }
2023 }
2024 return false;
2025}
2026
// Writes the frame-level loop-filter parameters: the two luma filter levels,
// the chroma levels (only when luma filtering is active), the sharpness
// level, and the mode/ref delta section. Deltas are only emitted when at
// least one differs from what the decoder would inherit from the primary
// reference frame (see is_mode_ref_delta_meaningful).
static AOM_INLINE void encode_loopfilter(AV1_COMMON *cm,
                                         struct aom_write_bit_buffer *wb) {
  assert(!cm->features.coded_lossless);
  if (cm->features.allow_intrabc) return;
  const int num_planes = av1_num_planes(cm);
  struct loopfilter *lf = &cm->lf;

  // Encode the loop filter level and type
  aom_wb_write_literal(wb, lf->filter_level[0], 6);
  aom_wb_write_literal(wb, lf->filter_level[1], 6);
  if (num_planes > 1) {
    // Chroma levels are only present when luma filtering is enabled.
    if (lf->filter_level[0] || lf->filter_level[1]) {
      aom_wb_write_literal(wb, lf->filter_level_u, 6);
      aom_wb_write_literal(wb, lf->filter_level_v, 6);
    }
  }
  aom_wb_write_literal(wb, lf->sharpness_level, 3);

  aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);

  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled), only if there is information to write.
  int meaningful = is_mode_ref_delta_meaningful(cm);
  aom_wb_write_bit(wb, meaningful);
  if (!meaningful) {
    return;
  }

  // Recompute the baseline deltas (same derivation the decoder uses) so each
  // delta can be sent as a per-entry "changed" flag plus value.
  const RefCntBuffer *buf = get_primary_ref_frame_buf(cm);
  int8_t last_ref_deltas[REF_FRAMES];
  int8_t last_mode_deltas[MAX_MODE_LF_DELTAS];
  if (buf == NULL) {
    av1_set_default_ref_deltas(last_ref_deltas);
    av1_set_default_mode_deltas(last_mode_deltas);
  } else {
    memcpy(last_ref_deltas, buf->ref_deltas, REF_FRAMES);
    memcpy(last_mode_deltas, buf->mode_deltas, MAX_MODE_LF_DELTAS);
  }
  for (int i = 0; i < REF_FRAMES; i++) {
    const int delta = lf->ref_deltas[i];
    const int changed = delta != last_ref_deltas[i];
    aom_wb_write_bit(wb, changed);
    if (changed) aom_wb_write_inv_signed_literal(wb, delta, 6);
  }
  for (int i = 0; i < MAX_MODE_LF_DELTAS; i++) {
    const int delta = lf->mode_deltas[i];
    const int changed = delta != last_mode_deltas[i];
    aom_wb_write_bit(wb, changed);
    if (changed) aom_wb_write_inv_signed_literal(wb, delta, 6);
  }
}
2078
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002079static AOM_INLINE void encode_cdef(const AV1_COMMON *cm,
2080 struct aom_write_bit_buffer *wb) {
Urvang Joshib6409e92020-03-23 11:23:27 -07002081 assert(!cm->features.coded_lossless);
Tarundeep Singh4243e622021-04-20 16:10:22 +05302082 if (!cm->seq_params->enable_cdef) return;
Urvang Joshib6409e92020-03-23 11:23:27 -07002083 if (cm->features.allow_intrabc) return;
Hui Su293f2812018-02-26 14:41:18 -08002084 const int num_planes = av1_num_planes(cm);
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002085 int i;
Hui Su584ba482019-06-19 11:48:05 -07002086 aom_wb_write_literal(wb, cm->cdef_info.cdef_damping - 3, 2);
David Turnerebf96f42018-11-14 16:57:57 +00002087 aom_wb_write_literal(wb, cm->cdef_info.cdef_bits, 2);
2088 for (i = 0; i < cm->cdef_info.nb_cdef_strengths; i++) {
2089 aom_wb_write_literal(wb, cm->cdef_info.cdef_strengths[i],
2090 CDEF_STRENGTH_BITS);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002091 if (num_planes > 1)
David Turnerebf96f42018-11-14 16:57:57 +00002092 aom_wb_write_literal(wb, cm->cdef_info.cdef_uv_strengths[i],
2093 CDEF_STRENGTH_BITS);
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002094 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002095}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002096
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002097static AOM_INLINE void write_delta_q(struct aom_write_bit_buffer *wb,
2098 int delta_q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002099 if (delta_q != 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002100 aom_wb_write_bit(wb, 1);
2101 aom_wb_write_inv_signed_literal(wb, delta_q, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002102 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002103 aom_wb_write_bit(wb, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002104 }
2105}
2106
Urvang Joshi17814622020-03-27 17:26:17 -07002107static AOM_INLINE void encode_quantization(
2108 const CommonQuantParams *const quant_params, int num_planes,
2109 bool separate_uv_delta_q, struct aom_write_bit_buffer *wb) {
2110 aom_wb_write_literal(wb, quant_params->base_qindex, QINDEX_BITS);
2111 write_delta_q(wb, quant_params->y_dc_delta_q);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002112 if (num_planes > 1) {
Urvang Joshi17814622020-03-27 17:26:17 -07002113 int diff_uv_delta =
2114 (quant_params->u_dc_delta_q != quant_params->v_dc_delta_q) ||
2115 (quant_params->u_ac_delta_q != quant_params->v_ac_delta_q);
2116 if (separate_uv_delta_q) aom_wb_write_bit(wb, diff_uv_delta);
2117 write_delta_q(wb, quant_params->u_dc_delta_q);
2118 write_delta_q(wb, quant_params->u_ac_delta_q);
Debargha Mukherjee18f4fb22017-12-14 14:26:27 -08002119 if (diff_uv_delta) {
Urvang Joshi17814622020-03-27 17:26:17 -07002120 write_delta_q(wb, quant_params->v_dc_delta_q);
2121 write_delta_q(wb, quant_params->v_ac_delta_q);
Debargha Mukherjee18f4fb22017-12-14 14:26:27 -08002122 }
Yaowu Xu6fc47e52017-12-04 15:07:48 -08002123 }
Urvang Joshi17814622020-03-27 17:26:17 -07002124 aom_wb_write_bit(wb, quant_params->using_qmatrix);
2125 if (quant_params->using_qmatrix) {
Urvang Joshi7fdbd052020-03-30 17:50:22 -07002126 aom_wb_write_literal(wb, quant_params->qmatrix_level_y, QM_LEVEL_BITS);
2127 aom_wb_write_literal(wb, quant_params->qmatrix_level_u, QM_LEVEL_BITS);
Urvang Joshi17814622020-03-27 17:26:17 -07002128 if (!separate_uv_delta_q)
Urvang Joshi7fdbd052020-03-30 17:50:22 -07002129 assert(quant_params->qmatrix_level_u == quant_params->qmatrix_level_v);
Yaowu Xuf7a12422018-01-31 15:29:20 -08002130 else
Urvang Joshi7fdbd052020-03-30 17:50:22 -07002131 aom_wb_write_literal(wb, quant_params->qmatrix_level_v, QM_LEVEL_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002132 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002133}
2134
Cherma Rajan Ab4be7e52021-04-14 19:28:53 +05302135static AOM_INLINE void encode_segmentation(AV1_COMMON *cm,
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002136 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002137 int i, j;
Rostislav Pehlivanovdd1a80c2018-03-05 21:26:45 +00002138 struct segmentation *seg = &cm->seg;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002139
Yaowu Xuf883b422016-08-30 14:01:10 -07002140 aom_wb_write_bit(wb, seg->enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002141 if (!seg->enabled) return;
2142
David Barker190b77a2018-03-16 14:29:46 +00002143 // Write update flags
Cherma Rajan Ab4be7e52021-04-14 19:28:53 +05302144 if (cm->features.primary_ref_frame != PRIMARY_REF_NONE) {
David Barker190b77a2018-03-16 14:29:46 +00002145 aom_wb_write_bit(wb, seg->update_map);
Cherma Rajan Ab4be7e52021-04-14 19:28:53 +05302146 if (seg->update_map) aom_wb_write_bit(wb, seg->temporal_update);
David Barker190b77a2018-03-16 14:29:46 +00002147 aom_wb_write_bit(wb, seg->update_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002148 }
2149
2150 // Segmentation data
Yaowu Xuc27fc142016-08-22 16:08:15 -07002151 if (seg->update_data) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002152 for (i = 0; i < MAX_SEGMENTS; i++) {
2153 for (j = 0; j < SEG_LVL_MAX; j++) {
2154 const int active = segfeature_active(seg, i, j);
Yaowu Xuf883b422016-08-30 14:01:10 -07002155 aom_wb_write_bit(wb, active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002156 if (active) {
Sebastien Alaiwanca14b472017-12-11 11:46:00 +01002157 const int data_max = av1_seg_feature_data_max(j);
2158 const int data_min = -data_max;
2159 const int ubits = get_unsigned_bits(data_max);
2160 const int data = clamp(get_segdata(seg, i, j), data_min, data_max);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002161
Yaowu Xuf883b422016-08-30 14:01:10 -07002162 if (av1_is_segfeature_signed(j)) {
Sebastien Alaiwanca14b472017-12-11 11:46:00 +01002163 aom_wb_write_inv_signed_literal(wb, data, ubits);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002164 } else {
Sebastien Alaiwanca14b472017-12-11 11:46:00 +01002165 aom_wb_write_literal(wb, data, ubits);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002166 }
2167 }
2168 }
2169 }
2170 }
2171}
2172
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002173static AOM_INLINE void write_frame_interp_filter(
2174 InterpFilter filter, struct aom_write_bit_buffer *wb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002175 aom_wb_write_bit(wb, filter == SWITCHABLE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002176 if (filter != SWITCHABLE)
Angie Chiang6305abe2016-10-24 12:24:44 -07002177 aom_wb_write_literal(wb, filter, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002178}
2179
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002180// Same function as write_uniform but writing to uncompresses header wb
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002181static AOM_INLINE void wb_write_uniform(struct aom_write_bit_buffer *wb, int n,
2182 int v) {
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002183 const int l = get_unsigned_bits(n);
2184 const int m = (1 << l) - n;
2185 if (l == 0) return;
2186 if (v < m) {
2187 aom_wb_write_literal(wb, v, l - 1);
2188 } else {
2189 aom_wb_write_literal(wb, m + ((v - m) >> 1), l - 1);
2190 aom_wb_write_literal(wb, (v - m) & 1, 1);
2191 }
2192}
2193
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002194static AOM_INLINE void write_tile_info_max_tile(
2195 const AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
Mudassir Galagnath77f31602022-04-19 16:28:25 +05302196 int width_sb =
2197 CEIL_POWER_OF_TWO(cm->mi_params.mi_cols, cm->seq_params->mib_size_log2);
2198 int height_sb =
2199 CEIL_POWER_OF_TWO(cm->mi_params.mi_rows, cm->seq_params->mib_size_log2);
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002200 int size_sb, i;
Urvang Joshi54ffae72020-03-23 13:37:10 -07002201 const CommonTileParams *const tiles = &cm->tiles;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002202
Urvang Joshi54ffae72020-03-23 13:37:10 -07002203 aom_wb_write_bit(wb, tiles->uniform_spacing);
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002204
Urvang Joshi54ffae72020-03-23 13:37:10 -07002205 if (tiles->uniform_spacing) {
Urvang Joshi54ffae72020-03-23 13:37:10 -07002206 int ones = tiles->log2_cols - tiles->min_log2_cols;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002207 while (ones--) {
2208 aom_wb_write_bit(wb, 1);
2209 }
Urvang Joshi54ffae72020-03-23 13:37:10 -07002210 if (tiles->log2_cols < tiles->max_log2_cols) {
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002211 aom_wb_write_bit(wb, 0);
2212 }
2213
2214 // rows
Urvang Joshi54ffae72020-03-23 13:37:10 -07002215 ones = tiles->log2_rows - tiles->min_log2_rows;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002216 while (ones--) {
2217 aom_wb_write_bit(wb, 1);
2218 }
Urvang Joshi54ffae72020-03-23 13:37:10 -07002219 if (tiles->log2_rows < tiles->max_log2_rows) {
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002220 aom_wb_write_bit(wb, 0);
2221 }
2222 } else {
2223 // Explicit tiles with configurable tile widths and heights
2224 // columns
Urvang Joshi54ffae72020-03-23 13:37:10 -07002225 for (i = 0; i < tiles->cols; i++) {
2226 size_sb = tiles->col_start_sb[i + 1] - tiles->col_start_sb[i];
2227 wb_write_uniform(wb, AOMMIN(width_sb, tiles->max_width_sb), size_sb - 1);
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002228 width_sb -= size_sb;
2229 }
2230 assert(width_sb == 0);
2231
2232 // rows
Urvang Joshi54ffae72020-03-23 13:37:10 -07002233 for (i = 0; i < tiles->rows; i++) {
2234 size_sb = tiles->row_start_sb[i + 1] - tiles->row_start_sb[i];
2235 wb_write_uniform(wb, AOMMIN(height_sb, tiles->max_height_sb),
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002236 size_sb - 1);
2237 height_sb -= size_sb;
2238 }
2239 assert(height_sb == 0);
2240 }
2241}
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002242
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002243static AOM_INLINE void write_tile_info(const AV1_COMMON *const cm,
2244 struct aom_write_bit_buffer *saved_wb,
2245 struct aom_write_bit_buffer *wb) {
Yunqing Wang445739a2018-06-10 12:27:34 -07002246 write_tile_info_max_tile(cm, wb);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002247
James Zern8b007ff2018-02-26 22:49:13 -08002248 *saved_wb = *wb;
Urvang Joshi54ffae72020-03-23 13:37:10 -07002249 if (cm->tiles.rows * cm->tiles.cols > 1) {
Dominic Symesa48289a2018-04-03 21:07:59 +02002250 // tile id used for cdf update
Urvang Joshi54ffae72020-03-23 13:37:10 -07002251 aom_wb_write_literal(wb, 0, cm->tiles.log2_cols + cm->tiles.log2_rows);
James Zern9e9f7ad2018-03-02 17:38:53 -08002252 // Number of bytes in tile size - 1
2253 aom_wb_write_literal(wb, 3, 2);
2254 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002255}
2256
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002257static AOM_INLINE void write_ext_tile_info(
2258 const AV1_COMMON *const cm, struct aom_write_bit_buffer *saved_wb,
2259 struct aom_write_bit_buffer *wb) {
Yunqing Wang3f9c1e22018-06-16 16:48:15 -07002260 // This information is stored as a separate byte.
2261 int mod = wb->bit_offset % CHAR_BIT;
2262 if (mod > 0) aom_wb_write_literal(wb, 0, CHAR_BIT - mod);
2263 assert(aom_wb_is_byte_aligned(wb));
2264
Yunqing Wang445739a2018-06-10 12:27:34 -07002265 *saved_wb = *wb;
Urvang Joshi54ffae72020-03-23 13:37:10 -07002266 if (cm->tiles.rows * cm->tiles.cols > 1) {
Yunqing Wang445739a2018-06-10 12:27:34 -07002267 // Note that the last item in the uncompressed header is the data
2268 // describing tile configuration.
2269 // Number of bytes in tile column size - 1
2270 aom_wb_write_literal(wb, 0, 2);
2271 // Number of bytes in tile size - 1
2272 aom_wb_write_literal(wb, 0, 2);
2273 }
2274}
2275
Yaowu Xuc27fc142016-08-22 16:08:15 -07002276static INLINE int find_identical_tile(
2277 const int tile_row, const int tile_col,
Wan-Teh Chang04891e52018-05-24 17:31:10 -07002278 TileBufferEnc (*const tile_buffers)[MAX_TILE_COLS]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002279 const MV32 candidate_offset[1] = { { 1, 0 } };
2280 const uint8_t *const cur_tile_data =
2281 tile_buffers[tile_row][tile_col].data + 4;
Jingning Han99ffce62017-04-25 15:48:41 -07002282 const size_t cur_tile_size = tile_buffers[tile_row][tile_col].size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002283
2284 int i;
2285
2286 if (tile_row == 0) return 0;
2287
2288 // (TODO: yunqingwang) For now, only above tile is checked and used.
2289 // More candidates such as left tile can be added later.
2290 for (i = 0; i < 1; i++) {
2291 int row_offset = candidate_offset[0].row;
2292 int col_offset = candidate_offset[0].col;
2293 int row = tile_row - row_offset;
2294 int col = tile_col - col_offset;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002295 const uint8_t *tile_data;
2296 TileBufferEnc *candidate;
2297
2298 if (row < 0 || col < 0) continue;
2299
David Turnere7fea8e2018-12-10 15:57:05 +00002300 const uint32_t tile_hdr = mem_get_le32(tile_buffers[row][col].data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002301
David Turnere7fea8e2018-12-10 15:57:05 +00002302 // Read out tile-copy-mode bit:
2303 if ((tile_hdr >> 31) == 1) {
2304 // The candidate is a copy tile itself: the offset is stored in bits
2305 // 30 through 24 inclusive.
2306 row_offset += (tile_hdr >> 24) & 0x7f;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002307 row = tile_row - row_offset;
2308 }
2309
2310 candidate = &tile_buffers[row][col];
2311
2312 if (row_offset >= 128 || candidate->size != cur_tile_size) continue;
2313
2314 tile_data = candidate->data + 4;
2315
2316 if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;
2317
2318 // Identical tile found
2319 assert(row_offset > 0);
2320 return row_offset;
2321 }
2322
2323 // No identical tile found
2324 return 0;
2325}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002326
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002327static AOM_INLINE void write_render_size(const AV1_COMMON *cm,
2328 struct aom_write_bit_buffer *wb) {
Cheng Chen09c83a52018-06-05 12:27:36 -07002329 const int scaling_active = av1_resize_scaled(cm);
Yaowu Xuf883b422016-08-30 14:01:10 -07002330 aom_wb_write_bit(wb, scaling_active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002331 if (scaling_active) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002332 aom_wb_write_literal(wb, cm->render_width - 1, 16);
2333 aom_wb_write_literal(wb, cm->render_height - 1, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002334 }
2335}
2336
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002337static AOM_INLINE void write_superres_scale(const AV1_COMMON *const cm,
2338 struct aom_write_bit_buffer *wb) {
Tarundeep Singh4243e622021-04-20 16:10:22 +05302339 const SequenceHeader *const seq_params = cm->seq_params;
Urvang Joshi2c92b072018-03-19 17:23:31 -07002340 if (!seq_params->enable_superres) {
2341 assert(cm->superres_scale_denominator == SCALE_NUMERATOR);
2342 return;
2343 }
2344
Fergus Simpsone7508412017-03-14 18:14:09 -07002345 // First bit is whether to to scale or not
Urvang Joshide71d142017-10-05 12:12:15 -07002346 if (cm->superres_scale_denominator == SCALE_NUMERATOR) {
Fergus Simpsone7508412017-03-14 18:14:09 -07002347 aom_wb_write_bit(wb, 0); // no scaling
2348 } else {
2349 aom_wb_write_bit(wb, 1); // scaling, write scale factor
Urvang Joshi83010182017-10-27 12:36:02 -07002350 assert(cm->superres_scale_denominator >= SUPERRES_SCALE_DENOMINATOR_MIN);
2351 assert(cm->superres_scale_denominator <
2352 SUPERRES_SCALE_DENOMINATOR_MIN + (1 << SUPERRES_SCALE_BITS));
Fergus Simpsone7508412017-03-14 18:14:09 -07002353 aom_wb_write_literal(
Urvang Joshide71d142017-10-05 12:12:15 -07002354 wb, cm->superres_scale_denominator - SUPERRES_SCALE_DENOMINATOR_MIN,
Fergus Simpsone7508412017-03-14 18:14:09 -07002355 SUPERRES_SCALE_BITS);
2356 }
2357}
Fergus Simpsone7508412017-03-14 18:14:09 -07002358
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002359static AOM_INLINE void write_frame_size(const AV1_COMMON *cm,
2360 int frame_size_override,
2361 struct aom_write_bit_buffer *wb) {
David Barker22171312017-11-20 11:26:04 +00002362 const int coded_width = cm->superres_upscaled_width - 1;
2363 const int coded_height = cm->superres_upscaled_height - 1;
David Barker22171312017-11-20 11:26:04 +00002364
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002365 if (frame_size_override) {
Tarundeep Singh4243e622021-04-20 16:10:22 +05302366 const SequenceHeader *seq_params = cm->seq_params;
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002367 int num_bits_width = seq_params->num_bits_width;
2368 int num_bits_height = seq_params->num_bits_height;
David Barker22171312017-11-20 11:26:04 +00002369 aom_wb_write_literal(wb, coded_width, num_bits_width);
2370 aom_wb_write_literal(wb, coded_height, num_bits_height);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002371 }
David Barker22171312017-11-20 11:26:04 +00002372
David Barker22171312017-11-20 11:26:04 +00002373 write_superres_scale(cm, wb);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002374 write_render_size(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002375}
2376
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002377static AOM_INLINE void write_frame_size_with_refs(
2378 const AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002379 int found = 0;
2380
2381 MV_REFERENCE_FRAME ref_frame;
2382 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00002383 const YV12_BUFFER_CONFIG *cfg = get_ref_frame_yv12_buf(cm, ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002384
2385 if (cfg != NULL) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002386 found = cm->superres_upscaled_width == cfg->y_crop_width &&
2387 cm->superres_upscaled_height == cfg->y_crop_height;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002388 found &= cm->render_width == cfg->render_width &&
2389 cm->render_height == cfg->render_height;
2390 }
Yaowu Xuf883b422016-08-30 14:01:10 -07002391 aom_wb_write_bit(wb, found);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002392 if (found) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002393 write_superres_scale(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002394 break;
2395 }
2396 }
2397
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002398 if (!found) {
Tom Finegan8ab2bba2018-02-28 07:36:28 -08002399 int frame_size_override = 1; // Always equal to 1 in this function
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002400 write_frame_size(cm, frame_size_override, wb);
2401 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002402}
2403
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002404static AOM_INLINE void write_profile(BITSTREAM_PROFILE profile,
2405 struct aom_write_bit_buffer *wb) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002406 assert(profile >= PROFILE_0 && profile < MAX_PROFILES);
Debargha Mukherjee53396fb2018-03-30 12:19:38 -07002407 aom_wb_write_literal(wb, profile, PROFILE_BITS);
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002408}
2409
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002410static AOM_INLINE void write_bitdepth(const SequenceHeader *const seq_params,
2411 struct aom_write_bit_buffer *wb) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002412 // Profile 0/1: [0] for 8 bit, [1] 10-bit
2413 // Profile 2: [0] for 8 bit, [10] 10-bit, [11] - 12-bit
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002414 aom_wb_write_bit(wb, seq_params->bit_depth == AOM_BITS_8 ? 0 : 1);
2415 if (seq_params->profile == PROFILE_2 && seq_params->bit_depth != AOM_BITS_8) {
2416 aom_wb_write_bit(wb, seq_params->bit_depth == AOM_BITS_10 ? 0 : 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002417 }
2418}
2419
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002420static AOM_INLINE void write_color_config(
2421 const SequenceHeader *const seq_params, struct aom_write_bit_buffer *wb) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002422 write_bitdepth(seq_params, wb);
2423 const int is_monochrome = seq_params->monochrome;
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002424 // monochrome bit
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002425 if (seq_params->profile != PROFILE_1)
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002426 aom_wb_write_bit(wb, is_monochrome);
2427 else
2428 assert(!is_monochrome);
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002429 if (seq_params->color_primaries == AOM_CICP_CP_UNSPECIFIED &&
2430 seq_params->transfer_characteristics == AOM_CICP_TC_UNSPECIFIED &&
2431 seq_params->matrix_coefficients == AOM_CICP_MC_UNSPECIFIED) {
Andrey Norkin9e694632017-12-21 18:50:57 -08002432 aom_wb_write_bit(wb, 0); // No color description present
2433 } else {
2434 aom_wb_write_bit(wb, 1); // Color description present
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002435 aom_wb_write_literal(wb, seq_params->color_primaries, 8);
2436 aom_wb_write_literal(wb, seq_params->transfer_characteristics, 8);
2437 aom_wb_write_literal(wb, seq_params->matrix_coefficients, 8);
Andrey Norkin9e694632017-12-21 18:50:57 -08002438 }
Debargha Mukherjee085095d2018-03-30 12:39:56 -07002439 if (is_monochrome) {
2440 // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002441 aom_wb_write_bit(wb, seq_params->color_range);
Debargha Mukherjee085095d2018-03-30 12:39:56 -07002442 return;
2443 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002444 if (seq_params->color_primaries == AOM_CICP_CP_BT_709 &&
2445 seq_params->transfer_characteristics == AOM_CICP_TC_SRGB &&
Debargha Mukherjeeffffc562018-10-02 09:02:25 -07002446 seq_params->matrix_coefficients == AOM_CICP_MC_IDENTITY) {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002447 assert(seq_params->subsampling_x == 0 && seq_params->subsampling_y == 0);
2448 assert(seq_params->profile == PROFILE_1 ||
2449 (seq_params->profile == PROFILE_2 &&
2450 seq_params->bit_depth == AOM_BITS_12));
Imdad Sardharwalla317002f2017-12-05 16:24:56 +00002451 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002452 // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002453 aom_wb_write_bit(wb, seq_params->color_range);
2454 if (seq_params->profile == PROFILE_0) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002455 // 420 only
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002456 assert(seq_params->subsampling_x == 1 && seq_params->subsampling_y == 1);
2457 } else if (seq_params->profile == PROFILE_1) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002458 // 444 only
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002459 assert(seq_params->subsampling_x == 0 && seq_params->subsampling_y == 0);
2460 } else if (seq_params->profile == PROFILE_2) {
2461 if (seq_params->bit_depth == AOM_BITS_12) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002462 // 420, 444 or 422
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002463 aom_wb_write_bit(wb, seq_params->subsampling_x);
2464 if (seq_params->subsampling_x == 0) {
2465 assert(seq_params->subsampling_y == 0 &&
David Barker0c3545b2018-01-16 17:32:23 +00002466 "4:4:0 subsampling not allowed in AV1");
2467 } else {
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002468 aom_wb_write_bit(wb, seq_params->subsampling_y);
David Barker0c3545b2018-01-16 17:32:23 +00002469 }
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002470 } else {
2471 // 422 only
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002472 assert(seq_params->subsampling_x == 1 &&
2473 seq_params->subsampling_y == 0);
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002474 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002475 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002476 if (seq_params->matrix_coefficients == AOM_CICP_MC_IDENTITY) {
2477 assert(seq_params->subsampling_x == 0 && seq_params->subsampling_y == 0);
Debargha Mukherjeef61c0d12018-03-30 19:29:59 -07002478 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002479 if (seq_params->subsampling_x == 1 && seq_params->subsampling_y == 1) {
2480 aom_wb_write_literal(wb, seq_params->chroma_sample_position, 2);
anorkin76fb1262017-03-22 15:12:12 -07002481 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002482 }
Urvang Joshi20cf30e2018-07-19 02:33:58 -07002483 aom_wb_write_bit(wb, seq_params->separate_uv_delta_q);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002484}
2485
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002486static AOM_INLINE void write_timing_info_header(
Urvang Joshi450a9a22020-03-31 16:00:22 -07002487 const aom_timing_info_t *const timing_info,
2488 struct aom_write_bit_buffer *wb) {
2489 aom_wb_write_unsigned_literal(wb, timing_info->num_units_in_display_tick, 32);
2490 aom_wb_write_unsigned_literal(wb, timing_info->time_scale, 32);
2491 aom_wb_write_bit(wb, timing_info->equal_picture_interval);
2492 if (timing_info->equal_picture_interval) {
2493 aom_wb_write_uvlc(wb, timing_info->num_ticks_per_picture - 1);
Andrey Norkin28e9ce22018-01-08 10:11:21 -08002494 }
2495}
Andrey Norkin28e9ce22018-01-08 10:11:21 -08002496
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002497static AOM_INLINE void write_decoder_model_info(
Urvang Joshi450a9a22020-03-31 16:00:22 -07002498 const aom_dec_model_info_t *const decoder_model_info,
2499 struct aom_write_bit_buffer *wb) {
Andrey Norkin795ba872018-03-06 13:24:14 -08002500 aom_wb_write_literal(
Urvang Joshi450a9a22020-03-31 16:00:22 -07002501 wb, decoder_model_info->encoder_decoder_buffer_delay_length - 1, 5);
2502 aom_wb_write_unsigned_literal(
2503 wb, decoder_model_info->num_units_in_decoding_tick, 32);
2504 aom_wb_write_literal(wb, decoder_model_info->buffer_removal_time_length - 1,
Andrey Norkin795ba872018-03-06 13:24:14 -08002505 5);
Urvang Joshi450a9a22020-03-31 16:00:22 -07002506 aom_wb_write_literal(
2507 wb, decoder_model_info->frame_presentation_time_length - 1, 5);
Andrey Norkin795ba872018-03-06 13:24:14 -08002508}
2509
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002510static AOM_INLINE void write_dec_model_op_parameters(
Urvang Joshi450a9a22020-03-31 16:00:22 -07002511 const aom_dec_model_op_parameters_t *op_params, int buffer_delay_length,
2512 struct aom_write_bit_buffer *wb) {
2513 aom_wb_write_unsigned_literal(wb, op_params->decoder_buffer_delay,
2514 buffer_delay_length);
2515 aom_wb_write_unsigned_literal(wb, op_params->encoder_buffer_delay,
2516 buffer_delay_length);
2517 aom_wb_write_bit(wb, op_params->low_delay_mode_flag);
Andrey Norkin795ba872018-03-06 13:24:14 -08002518}
2519
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002520static AOM_INLINE void write_tu_pts_info(AV1_COMMON *const cm,
2521 struct aom_write_bit_buffer *wb) {
Andrey Norkin795ba872018-03-06 13:24:14 -08002522 aom_wb_write_unsigned_literal(
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07002523 wb, cm->frame_presentation_time,
Tarundeep Singh4243e622021-04-20 16:10:22 +05302524 cm->seq_params->decoder_model_info.frame_presentation_time_length);
Andrey Norkin795ba872018-03-06 13:24:14 -08002525}
Andrey Norkin795ba872018-03-06 13:24:14 -08002526
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002527static AOM_INLINE void write_film_grain_params(
2528 const AV1_COMP *const cpi, struct aom_write_bit_buffer *wb) {
David Turner08f909c2018-12-18 13:29:14 +00002529 const AV1_COMMON *const cm = &cpi->common;
2530 const aom_film_grain_t *const pars = &cm->cur_frame->film_grain_params;
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002531 aom_wb_write_bit(wb, pars->apply_grain);
2532 if (!pars->apply_grain) return;
2533
2534 aom_wb_write_literal(wb, pars->random_seed, 16);
2535
David Turnerd2a592e2018-11-16 14:59:31 +00002536 if (cm->current_frame.frame_type == INTER_FRAME)
Andrey Norkin879488f2018-02-28 15:30:26 -08002537 aom_wb_write_bit(wb, pars->update_parameters);
David Turner08f909c2018-12-18 13:29:14 +00002538
Dominic Symesd4929012018-01-31 17:32:01 +01002539 if (!pars->update_parameters) {
David Turnere7ebf902018-12-04 14:04:55 +00002540 int ref_frame, ref_idx;
Zoe Liu27deb382018-03-27 15:13:56 -07002541 for (ref_frame = LAST_FRAME; ref_frame < REF_FRAMES; ref_frame++) {
David Turnera21966b2018-12-05 14:48:49 +00002542 ref_idx = get_ref_frame_map_idx(cm, ref_frame);
Dominic Symes4d375682018-02-28 17:26:04 +01002543 assert(ref_idx != INVALID_IDX);
David Turnere7ebf902018-12-04 14:04:55 +00002544 const RefCntBuffer *const buf = cm->ref_frame_map[ref_idx];
2545 if (buf->film_grain_params_present &&
Yaowu Xu754f6462021-07-12 14:00:23 -07002546 aom_check_grain_params_equiv(pars, &buf->film_grain_params)) {
Dominic Symesd4929012018-01-31 17:32:01 +01002547 break;
2548 }
2549 }
Zoe Liu27deb382018-03-27 15:13:56 -07002550 assert(ref_frame < REF_FRAMES);
Dominic Symesd4929012018-01-31 17:32:01 +01002551 aom_wb_write_literal(wb, ref_idx, 3);
2552 return;
2553 }
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002554
2555 // Scaling functions parameters
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002556 aom_wb_write_literal(wb, pars->num_y_points, 4); // max 14
2557 for (int i = 0; i < pars->num_y_points; i++) {
2558 aom_wb_write_literal(wb, pars->scaling_points_y[i][0], 8);
2559 aom_wb_write_literal(wb, pars->scaling_points_y[i][1], 8);
2560 }
2561
Tarundeep Singh4243e622021-04-20 16:10:22 +05302562 if (!cm->seq_params->monochrome) {
Andrey Norkin20be5452018-02-20 17:46:13 -08002563 aom_wb_write_bit(wb, pars->chroma_scaling_from_luma);
David Turner08f909c2018-12-18 13:29:14 +00002564 } else {
2565 assert(!pars->chroma_scaling_from_luma);
2566 }
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002567
Tarundeep Singh4243e622021-04-20 16:10:22 +05302568 if (cm->seq_params->monochrome || pars->chroma_scaling_from_luma ||
2569 ((cm->seq_params->subsampling_x == 1) &&
2570 (cm->seq_params->subsampling_y == 1) && (pars->num_y_points == 0))) {
David Turner08f909c2018-12-18 13:29:14 +00002571 assert(pars->num_cb_points == 0 && pars->num_cr_points == 0);
Andrey Norkin0c294fa2018-02-16 18:32:12 -08002572 } else {
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002573 aom_wb_write_literal(wb, pars->num_cb_points, 4); // max 10
2574 for (int i = 0; i < pars->num_cb_points; i++) {
2575 aom_wb_write_literal(wb, pars->scaling_points_cb[i][0], 8);
2576 aom_wb_write_literal(wb, pars->scaling_points_cb[i][1], 8);
2577 }
2578
2579 aom_wb_write_literal(wb, pars->num_cr_points, 4); // max 10
2580 for (int i = 0; i < pars->num_cr_points; i++) {
2581 aom_wb_write_literal(wb, pars->scaling_points_cr[i][0], 8);
2582 aom_wb_write_literal(wb, pars->scaling_points_cr[i][1], 8);
2583 }
2584 }
2585
2586 aom_wb_write_literal(wb, pars->scaling_shift - 8, 2); // 8 + value
2587
2588 // AR coefficients
2589 // Only sent if the corresponsing scaling function has
2590 // more than 0 points
2591
2592 aom_wb_write_literal(wb, pars->ar_coeff_lag, 2);
2593
2594 int num_pos_luma = 2 * pars->ar_coeff_lag * (pars->ar_coeff_lag + 1);
Andrey Norkin20be5452018-02-20 17:46:13 -08002595 int num_pos_chroma = num_pos_luma;
2596 if (pars->num_y_points > 0) ++num_pos_chroma;
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002597
2598 if (pars->num_y_points)
2599 for (int i = 0; i < num_pos_luma; i++)
2600 aom_wb_write_literal(wb, pars->ar_coeffs_y[i] + 128, 8);
2601
2602 if (pars->num_cb_points || pars->chroma_scaling_from_luma)
2603 for (int i = 0; i < num_pos_chroma; i++)
2604 aom_wb_write_literal(wb, pars->ar_coeffs_cb[i] + 128, 8);
2605
2606 if (pars->num_cr_points || pars->chroma_scaling_from_luma)
2607 for (int i = 0; i < num_pos_chroma; i++)
2608 aom_wb_write_literal(wb, pars->ar_coeffs_cr[i] + 128, 8);
2609
2610 aom_wb_write_literal(wb, pars->ar_coeff_shift - 6, 2); // 8 + value
2611
Andrey Norkina840cde2018-02-16 15:39:50 -08002612 aom_wb_write_literal(wb, pars->grain_scale_shift, 2);
2613
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002614 if (pars->num_cb_points) {
2615 aom_wb_write_literal(wb, pars->cb_mult, 8);
2616 aom_wb_write_literal(wb, pars->cb_luma_mult, 8);
2617 aom_wb_write_literal(wb, pars->cb_offset, 9);
2618 }
2619
2620 if (pars->num_cr_points) {
2621 aom_wb_write_literal(wb, pars->cr_mult, 8);
2622 aom_wb_write_literal(wb, pars->cr_luma_mult, 8);
2623 aom_wb_write_literal(wb, pars->cr_offset, 9);
2624 }
2625
2626 aom_wb_write_bit(wb, pars->overlap_flag);
2627
2628 aom_wb_write_bit(wb, pars->clip_to_restricted_range);
2629}
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002630
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002631static AOM_INLINE void write_sb_size(const SequenceHeader *const seq_params,
2632 struct aom_write_bit_buffer *wb) {
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002633 (void)seq_params;
2634 (void)wb;
2635 assert(seq_params->mib_size == mi_size_wide[seq_params->sb_size]);
2636 assert(seq_params->mib_size == 1 << seq_params->mib_size_log2);
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002637 assert(seq_params->sb_size == BLOCK_128X128 ||
2638 seq_params->sb_size == BLOCK_64X64);
2639 aom_wb_write_bit(wb, seq_params->sb_size == BLOCK_128X128 ? 1 : 0);
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002640}
2641
// Writes the core of the AV1 sequence header: maximum frame dimensions,
// frame-id signaling parameters, superblock size, and the sequence-level
// tool-enable flags. The order of writes is normative — it must match the
// sequence_header_obu() syntax in the AV1 specification exactly.
static AOM_INLINE void write_sequence_header(
    const SequenceHeader *const seq_params, struct aom_write_bit_buffer *wb) {
  // Maximum frame dimensions: first the bit-widths (minus 1, in 4 bits
  // each), then the dimensions themselves (minus 1) using those widths.
  aom_wb_write_literal(wb, seq_params->num_bits_width - 1, 4);
  aom_wb_write_literal(wb, seq_params->num_bits_height - 1, 4);
  aom_wb_write_literal(wb, seq_params->max_frame_width - 1,
                       seq_params->num_bits_width);
  aom_wb_write_literal(wb, seq_params->max_frame_height - 1,
                       seq_params->num_bits_height);

  if (!seq_params->reduced_still_picture_hdr) {
    aom_wb_write_bit(wb, seq_params->frame_id_numbers_present_flag);
    if (seq_params->frame_id_numbers_present_flag) {
      // We must always have delta_frame_id_length < frame_id_length,
      // in order for a frame to be referenced with a unique delta.
      // Avoid wasting bits by using a coding that enforces this restriction:
      // delta_frame_id_length is coded minus 2, and frame_id_length is coded
      // as its (positive) excess over delta_frame_id_length, minus 1.
      aom_wb_write_literal(wb, seq_params->delta_frame_id_length - 2, 4);
      aom_wb_write_literal(
          wb,
          seq_params->frame_id_length - seq_params->delta_frame_id_length - 1,
          3);
    }
  }

  write_sb_size(seq_params, wb);

  aom_wb_write_bit(wb, seq_params->enable_filter_intra);
  aom_wb_write_bit(wb, seq_params->enable_intra_edge_filter);

  // The remaining tool flags only exist in full (non-reduced) headers.
  if (!seq_params->reduced_still_picture_hdr) {
    aom_wb_write_bit(wb, seq_params->enable_interintra_compound);
    aom_wb_write_bit(wb, seq_params->enable_masked_compound);
    aom_wb_write_bit(wb, seq_params->enable_warped_motion);
    aom_wb_write_bit(wb, seq_params->enable_dual_filter);

    aom_wb_write_bit(wb, seq_params->order_hint_info.enable_order_hint);

    // dist_wtd_comp and ref_frame_mvs both require order hints.
    if (seq_params->order_hint_info.enable_order_hint) {
      aom_wb_write_bit(wb, seq_params->order_hint_info.enable_dist_wtd_comp);
      aom_wb_write_bit(wb, seq_params->order_hint_info.enable_ref_frame_mvs);
    }
    // force_screen_content_tools == 2 means "choose per frame" and is coded
    // as a single 1 bit; otherwise a 0 bit is followed by the fixed
    // sequence-level on/off value.
    if (seq_params->force_screen_content_tools == 2) {
      aom_wb_write_bit(wb, 1);
    } else {
      aom_wb_write_bit(wb, 0);
      aom_wb_write_bit(wb, seq_params->force_screen_content_tools);
    }
    // force_integer_mv uses the same select-or-fix coding, but is only
    // signaled when screen content tools may be enabled.
    if (seq_params->force_screen_content_tools > 0) {
      if (seq_params->force_integer_mv == 2) {
        aom_wb_write_bit(wb, 1);
      } else {
        aom_wb_write_bit(wb, 0);
        aom_wb_write_bit(wb, seq_params->force_integer_mv);
      }
    } else {
      // When screen content tools are off, the spec fixes this to 2.
      assert(seq_params->force_integer_mv == 2);
    }
    if (seq_params->order_hint_info.enable_order_hint)
      aom_wb_write_literal(
          wb, seq_params->order_hint_info.order_hint_bits_minus_1, 3);
  }

  aom_wb_write_bit(wb, seq_params->enable_superres);
  aom_wb_write_bit(wb, seq_params->enable_cdef);
  aom_wb_write_bit(wb, seq_params->enable_restoration);
}
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002707
// Writes one global motion model to the frame header. The model type is
// coded first; then each active warp parameter is coded differentially
// against the corresponding parameter of the reference model (ref_params)
// with a signed sub-exponential code. Write order is normative — it must
// match global_motion_params() in the AV1 specification.
static AOM_INLINE void write_global_motion_params(
    const WarpedMotionParams *params, const WarpedMotionParams *ref_params,
    struct aom_write_bit_buffer *wb, int allow_hp) {
  const TransformationType type = params->wmtype;

  // As a workaround for an AV1 spec bug, we avoid choosing TRANSLATION
  // type models. Check here that we don't accidentally pick one somehow.
  // See comments in gm_get_motion_vector() for details on the bug we're
  // working around here
  assert(type != TRANSLATION);

  // Model type is coded as a short prefix: IDENTITY -> 0,
  // ROTZOOM -> 1 1, TRANSLATION -> 1 0 1, AFFINE -> 1 0 0.
  aom_wb_write_bit(wb, type != IDENTITY);
  if (type != IDENTITY) {
    aom_wb_write_bit(wb, type == ROTZOOM);
    if (type != ROTZOOM) aom_wb_write_bit(wb, type == TRANSLATION);
  }

  // Rotation/zoom parameters wmmat[2] (diagonal, coded around 1.0 in fixed
  // point, hence the (1 << GM_ALPHA_PREC_BITS) offset) and wmmat[3].
  if (type >= ROTZOOM) {
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
            (1 << GM_ALPHA_PREC_BITS),
        (params->wmmat[2] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF),
        (params->wmmat[3] >> GM_ALPHA_PREC_DIFF));
  }

  // Extra affine parameters wmmat[4] and wmmat[5] (wmmat[5] is the second
  // diagonal and is likewise coded around 1.0).
  if (type >= AFFINE) {
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF),
        (params->wmmat[4] >> GM_ALPHA_PREC_DIFF));
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
            (1 << GM_ALPHA_PREC_BITS),
        (params->wmmat[5] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
  }

  // Translation components wmmat[0], wmmat[1]. Pure-translation models use
  // reduced precision (further reduced when high-precision MVs are off);
  // for ROTZOOM/AFFINE the full translation precision is used.
  if (type >= TRANSLATION) {
    const int trans_bits = (type == TRANSLATION)
                               ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
                               : GM_ABS_TRANS_BITS;
    const int trans_prec_diff = (type == TRANSLATION)
                                    ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
                                    : GM_TRANS_PREC_DIFF;
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
        (ref_params->wmmat[0] >> trans_prec_diff),
        (params->wmmat[0] >> trans_prec_diff));
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
        (ref_params->wmmat[1] >> trans_prec_diff),
        (params->wmmat[1] >> trans_prec_diff));
  }
}
2766
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002767static AOM_INLINE void write_global_motion(AV1_COMP *cpi,
2768 struct aom_write_bit_buffer *wb) {
Sarah Parker3e579a62017-08-23 16:53:20 -07002769 AV1_COMMON *const cm = &cpi->common;
2770 int frame;
2771 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barkerd7c8bd52017-09-25 14:47:29 +01002772 const WarpedMotionParams *ref_params =
David Barkercc615a82018-03-19 14:38:51 +00002773 cm->prev_frame ? &cm->prev_frame->global_motion[frame]
2774 : &default_warp_params;
David Barkerd7c8bd52017-09-25 14:47:29 +01002775 write_global_motion_params(&cm->global_motion[frame], ref_params, wb,
Urvang Joshib6409e92020-03-23 11:23:27 -07002776 cm->features.allow_high_precision_mv);
Sarah Parker3e579a62017-08-23 16:53:20 -07002777 // TODO(sarahparker, debargha): The logic in the commented out code below
2778 // does not work currently and causes mismatches when resize is on.
2779 // Fix it before turning the optimization back on.
2780 /*
David Turnera21966b2018-12-05 14:48:49 +00002781 YV12_BUFFER_CONFIG *ref_buf = get_ref_frame_yv12_buf(cpi, frame);
Sarah Parker3e579a62017-08-23 16:53:20 -07002782 if (cpi->source->y_crop_width == ref_buf->y_crop_width &&
2783 cpi->source->y_crop_height == ref_buf->y_crop_height) {
2784 write_global_motion_params(&cm->global_motion[frame],
2785 &cm->prev_frame->global_motion[frame], wb,
Urvang Joshib6409e92020-03-23 11:23:27 -07002786 cm->features.allow_high_precision_mv);
Sarah Parker3e579a62017-08-23 16:53:20 -07002787 } else {
2788 assert(cm->global_motion[frame].wmtype == IDENTITY &&
2789 "Invalid warp type for frames of different resolutions");
2790 }
2791 */
2792 /*
2793 printf("Frame %d/%d: Enc Ref %d: %d %d %d %d\n",
David Turnerd2a592e2018-11-16 14:59:31 +00002794 cm->current_frame.frame_number, cm->show_frame, frame,
Sarah Parker3e579a62017-08-23 16:53:20 -07002795 cm->global_motion[frame].wmmat[0],
2796 cm->global_motion[frame].wmmat[1], cm->global_motion[frame].wmmat[2],
2797 cm->global_motion[frame].wmmat[3]);
2798 */
2799 }
2800}
Sarah Parker3e579a62017-08-23 16:53:20 -07002801
// Decides whether frame_refs_short_signaling may be used for the current
// frame, i.e. whether signaling only the LAST and GOLDEN map indices lets
// the decoder derive exactly the reference-frame set the encoder chose.
// Returns 1 when short signaling is safe, 0 otherwise.
static int check_frame_refs_short_signaling(AV1_COMMON *const cm,
                                            bool enable_ref_short_signaling) {
  // In rtc case when res < 360p and speed >= 9, we turn on
  // frame_refs_short_signaling if it won't break the decoder.
  if (enable_ref_short_signaling) {
    const int gld_map_idx = get_ref_frame_map_idx(cm, GOLDEN_FRAME);
    // Order hints wrap at 2^(order_hint_bits); "base" is the wrap period,
    // used to detect whether two display order hints share the same
    // wrap-around group.
    const int base =
        1 << (cm->seq_params->order_hint_info.order_hint_bits_minus_1 + 1);

    const int order_hint_group_cur =
        cm->current_frame.display_order_hint / base;
    const int order_hint_group_gld =
        cm->ref_frame_map[gld_map_idx]->display_order_hint / base;
    const int relative_dist = cm->current_frame.order_hint -
                              cm->ref_frame_map[gld_map_idx]->order_hint;

    // If current frame and GOLDEN frame are in the same order_hint group, and
    // they are not far apart (i.e., > 64 frames), then return 1.
    if (order_hint_group_cur == order_hint_group_gld && relative_dist >= 0 &&
        relative_dist <= 64) {
      return 1;
    }
    return 0;
  }

  // Check whether all references are distinct frames: collect the unique
  // reference buffers across LAST..ALTREF.
  const RefCntBuffer *seen_bufs[INTER_REFS_PER_FRAME] = { NULL };
  int num_refs = 0;
  for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
    if (buf != NULL) {
      int seen = 0;
      for (int i = 0; i < num_refs; i++) {
        if (seen_bufs[i] == buf) {
          seen = 1;
          break;
        }
      }
      if (!seen) seen_bufs[num_refs++] = buf;
    }
  }

  // We only turn on frame_refs_short_signaling when all references are
  // distinct.
  if (num_refs < INTER_REFS_PER_FRAME) {
    // It indicates that there exist more than one reference frame pointing to
    // the same reference buffer, i.e. two or more references are duplicate.
    return 0;
  }

  // Check whether the encoder side ref frame choices are aligned with that to
  // be derived at the decoder side.
  int remapped_ref_idx_decoder[REF_FRAMES];

  const int lst_map_idx = get_ref_frame_map_idx(cm, LAST_FRAME);
  const int gld_map_idx = get_ref_frame_map_idx(cm, GOLDEN_FRAME);

  // Set up the frame refs mapping indexes according to the
  // frame_refs_short_signaling policy (this replicates the decoder-side
  // derivation from just the LAST and GOLDEN map indices).
  av1_set_frame_refs(cm, remapped_ref_idx_decoder, lst_map_idx, gld_map_idx);

  // We only turn on frame_refs_short_signaling when the encoder side decision
  // on ref frames is identical to that at the decoder side.
  int frame_refs_short_signaling = 1;
  for (int ref_idx = 0; ref_idx < INTER_REFS_PER_FRAME; ++ref_idx) {
    // Compare the buffer index between two reference frames indexed
    // respectively by the encoder and the decoder side decisions.
    RefCntBuffer *ref_frame_buf_new = NULL;
    if (remapped_ref_idx_decoder[ref_idx] != INVALID_IDX) {
      ref_frame_buf_new = cm->ref_frame_map[remapped_ref_idx_decoder[ref_idx]];
    }
    if (get_ref_frame_buf(cm, LAST_FRAME + ref_idx) != ref_frame_buf_new) {
      frame_refs_short_signaling = 0;
      break;
    }
  }

#if 0   // For debug
  printf("\nFrame=%d: \n", cm->current_frame.frame_number);
  printf("***frame_refs_short_signaling=%d\n", frame_refs_short_signaling);
  for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    printf("enc_ref(map_idx=%d)=%d, vs. "
           "dec_ref(map_idx=%d)=%d\n",
           get_ref_frame_map_idx(cm, ref_frame), ref_frame,
           cm->remapped_ref_idx[ref_frame - LAST_FRAME],
           ref_frame);
  }
#endif  // 0

  return frame_refs_short_signaling;
}
2893
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04002894// New function based on HLS R18
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002895static AOM_INLINE void write_uncompressed_header_obu(
Vishesh8bd59d92021-04-27 11:34:17 +05302896 AV1_COMP *cpi, MACROBLOCKD *const xd, struct aom_write_bit_buffer *saved_wb,
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07002897 struct aom_write_bit_buffer *wb) {
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04002898 AV1_COMMON *const cm = &cpi->common;
Tarundeep Singh4243e622021-04-20 16:10:22 +05302899 const SequenceHeader *const seq_params = cm->seq_params;
Urvang Joshi17814622020-03-27 17:26:17 -07002900 const CommonQuantParams *quant_params = &cm->quant_params;
David Turnerd2a592e2018-11-16 14:59:31 +00002901 CurrentFrame *const current_frame = &cm->current_frame;
Urvang Joshib6409e92020-03-23 11:23:27 -07002902 FeatureFlags *const features = &cm->features;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04002903
linzhenb4651282022-09-13 18:13:31 +00002904 if (!cpi->sf.rt_sf.enable_ref_short_signaling ||
2905 !seq_params->order_hint_info.enable_order_hint ||
2906 seq_params->order_hint_info.enable_ref_frame_mvs) {
2907 current_frame->frame_refs_short_signaling = 0;
2908 } else {
2909 current_frame->frame_refs_short_signaling = 1;
2910 }
David Turner73245762019-02-11 16:42:34 +00002911
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07002912 if (seq_params->still_picture) {
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002913 assert(cm->show_existing_frame == 0);
2914 assert(cm->show_frame == 1);
David Turnerd2a592e2018-11-16 14:59:31 +00002915 assert(current_frame->frame_type == KEY_FRAME);
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002916 }
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07002917 if (!seq_params->reduced_still_picture_hdr) {
Sarah Parker33005522018-07-27 14:46:25 -07002918 if (encode_show_existing_frame(cm)) {
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002919 aom_wb_write_bit(wb, 1); // show_existing_frame
2920 aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04002921
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07002922 if (seq_params->decoder_model_info_present_flag &&
Urvang Joshi450a9a22020-03-31 16:00:22 -07002923 seq_params->timing_info.equal_picture_interval == 0) {
Andrey Norkin795ba872018-03-06 13:24:14 -08002924 write_tu_pts_info(cm, wb);
2925 }
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07002926 if (seq_params->frame_id_numbers_present_flag) {
2927 int frame_id_len = seq_params->frame_id_length;
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002928 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
2929 aom_wb_write_literal(wb, display_frame_id, frame_id_len);
2930 }
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002931 return;
2932 } else {
2933 aom_wb_write_bit(wb, 0); // show_existing_frame
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04002934 }
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04002935
David Turnerd2a592e2018-11-16 14:59:31 +00002936 aom_wb_write_literal(wb, current_frame->frame_type, 2);
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002937
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002938 aom_wb_write_bit(wb, cm->show_frame);
Andrey Norkin795ba872018-03-06 13:24:14 -08002939 if (cm->show_frame) {
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07002940 if (seq_params->decoder_model_info_present_flag &&
Urvang Joshi450a9a22020-03-31 16:00:22 -07002941 seq_params->timing_info.equal_picture_interval == 0)
Andrey Norkin795ba872018-03-06 13:24:14 -08002942 write_tu_pts_info(cm, wb);
2943 } else {
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002944 aom_wb_write_bit(wb, cm->showable_frame);
Zoe Liub4991202017-12-21 15:31:06 -08002945 }
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002946 if (frame_is_sframe(cm)) {
Urvang Joshib6409e92020-03-23 11:23:27 -07002947 assert(features->error_resilient_mode);
David Turnerd2a592e2018-11-16 14:59:31 +00002948 } else if (!(current_frame->frame_type == KEY_FRAME && cm->show_frame)) {
Urvang Joshib6409e92020-03-23 11:23:27 -07002949 aom_wb_write_bit(wb, features->error_resilient_mode);
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002950 }
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04002951 }
Urvang Joshib6409e92020-03-23 11:23:27 -07002952 aom_wb_write_bit(wb, features->disable_cdf_update);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04002953
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07002954 if (seq_params->force_screen_content_tools == 2) {
Urvang Joshib6409e92020-03-23 11:23:27 -07002955 aom_wb_write_bit(wb, features->allow_screen_content_tools);
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00002956 } else {
Urvang Joshib6409e92020-03-23 11:23:27 -07002957 assert(features->allow_screen_content_tools ==
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07002958 seq_params->force_screen_content_tools);
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00002959 }
2960
Urvang Joshib6409e92020-03-23 11:23:27 -07002961 if (features->allow_screen_content_tools) {
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07002962 if (seq_params->force_integer_mv == 2) {
Urvang Joshib6409e92020-03-23 11:23:27 -07002963 aom_wb_write_bit(wb, features->cur_frame_force_integer_mv);
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00002964 } else {
Urvang Joshib6409e92020-03-23 11:23:27 -07002965 assert(features->cur_frame_force_integer_mv ==
2966 seq_params->force_integer_mv);
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00002967 }
2968 } else {
Urvang Joshib6409e92020-03-23 11:23:27 -07002969 assert(features->cur_frame_force_integer_mv == 0);
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00002970 }
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00002971
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002972 int frame_size_override_flag = 0;
Zoe Liud4a67a82018-02-21 12:35:33 -08002973
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07002974 if (seq_params->reduced_still_picture_hdr) {
Urvang Joshi39e6dd52019-02-01 12:46:09 -08002975 assert(cm->superres_upscaled_width == seq_params->max_frame_width &&
2976 cm->superres_upscaled_height == seq_params->max_frame_height);
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002977 } else {
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07002978 if (seq_params->frame_id_numbers_present_flag) {
2979 int frame_id_len = seq_params->frame_id_length;
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002980 aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
2981 }
Zoe Liu45a8fed2018-03-02 16:51:02 -08002982
Urvang Joshi39e6dd52019-02-01 12:46:09 -08002983 if (cm->superres_upscaled_width > seq_params->max_frame_width ||
2984 cm->superres_upscaled_height > seq_params->max_frame_height) {
Vishesh8bd59d92021-04-27 11:34:17 +05302985 aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002986 "Frame dimensions are larger than the maximum values");
2987 }
2988
2989 frame_size_override_flag =
Urvang Joshi39e6dd52019-02-01 12:46:09 -08002990 frame_is_sframe(cm)
2991 ? 1
2992 : (cm->superres_upscaled_width != seq_params->max_frame_width ||
2993 cm->superres_upscaled_height != seq_params->max_frame_height);
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07002994 if (!frame_is_sframe(cm)) aom_wb_write_bit(wb, frame_size_override_flag);
2995
David Turnerebf96f42018-11-14 16:57:57 +00002996 if (seq_params->order_hint_info.enable_order_hint)
2997 aom_wb_write_literal(
David Turnerd2a592e2018-11-16 14:59:31 +00002998 wb, current_frame->order_hint,
David Turnerebf96f42018-11-14 16:57:57 +00002999 seq_params->order_hint_info.order_hint_bits_minus_1 + 1);
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07003000
Urvang Joshib6409e92020-03-23 11:23:27 -07003001 if (!features->error_resilient_mode && !frame_is_intra_only(cm)) {
Urvang Joshi6237b882020-03-26 15:02:26 -07003002 aom_wb_write_literal(wb, features->primary_ref_frame, PRIMARY_REF_BITS);
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07003003 }
Yue Chend90d3432018-03-16 11:28:42 -07003004 }
3005
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07003006 if (seq_params->decoder_model_info_present_flag) {
Tarundeep Singhd00b98f2021-04-21 16:13:51 +05303007 aom_wb_write_bit(wb, cpi->ppi->buffer_removal_time_present);
3008 if (cpi->ppi->buffer_removal_time_present) {
Adrian Grangec56f6ec2018-05-31 14:19:32 -07003009 for (int op_num = 0;
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07003010 op_num < seq_params->operating_points_cnt_minus_1 + 1; op_num++) {
Urvang Joshi450a9a22020-03-31 16:00:22 -07003011 if (seq_params->op_params[op_num].decoder_model_param_present_flag) {
Wan-Teh Chang62e8d9c2021-05-28 12:30:23 -07003012 if (seq_params->operating_point_idc[op_num] == 0 ||
3013 ((seq_params->operating_point_idc[op_num] >>
Adrian Grangec56f6ec2018-05-31 14:19:32 -07003014 cm->temporal_layer_id) &
3015 0x1 &&
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07003016 (seq_params->operating_point_idc[op_num] >>
Adrian Grangec56f6ec2018-05-31 14:19:32 -07003017 (cm->spatial_layer_id + 8)) &
Wan-Teh Chang62e8d9c2021-05-28 12:30:23 -07003018 0x1)) {
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07003019 aom_wb_write_unsigned_literal(
Urvang Joshi450a9a22020-03-31 16:00:22 -07003020 wb, cm->buffer_removal_times[op_num],
3021 seq_params->decoder_model_info.buffer_removal_time_length);
3022 cm->buffer_removal_times[op_num]++;
3023 if (cm->buffer_removal_times[op_num] == 0) {
Vishesh8bd59d92021-04-27 11:34:17 +05303024 aom_internal_error(cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Wan-Teh Changf64b3bc2018-07-02 09:42:39 -07003025 "buffer_removal_time overflowed");
3026 }
Adrian Grangec56f6ec2018-05-31 14:19:32 -07003027 }
Andrey Norkin795ba872018-03-06 13:24:14 -08003028 }
3029 }
3030 }
3031 }
David Turner996b2c12018-12-07 15:52:30 +00003032
3033 // Shown keyframes and switch-frames automatically refreshes all reference
3034 // frames. For all other frame types, we need to write refresh_frame_flags.
bohanli99852502020-07-14 16:22:45 -07003035 if ((current_frame->frame_type == KEY_FRAME && !cm->show_frame) ||
David Turner996b2c12018-12-07 15:52:30 +00003036 current_frame->frame_type == INTER_FRAME ||
3037 current_frame->frame_type == INTRA_ONLY_FRAME)
3038 aom_wb_write_literal(wb, current_frame->refresh_frame_flags, REF_FRAMES);
3039
David Turner996b2c12018-12-07 15:52:30 +00003040 if (!frame_is_intra_only(cm) || current_frame->refresh_frame_flags != 0xff) {
Debargha Mukherjee33f42652018-04-12 14:27:32 -07003041 // Write all ref frame order hints if error_resilient_mode == 1
Urvang Joshib6409e92020-03-23 11:23:27 -07003042 if (features->error_resilient_mode &&
David Turnerebf96f42018-11-14 16:57:57 +00003043 seq_params->order_hint_info.enable_order_hint) {
Debargha Mukherjee33f42652018-04-12 14:27:32 -07003044 for (int ref_idx = 0; ref_idx < REF_FRAMES; ref_idx++) {
David Turnerebf96f42018-11-14 16:57:57 +00003045 aom_wb_write_literal(
David Turner996b2c12018-12-07 15:52:30 +00003046 wb, cm->ref_frame_map[ref_idx]->order_hint,
David Turnerebf96f42018-11-14 16:57:57 +00003047 seq_params->order_hint_info.order_hint_bits_minus_1 + 1);
Debargha Mukherjee33f42652018-04-12 14:27:32 -07003048 }
3049 }
3050 }
3051
David Turnerd2a592e2018-11-16 14:59:31 +00003052 if (current_frame->frame_type == KEY_FRAME) {
Debargha Mukherjee33f42652018-04-12 14:27:32 -07003053 write_frame_size(cm, frame_size_override_flag, wb);
Urvang Joshib6409e92020-03-23 11:23:27 -07003054 assert(!av1_superres_scaled(cm) || !features->allow_intrabc);
3055 if (features->allow_screen_content_tools && !av1_superres_scaled(cm))
3056 aom_wb_write_bit(wb, features->allow_intrabc);
Debargha Mukherjee33f42652018-04-12 14:27:32 -07003057 } else {
David Turnerd2a592e2018-11-16 14:59:31 +00003058 if (current_frame->frame_type == INTRA_ONLY_FRAME) {
Debargha Mukherjee33f42652018-04-12 14:27:32 -07003059 write_frame_size(cm, frame_size_override_flag, wb);
Urvang Joshib6409e92020-03-23 11:23:27 -07003060 assert(!av1_superres_scaled(cm) || !features->allow_intrabc);
3061 if (features->allow_screen_content_tools && !av1_superres_scaled(cm))
3062 aom_wb_write_bit(wb, features->allow_intrabc);
David Turnerd2a592e2018-11-16 14:59:31 +00003063 } else if (current_frame->frame_type == INTER_FRAME ||
3064 frame_is_sframe(cm)) {
Debargha Mukherjee33f42652018-04-12 14:27:32 -07003065 MV_REFERENCE_FRAME ref_frame;
3066
Imdad Sardharwalla21cc90b2018-03-12 16:39:38 +00003067 // NOTE: Error resilient mode turns off frame_refs_short_signaling
3068 // automatically.
Zoe Liua5cf7242018-05-14 14:26:18 -07003069#define FRAME_REFS_SHORT_SIGNALING 0
3070#if FRAME_REFS_SHORT_SIGNALING
David Turner73245762019-02-11 16:42:34 +00003071 current_frame->frame_refs_short_signaling =
David Turnerebf96f42018-11-14 16:57:57 +00003072 seq_params->order_hint_info.enable_order_hint;
Zoe Liua5cf7242018-05-14 14:26:18 -07003073#endif // FRAME_REFS_SHORT_SIGNALING
Zoe Liud4a67a82018-02-21 12:35:33 -08003074
David Turner73245762019-02-11 16:42:34 +00003075 if (current_frame->frame_refs_short_signaling) {
linzhen07374922022-09-19 23:09:28 +00003076 // In rtc case when cpi->sf.rt_sf.enable_ref_short_signaling is true,
3077 // we turn on frame_refs_short_signaling when the current frame and
3078 // golden frame are in the same order_hint group, and their relative
3079 // distance is <= 64 (in order to be decodable).
linzhenb4651282022-09-13 18:13:31 +00003080
3081 // For other cases, an example solution for encoder-side
3082 // implementation on frame_refs_short_signaling is also provided in
3083 // this function, where frame_refs_short_signaling is only turned on
3084 // when the encoder side decision on ref frames is identical to that
3085 // at the decoder side.
3086
David Turner73245762019-02-11 16:42:34 +00003087 current_frame->frame_refs_short_signaling =
linzhenb4651282022-09-13 18:13:31 +00003088 check_frame_refs_short_signaling(
3089 cm, cpi->sf.rt_sf.enable_ref_short_signaling);
Zoe Liua5cf7242018-05-14 14:26:18 -07003090 }
Zoe Liu1d90ceb2018-04-16 16:53:37 -07003091
David Turnerebf96f42018-11-14 16:57:57 +00003092 if (seq_params->order_hint_info.enable_order_hint)
David Turner73245762019-02-11 16:42:34 +00003093 aom_wb_write_bit(wb, current_frame->frame_refs_short_signaling);
Zoe Liua5cf7242018-05-14 14:26:18 -07003094
David Turner73245762019-02-11 16:42:34 +00003095 if (current_frame->frame_refs_short_signaling) {
David Turnera21966b2018-12-05 14:48:49 +00003096 const int lst_ref = get_ref_frame_map_idx(cm, LAST_FRAME);
Zoe Liua5cf7242018-05-14 14:26:18 -07003097 aom_wb_write_literal(wb, lst_ref, REF_FRAMES_LOG2);
3098
David Turnera21966b2018-12-05 14:48:49 +00003099 const int gld_ref = get_ref_frame_map_idx(cm, GOLDEN_FRAME);
Zoe Liua5cf7242018-05-14 14:26:18 -07003100 aom_wb_write_literal(wb, gld_ref, REF_FRAMES_LOG2);
Imdad Sardharwalla21cc90b2018-03-12 16:39:38 +00003101 }
Imdad Sardharwalla21cc90b2018-03-12 16:39:38 +00003102
3103 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
David Turnera21966b2018-12-05 14:48:49 +00003104 assert(get_ref_frame_map_idx(cm, ref_frame) != INVALID_IDX);
David Turner73245762019-02-11 16:42:34 +00003105 if (!current_frame->frame_refs_short_signaling)
David Turnera21966b2018-12-05 14:48:49 +00003106 aom_wb_write_literal(wb, get_ref_frame_map_idx(cm, ref_frame),
Imdad Sardharwalla21cc90b2018-03-12 16:39:38 +00003107 REF_FRAMES_LOG2);
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07003108 if (seq_params->frame_id_numbers_present_flag) {
David Turnera21966b2018-12-05 14:48:49 +00003109 int i = get_ref_frame_map_idx(cm, ref_frame);
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07003110 int frame_id_len = seq_params->frame_id_length;
3111 int diff_len = seq_params->delta_frame_id_length;
Wan-Teh Chang69582972018-05-15 13:18:52 -07003112 int delta_frame_id_minus_1 =
Imdad Sardharwalla21cc90b2018-03-12 16:39:38 +00003113 ((cm->current_frame_id - cm->ref_frame_id[i] +
3114 (1 << frame_id_len)) %
3115 (1 << frame_id_len)) -
3116 1;
Wan-Teh Chang69582972018-05-15 13:18:52 -07003117 if (delta_frame_id_minus_1 < 0 ||
David Turner45f416c2018-11-15 11:34:30 +00003118 delta_frame_id_minus_1 >= (1 << diff_len)) {
Vishesh8bd59d92021-04-27 11:34:17 +05303119 aom_internal_error(cm->error, AOM_CODEC_ERROR,
David Turner45f416c2018-11-15 11:34:30 +00003120 "Invalid delta_frame_id_minus_1");
3121 }
Wan-Teh Chang69582972018-05-15 13:18:52 -07003122 aom_wb_write_literal(wb, delta_frame_id_minus_1, diff_len);
Imdad Sardharwalla21cc90b2018-03-12 16:39:38 +00003123 }
Zoe Liu48acf882018-02-21 12:16:50 -08003124 }
Zoe Liuca0cd3f2018-02-26 15:07:50 -08003125
Urvang Joshib6409e92020-03-23 11:23:27 -07003126 if (!features->error_resilient_mode && frame_size_override_flag) {
David Turnera21966b2018-12-05 14:48:49 +00003127 write_frame_size_with_refs(cm, wb);
Imdad Sardharwalla21cc90b2018-03-12 16:39:38 +00003128 } else {
3129 write_frame_size(cm, frame_size_override_flag, wb);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003130 }
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003131
Urvang Joshib6409e92020-03-23 11:23:27 -07003132 if (!features->cur_frame_force_integer_mv)
3133 aom_wb_write_bit(wb, features->allow_high_precision_mv);
Urvang Joshi6237b882020-03-26 15:02:26 -07003134 write_frame_interp_filter(features->interp_filter, wb);
3135 aom_wb_write_bit(wb, features->switchable_motion_mode);
Debargha Mukherjeea5b810a2018-03-26 19:19:55 -07003136 if (frame_might_allow_ref_frame_mvs(cm)) {
Urvang Joshib6409e92020-03-23 11:23:27 -07003137 aom_wb_write_bit(wb, features->allow_ref_frame_mvs);
Debargha Mukherjeedaa8d7e2018-04-07 13:07:29 -07003138 } else {
Urvang Joshib6409e92020-03-23 11:23:27 -07003139 assert(features->allow_ref_frame_mvs == 0);
Imdad Sardharwalla21cc90b2018-03-12 16:39:38 +00003140 }
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003141 }
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003142 }
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003143
Urvang Joshib6409e92020-03-23 11:23:27 -07003144 const int might_bwd_adapt = !(seq_params->reduced_still_picture_hdr) &&
3145 !(features->disable_cdf_update);
Urvang Joshi54ffae72020-03-23 13:37:10 -07003146 if (cm->tiles.large_scale)
Urvang Joshi6237b882020-03-26 15:02:26 -07003147 assert(features->refresh_frame_context == REFRESH_FRAME_CONTEXT_DISABLED);
Yaowu Xu66663f72018-03-27 08:22:23 -07003148
Jingning Handa11e692017-12-19 08:45:08 -08003149 if (might_bwd_adapt) {
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003150 aom_wb_write_bit(
Urvang Joshi6237b882020-03-26 15:02:26 -07003151 wb, features->refresh_frame_context == REFRESH_FRAME_CONTEXT_DISABLED);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003152 }
Yue Chend90d3432018-03-16 11:28:42 -07003153
James Zern8b007ff2018-02-26 22:49:13 -08003154 write_tile_info(cm, saved_wb, wb);
Urvang Joshi17814622020-03-27 17:26:17 -07003155 encode_quantization(quant_params, av1_num_planes(cm),
Tarundeep Singh4243e622021-04-20 16:10:22 +05303156 cm->seq_params->separate_uv_delta_q, wb);
Cherma Rajan Ab4be7e52021-04-14 19:28:53 +05303157 encode_segmentation(cm, wb);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003158
David Turnerebf96f42018-11-14 16:57:57 +00003159 const DeltaQInfo *const delta_q_info = &cm->delta_q_info;
Urvang Joshi17814622020-03-27 17:26:17 -07003160 if (delta_q_info->delta_q_present_flag) assert(quant_params->base_qindex > 0);
3161 if (quant_params->base_qindex > 0) {
David Turnerebf96f42018-11-14 16:57:57 +00003162 aom_wb_write_bit(wb, delta_q_info->delta_q_present_flag);
3163 if (delta_q_info->delta_q_present_flag) {
3164 aom_wb_write_literal(wb, get_msb(delta_q_info->delta_q_res), 2);
Urvang Joshiacad1ca2020-04-27 17:03:25 -07003165 xd->current_base_qindex = quant_params->base_qindex;
Urvang Joshib6409e92020-03-23 11:23:27 -07003166 if (features->allow_intrabc)
David Turnerebf96f42018-11-14 16:57:57 +00003167 assert(delta_q_info->delta_lf_present_flag == 0);
Yue Chen6bec0572018-05-15 09:53:35 -07003168 else
David Turnerebf96f42018-11-14 16:57:57 +00003169 aom_wb_write_bit(wb, delta_q_info->delta_lf_present_flag);
3170 if (delta_q_info->delta_lf_present_flag) {
3171 aom_wb_write_literal(wb, get_msb(delta_q_info->delta_lf_res), 2);
3172 aom_wb_write_bit(wb, delta_q_info->delta_lf_multi);
Wan-Teh Chang69ac7eb2018-06-08 09:41:37 -07003173 av1_reset_loop_filter_delta(xd, av1_num_planes(cm));
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003174 }
3175 }
3176 }
Yue Chen6bec0572018-05-15 09:53:35 -07003177
Urvang Joshib6409e92020-03-23 11:23:27 -07003178 if (features->all_lossless) {
Cheng Chen09c83a52018-06-05 12:27:36 -07003179 assert(!av1_superres_scaled(cm));
Urvang Joshid6b5d512018-03-20 13:34:38 -07003180 } else {
Urvang Joshib6409e92020-03-23 11:23:27 -07003181 if (!features->coded_lossless) {
Urvang Joshic8b52d52018-03-23 13:16:51 -07003182 encode_loopfilter(cm, wb);
3183 encode_cdef(cm, wb);
3184 }
Urvang Joshi5ec7b812018-02-28 14:37:06 -08003185 encode_restoration_mode(cm, wb);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003186 }
Urvang Joshi5ec7b812018-02-28 14:37:06 -08003187
David Turner45f416c2018-11-15 11:34:30 +00003188 // Write TX mode
Urvang Joshib6409e92020-03-23 11:23:27 -07003189 if (features->coded_lossless)
Urvang Joshi6237b882020-03-26 15:02:26 -07003190 assert(features->tx_mode == ONLY_4X4);
David Turner45f416c2018-11-15 11:34:30 +00003191 else
Urvang Joshi6237b882020-03-26 15:02:26 -07003192 aom_wb_write_bit(wb, features->tx_mode == TX_MODE_SELECT);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003193
David Turnerb757ce02018-11-12 15:01:28 +00003194 if (!frame_is_intra_only(cm)) {
David Turnerd2a592e2018-11-16 14:59:31 +00003195 const int use_hybrid_pred =
3196 current_frame->reference_mode == REFERENCE_MODE_SELECT;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003197
3198 aom_wb_write_bit(wb, use_hybrid_pred);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003199 }
Zoe Liu4b847e12017-12-07 12:44:45 -08003200
David Turnerd2a592e2018-11-16 14:59:31 +00003201 if (current_frame->skip_mode_info.skip_mode_allowed)
3202 aom_wb_write_bit(wb, current_frame->skip_mode_info.skip_mode_flag);
Zoe Liu4b847e12017-12-07 12:44:45 -08003203
Debargha Mukherjeea5b810a2018-03-26 19:19:55 -07003204 if (frame_might_allow_warped_motion(cm))
Urvang Joshib6409e92020-03-23 11:23:27 -07003205 aom_wb_write_bit(wb, features->allow_warped_motion);
Debargha Mukherjee07a7c1f2018-03-21 17:39:13 -07003206 else
Urvang Joshib6409e92020-03-23 11:23:27 -07003207 assert(!features->allow_warped_motion);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003208
Urvang Joshib6409e92020-03-23 11:23:27 -07003209 aom_wb_write_bit(wb, features->reduced_tx_set_used);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003210
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003211 if (!frame_is_intra_only(cm)) write_global_motion(cpi, wb);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003212
Urvang Joshi8d5a4ba2018-07-19 16:26:34 -07003213 if (seq_params->film_grain_params_present &&
David Turner08f909c2018-12-18 13:29:14 +00003214 (cm->show_frame || cm->showable_frame))
Dominic Symesd4929012018-01-31 17:32:01 +01003215 write_film_grain_params(cpi, wb);
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003216
Urvang Joshi54ffae72020-03-23 13:37:10 -07003217 if (cm->tiles.large_scale) write_ext_tile_info(cm, saved_wb, wb);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003218}
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003219
Yaowu Xuc27fc142016-08-22 16:08:15 -07003220static int choose_size_bytes(uint32_t size, int spare_msbs) {
3221 // Choose the number of bytes required to represent size, without
3222 // using the 'spare_msbs' number of most significant bits.
3223
3224 // Make sure we will fit in 4 bytes to start with..
3225 if (spare_msbs > 0 && size >> (32 - spare_msbs) != 0) return -1;
3226
3227 // Normalise to 32 bits
3228 size <<= spare_msbs;
3229
3230 if (size >> 24 != 0)
3231 return 4;
3232 else if (size >> 16 != 0)
3233 return 3;
3234 else if (size >> 8 != 0)
3235 return 2;
3236 else
3237 return 1;
3238}
3239
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07003240static AOM_INLINE void mem_put_varsize(uint8_t *const dst, const int sz,
3241 const int val) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003242 switch (sz) {
3243 case 1: dst[0] = (uint8_t)(val & 0xff); break;
3244 case 2: mem_put_le16(dst, val); break;
3245 case 3: mem_put_le24(dst, val); break;
3246 case 4: mem_put_le32(dst, val); break;
James Zern06c372d2017-04-20 16:08:29 -07003247 default: assert(0 && "Invalid size"); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003248 }
3249}
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003250
Urvang Joshi54ffae72020-03-23 13:37:10 -07003251static int remux_tiles(const CommonTileParams *const tiles, uint8_t *dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003252 const uint32_t data_size, const uint32_t max_tile_size,
3253 const uint32_t max_tile_col_size,
3254 int *const tile_size_bytes,
3255 int *const tile_col_size_bytes) {
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003256 // Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
3257 int tsb;
3258 int tcsb;
3259
Urvang Joshi54ffae72020-03-23 13:37:10 -07003260 if (tiles->large_scale) {
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003261 // The top bit in the tile size field indicates tile copy mode, so we
3262 // have 1 less bit to code the tile size
3263 tsb = choose_size_bytes(max_tile_size, 1);
3264 tcsb = choose_size_bytes(max_tile_col_size, 0);
3265 } else {
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003266 tsb = choose_size_bytes(max_tile_size, 0);
3267 tcsb = 4; // This is ignored
3268 (void)max_tile_col_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003269 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003270
3271 assert(tsb > 0);
3272 assert(tcsb > 0);
3273
3274 *tile_size_bytes = tsb;
3275 *tile_col_size_bytes = tcsb;
James Zerna60ff582018-02-24 14:02:12 -08003276 if (tsb == 4 && tcsb == 4) return data_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003277
James Zerna60ff582018-02-24 14:02:12 -08003278 uint32_t wpos = 0;
3279 uint32_t rpos = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003280
Urvang Joshi54ffae72020-03-23 13:37:10 -07003281 if (tiles->large_scale) {
James Zerna60ff582018-02-24 14:02:12 -08003282 int tile_row;
3283 int tile_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003284
Urvang Joshi54ffae72020-03-23 13:37:10 -07003285 for (tile_col = 0; tile_col < tiles->cols; tile_col++) {
James Zerna60ff582018-02-24 14:02:12 -08003286 // All but the last column has a column header
Urvang Joshi54ffae72020-03-23 13:37:10 -07003287 if (tile_col < tiles->cols - 1) {
James Zerna60ff582018-02-24 14:02:12 -08003288 uint32_t tile_col_size = mem_get_le32(dst + rpos);
3289 rpos += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003290
James Zerna60ff582018-02-24 14:02:12 -08003291 // Adjust the tile column size by the number of bytes removed
3292 // from the tile size fields.
Urvang Joshi54ffae72020-03-23 13:37:10 -07003293 tile_col_size -= (4 - tsb) * tiles->rows;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003294
James Zerna60ff582018-02-24 14:02:12 -08003295 mem_put_varsize(dst + wpos, tcsb, tile_col_size);
3296 wpos += tcsb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003297 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003298
Urvang Joshi54ffae72020-03-23 13:37:10 -07003299 for (tile_row = 0; tile_row < tiles->rows; tile_row++) {
James Zerna60ff582018-02-24 14:02:12 -08003300 // All, including the last row has a header
3301 uint32_t tile_header = mem_get_le32(dst + rpos);
3302 rpos += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003303
James Zerna60ff582018-02-24 14:02:12 -08003304 // If this is a copy tile, we need to shift the MSB to the
3305 // top bit of the new width, and there is no data to copy.
3306 if (tile_header >> 31 != 0) {
3307 if (tsb < 4) tile_header >>= 32 - 8 * tsb;
3308 mem_put_varsize(dst + wpos, tsb, tile_header);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003309 wpos += tsb;
James Zerna60ff582018-02-24 14:02:12 -08003310 } else {
3311 mem_put_varsize(dst + wpos, tsb, tile_header);
3312 wpos += tsb;
3313
James Zern2f22bfd2018-03-10 12:58:22 -08003314 tile_header += AV1_MIN_TILE_SIZE_BYTES;
James Zerna60ff582018-02-24 14:02:12 -08003315 memmove(dst + wpos, dst + rpos, tile_header);
3316 rpos += tile_header;
3317 wpos += tile_header;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003318 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003319 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003320 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003321
3322 assert(rpos > wpos);
3323 assert(rpos == data_size);
3324
3325 return wpos;
3326 }
Urvang Joshi54ffae72020-03-23 13:37:10 -07003327 const int n_tiles = tiles->cols * tiles->rows;
James Zerna60ff582018-02-24 14:02:12 -08003328 int n;
3329
3330 for (n = 0; n < n_tiles; n++) {
3331 int tile_size;
3332
3333 if (n == n_tiles - 1) {
3334 tile_size = data_size - rpos;
3335 } else {
3336 tile_size = mem_get_le32(dst + rpos);
3337 rpos += 4;
3338 mem_put_varsize(dst + wpos, tsb, tile_size);
James Zern2f22bfd2018-03-10 12:58:22 -08003339 tile_size += AV1_MIN_TILE_SIZE_BYTES;
James Zerna60ff582018-02-24 14:02:12 -08003340 wpos += tsb;
3341 }
3342
3343 memmove(dst + wpos, dst + rpos, tile_size);
3344
3345 rpos += tile_size;
3346 wpos += tile_size;
3347 }
3348
3349 assert(rpos > wpos);
3350 assert(rpos == data_size);
3351
3352 return wpos;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003353}
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003354
Vishesh8ac928b2020-04-01 02:36:35 +05303355uint32_t av1_write_obu_header(AV1LevelParams *const level_params,
Tarundeep Singhd37807f2021-03-31 17:30:55 +05303356 int *frame_header_count, OBU_TYPE obu_type,
3357 int obu_extension, uint8_t *const dst) {
Vishesh8ac928b2020-04-01 02:36:35 +05303358 if (level_params->keep_level_stats &&
Hui Su4fd11762019-03-26 16:05:07 -07003359 (obu_type == OBU_FRAME || obu_type == OBU_FRAME_HEADER))
Tarundeep Singhd37807f2021-03-31 17:30:55 +05303360 ++(*frame_header_count);
Hui Su4fd11762019-03-26 16:05:07 -07003361
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003362 struct aom_write_bit_buffer wb = { dst, 0 };
3363 uint32_t size = 0;
3364
Tom Fineganf9273812018-03-14 09:49:45 -07003365 aom_wb_write_literal(&wb, 0, 1); // forbidden bit.
Soo-Chul Han38427e82017-09-27 15:06:13 -04003366 aom_wb_write_literal(&wb, (int)obu_type, 4);
Vignesh Venkatasubramanian726f7952018-03-08 15:03:35 -08003367 aom_wb_write_literal(&wb, obu_extension ? 1 : 0, 1);
Wan-Teh Chang99ed4b72023-08-08 12:55:51 -07003368 aom_wb_write_literal(&wb, 1, 1); // obu_has_size_field
Vignesh Venkatasubramanian726f7952018-03-08 15:03:35 -08003369 aom_wb_write_literal(&wb, 0, 1); // reserved
Tom Finegan5427be12018-03-14 18:45:39 -07003370
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003371 if (obu_extension) {
3372 aom_wb_write_literal(&wb, obu_extension & 0xFF, 8);
3373 }
3374
3375 size = aom_wb_bytes_written(&wb);
3376 return size;
3377}
3378
Tom Fineganda8da7f2019-11-14 13:02:55 -08003379int av1_write_uleb_obu_size(size_t obu_header_size, size_t obu_payload_size,
Yaowu Xu797674b2019-05-01 17:38:11 -07003380 uint8_t *dest) {
Tom Fineganda8da7f2019-11-14 13:02:55 -08003381 const size_t offset = obu_header_size;
Tom Finegan41150ad2018-01-23 11:42:55 -08003382 size_t coded_obu_size = 0;
Tom Finegan9a1a9582019-11-14 14:28:09 -08003383 const uint32_t obu_size = (uint32_t)obu_payload_size;
3384 assert(obu_size == obu_payload_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08003385
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08003386 if (aom_uleb_encode(obu_size, sizeof(obu_size), dest + offset,
3387 &coded_obu_size) != 0) {
Tom Finegan41150ad2018-01-23 11:42:55 -08003388 return AOM_CODEC_ERROR;
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08003389 }
Tom Finegan41150ad2018-01-23 11:42:55 -08003390
3391 return AOM_CODEC_OK;
3392}
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08003393
Vishesh5849e6d2021-04-23 19:15:29 +05303394size_t av1_obu_memmove(size_t obu_header_size, size_t obu_payload_size,
3395 uint8_t *data) {
Tom Finegan5427be12018-03-14 18:45:39 -07003396 const size_t length_field_size = aom_uleb_size_in_bytes(obu_payload_size);
Tom Fineganda8da7f2019-11-14 13:02:55 -08003397 const size_t move_dst_offset = length_field_size + obu_header_size;
3398 const size_t move_src_offset = obu_header_size;
3399 const size_t move_size = obu_payload_size;
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08003400 memmove(data + move_dst_offset, data + move_src_offset, move_size);
3401 return length_field_size;
3402}
Tom Finegan41150ad2018-01-23 11:42:55 -08003403
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07003404static AOM_INLINE void add_trailing_bits(struct aom_write_bit_buffer *wb) {
Cyril Concolato2dab2752018-02-26 14:25:47 -08003405 if (aom_wb_is_byte_aligned(wb)) {
3406 aom_wb_write_literal(wb, 0x80, 8);
3407 } else {
3408 // assumes that the other bits are already 0s
3409 aom_wb_write_bit(wb, 1);
3410 }
3411}
Cyril Concolato2dab2752018-02-26 14:25:47 -08003412
Elliott Karpilovsky18fcd6a2019-09-16 15:15:06 -07003413static AOM_INLINE void write_bitstream_level(AV1_LEVEL seq_level_idx,
3414 struct aom_write_bit_buffer *wb) {
Debargha Mukherjee57498692018-05-11 13:29:31 -07003415 assert(is_valid_seq_level_idx(seq_level_idx));
3416 aom_wb_write_literal(wb, seq_level_idx, LEVEL_BITS);
Debargha Mukherjeeea675402018-05-10 16:10:41 -07003417}
3418
Urvang Joshi450a9a22020-03-31 16:00:22 -07003419uint32_t av1_write_sequence_header_obu(const SequenceHeader *seq_params,
3420 uint8_t *const dst) {
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003421 struct aom_write_bit_buffer wb = { dst, 0 };
3422 uint32_t size = 0;
3423
Urvang Joshi450a9a22020-03-31 16:00:22 -07003424 write_profile(seq_params->profile, &wb);
Soo-Chul Han79a501a2018-03-19 15:24:40 -04003425
Debargha Mukherjee5d6e3fb2018-04-03 13:05:54 -07003426 // Still picture or not
Urvang Joshi450a9a22020-03-31 16:00:22 -07003427 aom_wb_write_bit(&wb, seq_params->still_picture);
3428 assert(IMPLIES(!seq_params->still_picture,
3429 !seq_params->reduced_still_picture_hdr));
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07003430 // whether to use reduced still picture header
Urvang Joshi450a9a22020-03-31 16:00:22 -07003431 aom_wb_write_bit(&wb, seq_params->reduced_still_picture_hdr);
Debargha Mukherjee5d6e3fb2018-04-03 13:05:54 -07003432
Urvang Joshi450a9a22020-03-31 16:00:22 -07003433 if (seq_params->reduced_still_picture_hdr) {
3434 assert(seq_params->timing_info_present == 0);
3435 assert(seq_params->decoder_model_info_present_flag == 0);
3436 assert(seq_params->display_model_info_present_flag == 0);
3437 write_bitstream_level(seq_params->seq_level_idx[0], &wb);
Debargha Mukherjeeacd41f92018-04-11 07:58:34 -07003438 } else {
Urvang Joshi450a9a22020-03-31 16:00:22 -07003439 aom_wb_write_bit(
3440 &wb, seq_params->timing_info_present); // timing info present flag
Andrey Norkinc14f8452018-06-08 18:15:31 -07003441
Urvang Joshi450a9a22020-03-31 16:00:22 -07003442 if (seq_params->timing_info_present) {
Andrey Norkinc14f8452018-06-08 18:15:31 -07003443 // timing_info
Urvang Joshi450a9a22020-03-31 16:00:22 -07003444 write_timing_info_header(&seq_params->timing_info, &wb);
3445 aom_wb_write_bit(&wb, seq_params->decoder_model_info_present_flag);
3446 if (seq_params->decoder_model_info_present_flag) {
3447 write_decoder_model_info(&seq_params->decoder_model_info, &wb);
Andrey Norkinc14f8452018-06-08 18:15:31 -07003448 }
Adrian Grangec56f6ec2018-05-31 14:19:32 -07003449 }
Urvang Joshi450a9a22020-03-31 16:00:22 -07003450 aom_wb_write_bit(&wb, seq_params->display_model_info_present_flag);
3451 aom_wb_write_literal(&wb, seq_params->operating_points_cnt_minus_1,
Wan-Teh Chang69582972018-05-15 13:18:52 -07003452 OP_POINTS_CNT_MINUS_1_BITS);
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07003453 int i;
Urvang Joshi450a9a22020-03-31 16:00:22 -07003454 for (i = 0; i < seq_params->operating_points_cnt_minus_1 + 1; i++) {
3455 aom_wb_write_literal(&wb, seq_params->operating_point_idc[i],
Frank Bossenbaf66662018-04-30 12:21:04 -04003456 OP_POINTS_IDC_BITS);
Urvang Joshi450a9a22020-03-31 16:00:22 -07003457 write_bitstream_level(seq_params->seq_level_idx[i], &wb);
3458 if (seq_params->seq_level_idx[i] >= SEQ_LEVEL_4_0)
3459 aom_wb_write_bit(&wb, seq_params->tier[i]);
3460 if (seq_params->decoder_model_info_present_flag) {
3461 aom_wb_write_bit(
3462 &wb, seq_params->op_params[i].decoder_model_param_present_flag);
3463 if (seq_params->op_params[i].decoder_model_param_present_flag) {
3464 write_dec_model_op_parameters(
3465 &seq_params->op_params[i],
3466 seq_params->decoder_model_info
3467 .encoder_decoder_buffer_delay_length,
3468 &wb);
3469 }
Adrian Grangec56f6ec2018-05-31 14:19:32 -07003470 }
Urvang Joshi450a9a22020-03-31 16:00:22 -07003471 if (seq_params->display_model_info_present_flag) {
3472 aom_wb_write_bit(
3473 &wb, seq_params->op_params[i].display_model_param_present_flag);
3474 if (seq_params->op_params[i].display_model_param_present_flag) {
Wan-Teh Changd3c97c12022-09-29 14:34:35 -07003475 assert(seq_params->op_params[i].initial_display_delay >= 1);
Urvang Joshi450a9a22020-03-31 16:00:22 -07003476 assert(seq_params->op_params[i].initial_display_delay <= 10);
3477 aom_wb_write_literal(
3478 &wb, seq_params->op_params[i].initial_display_delay - 1, 4);
Adrian Grangec56f6ec2018-05-31 14:19:32 -07003479 }
3480 }
Debargha Mukherjeec6f24c22018-04-07 08:43:08 -07003481 }
Soo-Chul Han79a501a2018-03-19 15:24:40 -04003482 }
Urvang Joshi450a9a22020-03-31 16:00:22 -07003483 write_sequence_header(seq_params, &wb);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003484
Urvang Joshi450a9a22020-03-31 16:00:22 -07003485 write_color_config(seq_params, &wb);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003486
Urvang Joshi450a9a22020-03-31 16:00:22 -07003487 aom_wb_write_bit(&wb, seq_params->film_grain_params_present);
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003488
Cyril Concolato2dab2752018-02-26 14:25:47 -08003489 add_trailing_bits(&wb);
Cyril Concolato2dab2752018-02-26 14:25:47 -08003490
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003491 size = aom_wb_bytes_written(&wb);
3492 return size;
3493}
3494
Vishesh8bd59d92021-04-27 11:34:17 +05303495static uint32_t write_frame_header_obu(AV1_COMP *cpi, MACROBLOCKD *const xd,
Jingning Handa11e692017-12-19 08:45:08 -08003496 struct aom_write_bit_buffer *saved_wb,
Vignesh Venkatasubramanianbd7b0e32018-04-10 11:31:59 -07003497 uint8_t *const dst,
3498 int append_trailing_bits) {
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003499 struct aom_write_bit_buffer wb = { dst, 0 };
Vishesh8bd59d92021-04-27 11:34:17 +05303500 write_uncompressed_header_obu(cpi, xd, saved_wb, &wb);
Vignesh Venkatasubramanianbd7b0e32018-04-10 11:31:59 -07003501 if (append_trailing_bits) add_trailing_bits(&wb);
3502 return aom_wb_bytes_written(&wb);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003503}
3504
Johanna7f959f2018-11-16 12:51:05 -05003505static uint32_t write_tile_group_header(uint8_t *const dst, int start_tile,
3506 int end_tile, int tiles_log2,
Vignesh Venkatasubramanian2a06b412018-03-01 15:18:06 -08003507 int tile_start_and_end_present_flag) {
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003508 struct aom_write_bit_buffer wb = { dst, 0 };
3509 uint32_t size = 0;
3510
Vignesh Venkatasubramanian2a06b412018-03-01 15:18:06 -08003511 if (!tiles_log2) return size;
3512
3513 aom_wb_write_bit(&wb, tile_start_and_end_present_flag);
3514
3515 if (tile_start_and_end_present_flag) {
Johanna7f959f2018-11-16 12:51:05 -05003516 aom_wb_write_literal(&wb, start_tile, tiles_log2);
3517 aom_wb_write_literal(&wb, end_tile, tiles_log2);
Vignesh Venkatasubramanian2a06b412018-03-01 15:18:06 -08003518 }
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003519
3520 size = aom_wb_bytes_written(&wb);
3521 return size;
3522}
3523
Yunqing Wangd8fd9e72019-12-26 15:36:31 -08003524extern void av1_print_uncompressed_frame_header(const uint8_t *data, int size,
3525 const char *filename);
3526
Vishesh1c320df2021-03-01 09:46:30 +05303527typedef struct {
3528 uint32_t tg_hdr_size;
3529 uint32_t frame_header_size;
3530} LargeTileFrameOBU;
3531
// Writes the OBU header and uncompressed frame header for the large-scale
// tile path, advancing '*data' past both. Records the two header sizes in
// 'lst_obu' and returns the frame header size in bytes.
static uint32_t init_large_scale_tile_obu_header(
    AV1_COMP *const cpi, uint8_t **data, struct aom_write_bit_buffer *saved_wb,
    LargeTileFrameOBU *lst_obu) {
  AV1LevelParams *const level_params = &cpi->ppi->level_params;
  CurrentFrame *const current_frame = &cpi->common.current_frame;
  // For large_scale_tile case, we always have only one tile group, so it can
  // be written as an OBU_FRAME.
  const OBU_TYPE obu_type = OBU_FRAME;
  lst_obu->tg_hdr_size = av1_write_obu_header(
      level_params, &cpi->frame_header_count, obu_type, 0, *data);
  *data += lst_obu->tg_hdr_size;

  // Frame header written without trailing bits (append_trailing_bits = 0).
  const uint32_t frame_header_size =
      write_frame_header_obu(cpi, &cpi->td.mb.e_mbd, saved_wb, *data, 0);
  *data += frame_header_size;
  lst_obu->frame_header_size = frame_header_size;
  // (yunqing) This test ensures the correctness of large scale tile coding.
  if (cpi->oxcf.tile_cfg.enable_ext_tile_debug) {
    // Dump the header to "./fhNNN" where NNN is the 3-digit frame number
    // (supports frame numbers up to 999).
    char fn[20] = "./fh";
    fn[4] = current_frame->frame_number / 100 + '0';
    fn[5] = (current_frame->frame_number % 100) / 10 + '0';
    fn[6] = (current_frame->frame_number % 10) + '0';
    fn[7] = '\0';
    av1_print_uncompressed_frame_header(*data - frame_header_size,
                                        frame_header_size, fn);
  }
  return frame_header_size;
}
3561
// Finalizes the large-scale-tile OBU: compacts the provisional tile size
// fields via remux_tiles (when there is more than one tile), inserts the
// LEB128 OBU size field, and back-patches the tile size-field widths into
// the saved spots of the uncompressed header. Updates '*total_size' to the
// final byte count.
static void write_large_scale_tile_obu_size(
    const CommonTileParams *const tiles, uint8_t *const dst, uint8_t *data,
    struct aom_write_bit_buffer *saved_wb, LargeTileFrameOBU *const lst_obu,
    int have_tiles, uint32_t *total_size, int max_tile_size,
    int max_tile_col_size) {
  int tile_size_bytes = 0;
  int tile_col_size_bytes = 0;
  if (have_tiles) {
    // Remux only the tile payload ('data' points past the frame header).
    *total_size = remux_tiles(
        tiles, data, *total_size - lst_obu->frame_header_size, max_tile_size,
        max_tile_col_size, &tile_size_bytes, &tile_col_size_bytes);
    *total_size += lst_obu->frame_header_size;
  }

  // In EXT_TILE case, only use 1 tile group. Follow the obu syntax, write
  // current tile group size before tile data(include tile column header).
  // Tile group size doesn't include the bytes storing tg size.
  *total_size += lst_obu->tg_hdr_size;
  const uint32_t obu_payload_size = *total_size - lst_obu->tg_hdr_size;
  // Open a gap after the OBU header, then write the LEB128 size into it.
  const size_t length_field_size =
      av1_obu_memmove(lst_obu->tg_hdr_size, obu_payload_size, dst);
  // NOTE(review): failure here is only caught by the debug-build assert;
  // release builds would continue with an unwritten size field.
  if (av1_write_uleb_obu_size(lst_obu->tg_hdr_size, obu_payload_size, dst) !=
      AOM_CODEC_OK)
    assert(0);

  *total_size += (uint32_t)length_field_size;
  // The saved write position moved with the payload; keep it in sync.
  saved_wb->bit_buffer += length_field_size;

  // Now fill in the gaps in the uncompressed header.
  if (have_tiles) {
    assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
    aom_wb_overwrite_literal(saved_wb, tile_col_size_bytes - 1, 2);

    assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
    aom_wb_overwrite_literal(saved_wb, tile_size_bytes - 1, 2);
  }
}
3601
// Entropy-codes every tile into 'dst' (column-major) for the large-scale
// tile path, writing provisional 4-byte column and tile size headers that
// remux_tiles will later compact. Tracks the largest tile index and the
// maximum tile / tile-column sizes so the size-field widths can be chosen.
static void write_large_scale_tile_obu(
    AV1_COMP *const cpi, uint8_t *const dst, LargeTileFrameOBU *const lst_obu,
    int *const largest_tile_id, uint32_t *total_size, const int have_tiles,
    unsigned int *const max_tile_size, unsigned int *const max_tile_col_size) {
  AV1_COMMON *const cm = &cpi->common;
  const CommonTileParams *const tiles = &cm->tiles;

  TileBufferEnc tile_buffers[MAX_TILE_ROWS][MAX_TILE_COLS];
  const int tile_cols = tiles->cols;
  const int tile_rows = tiles->rows;
  unsigned int tile_size = 0;

  av1_reset_pack_bs_thread_data(&cpi->td);
  for (int tile_col = 0; tile_col < tile_cols; tile_col++) {
    TileInfo tile_info;
    const int is_last_col = (tile_col == tile_cols - 1);
    const uint32_t col_offset = *total_size;

    av1_tile_set_col(&tile_info, cm, tile_col);

    // The last column does not have a column header
    if (!is_last_col) *total_size += 4;

    for (int tile_row = 0; tile_row < tile_rows; tile_row++) {
      TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
      // Provisional 4-byte per-tile size header when tiling is in use.
      const int data_offset = have_tiles ? 4 : 0;
      const int tile_idx = tile_row * tile_cols + tile_col;
      TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
      av1_tile_set_row(&tile_info, cm, tile_row);
      aom_writer mode_bc;

      buf->data = dst + *total_size + lst_obu->tg_hdr_size;

      // Is CONFIG_EXT_TILE = 1, every tile in the row has a header,
      // even for the last one, unless no tiling is used at all.
      *total_size += data_offset;
      cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
      // CDF adaptation is disabled for large-scale tiles and when the frame
      // disables CDF updates.
      mode_bc.allow_update_cdf = !tiles->large_scale;
      mode_bc.allow_update_cdf =
          mode_bc.allow_update_cdf && !cm->features.disable_cdf_update;
      aom_start_encode(&mode_bc, buf->data + data_offset);
      write_modes(cpi, &cpi->td, &tile_info, &mode_bc, tile_row, tile_col);
      if (aom_stop_encode(&mode_bc) < 0) {
        aom_internal_error(cm->error, AOM_CODEC_ERROR, "Error writing modes");
      }
      tile_size = mode_bc.pos;
      buf->size = tile_size;

      // Record the maximum tile size we see, so we can compact headers later.
      if (tile_size > *max_tile_size) {
        *max_tile_size = tile_size;
        *largest_tile_id = tile_cols * tile_row + tile_col;
      }

      if (have_tiles) {
        // tile header: size of this tile, or copy offset
        uint32_t tile_header = tile_size - AV1_MIN_TILE_SIZE_BYTES;
        const int tile_copy_mode =
            ((AOMMAX(tiles->width, tiles->height) << MI_SIZE_LOG2) <= 256) ? 1
                                                                           : 0;

        // If tile_copy_mode = 1, check if this tile is a copy tile.
        // Very low chances to have copy tiles on the key frames, so don't
        // search on key frames to reduce unnecessary search.
        if (cm->current_frame.frame_type != KEY_FRAME && tile_copy_mode) {
          const int identical_tile_offset =
              find_identical_tile(tile_row, tile_col, tile_buffers);

          // Indicate a copy-tile by setting the most significant bit.
          // The row-offset to copy from is stored in the highest byte.
          // remux_tiles will move these around later
          if (identical_tile_offset > 0) {
            tile_size = 0;
            tile_header = identical_tile_offset | 0x80;
            tile_header <<= 24;
          }
        }

        mem_put_le32(buf->data, (MEM_VALUE_T)tile_header);
      }

      *total_size += tile_size;
    }
    if (!is_last_col) {
      // Back-patch this column's provisional size header.
      uint32_t col_size = *total_size - col_offset - 4;
      mem_put_le32(dst + col_offset + lst_obu->tg_hdr_size, col_size);

      // Record the maximum tile column size we see.
      *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
    }
  }
  av1_accumulate_pack_bs_thread_data(cpi, &cpi->td);
}
3696
3697// Packs information in the obu header for large scale tiles.
3698static INLINE uint32_t pack_large_scale_tiles_in_tg_obus(
3699 AV1_COMP *const cpi, uint8_t *const dst,
3700 struct aom_write_bit_buffer *saved_wb, int *const largest_tile_id) {
3701 AV1_COMMON *const cm = &cpi->common;
3702 const CommonTileParams *const tiles = &cm->tiles;
3703 uint32_t total_size = 0;
James Zern2a7629e2018-03-23 00:06:45 -07003704 unsigned int max_tile_size = 0;
3705 unsigned int max_tile_col_size = 0;
Vishesh1c320df2021-03-01 09:46:30 +05303706 const int have_tiles = tiles->cols * tiles->rows > 1;
3707 uint8_t *data = dst;
3708
3709 LargeTileFrameOBU lst_obu;
3710
3711 total_size +=
3712 init_large_scale_tile_obu_header(cpi, &data, saved_wb, &lst_obu);
3713
3714 write_large_scale_tile_obu(cpi, dst, &lst_obu, largest_tile_id, &total_size,
3715 have_tiles, &max_tile_size, &max_tile_col_size);
3716
3717 write_large_scale_tile_obu_size(tiles, dst, data, saved_wb, &lst_obu,
3718 have_tiles, &total_size, max_tile_size,
3719 max_tile_col_size);
3720
3721 return total_size;
3722}
3723
Vishesh268a08f2021-04-22 17:28:44 +05303724// Writes obu, tile group and uncompressed headers to bitstream.
Vishesh8bd59d92021-04-27 11:34:17 +05303725void av1_write_obu_tg_tile_headers(AV1_COMP *const cpi, MACROBLOCKD *const xd,
Vishesh268a08f2021-04-22 17:28:44 +05303726 PackBSParams *const pack_bs_params,
3727 const int tile_idx) {
3728 AV1_COMMON *const cm = &cpi->common;
3729 const CommonTileParams *const tiles = &cm->tiles;
3730 int *const curr_tg_hdr_size = &pack_bs_params->curr_tg_hdr_size;
3731 const int tg_size =
3732 (tiles->rows * tiles->cols + cpi->num_tg - 1) / cpi->num_tg;
3733
3734 // Write Tile group, frame and OBU header
3735 // A new tile group begins at this tile. Write the obu header and
3736 // tile group header
3737 const OBU_TYPE obu_type = (cpi->num_tg == 1) ? OBU_FRAME : OBU_TILE_GROUP;
3738 *curr_tg_hdr_size = av1_write_obu_header(
3739 &cpi->ppi->level_params, &cpi->frame_header_count, obu_type,
3740 pack_bs_params->obu_extn_header, pack_bs_params->tile_data_curr);
3741 pack_bs_params->obu_header_size = *curr_tg_hdr_size;
3742
3743 if (cpi->num_tg == 1)
3744 *curr_tg_hdr_size += write_frame_header_obu(
Vishesh8bd59d92021-04-27 11:34:17 +05303745 cpi, xd, pack_bs_params->saved_wb,
Vishesh268a08f2021-04-22 17:28:44 +05303746 pack_bs_params->tile_data_curr + *curr_tg_hdr_size, 0);
3747 *curr_tg_hdr_size += write_tile_group_header(
3748 pack_bs_params->tile_data_curr + *curr_tg_hdr_size, tile_idx,
3749 AOMMIN(tile_idx + tg_size - 1, tiles->cols * tiles->rows - 1),
3750 (tiles->log2_rows + tiles->log2_cols), cpi->num_tg > 1);
3751 *pack_bs_params->total_size += *curr_tg_hdr_size;
3752}
3753
Visheshb2c65b92021-03-12 11:46:45 +05303754// Pack tile data in the bitstream with tile_group, frame
3755// and OBU header.
Vishesh5849e6d2021-04-23 19:15:29 +05303756void av1_pack_tile_info(AV1_COMP *const cpi, ThreadData *const td,
3757 PackBSParams *const pack_bs_params) {
Visheshb2c65b92021-03-12 11:46:45 +05303758 aom_writer mode_bc;
3759 AV1_COMMON *const cm = &cpi->common;
Visheshb2c65b92021-03-12 11:46:45 +05303760 int tile_row = pack_bs_params->tile_row;
3761 int tile_col = pack_bs_params->tile_col;
Visheshb2c65b92021-03-12 11:46:45 +05303762 uint32_t *const total_size = pack_bs_params->total_size;
Visheshb2c65b92021-03-12 11:46:45 +05303763 TileInfo tile_info;
3764 av1_tile_set_col(&tile_info, cm, tile_col);
3765 av1_tile_set_row(&tile_info, cm, tile_row);
3766 mode_bc.allow_update_cdf = 1;
3767 mode_bc.allow_update_cdf =
3768 mode_bc.allow_update_cdf && !cm->features.disable_cdf_update;
3769
Visheshb2c65b92021-03-12 11:46:45 +05303770 unsigned int tile_size;
3771
Vishesh268a08f2021-04-22 17:28:44 +05303772 const int num_planes = av1_num_planes(cm);
3773 av1_reset_loop_restoration(&td->mb.e_mbd, num_planes);
Visheshb2c65b92021-03-12 11:46:45 +05303774
3775 pack_bs_params->buf.data = pack_bs_params->dst + *total_size;
3776
3777 // The last tile of the tile group does not have a header.
3778 if (!pack_bs_params->is_last_tile_in_tg) *total_size += 4;
3779
3780 // Pack tile data
3781 aom_start_encode(&mode_bc, pack_bs_params->dst + *total_size);
Cherma Rajan A8ba48242021-04-12 18:13:01 +05303782 write_modes(cpi, td, &tile_info, &mode_bc, tile_row, tile_col);
Satheesh Kumar173c1402023-12-11 15:44:04 +05303783 if (aom_stop_encode(&mode_bc) < 0) {
3784 aom_internal_error(td->mb.e_mbd.error_info, AOM_CODEC_ERROR,
3785 "Error writing modes");
3786 }
Visheshb2c65b92021-03-12 11:46:45 +05303787 tile_size = mode_bc.pos;
3788 assert(tile_size >= AV1_MIN_TILE_SIZE_BYTES);
3789
3790 pack_bs_params->buf.size = tile_size;
3791
3792 // Write tile size
3793 if (!pack_bs_params->is_last_tile_in_tg) {
3794 // size of this tile
3795 mem_put_le32(pack_bs_params->buf.data, tile_size - AV1_MIN_TILE_SIZE_BYTES);
3796 }
3797}
3798
Vishesh5849e6d2021-04-23 19:15:29 +05303799void av1_write_last_tile_info(
3800 AV1_COMP *const cpi, const FrameHeaderInfo *fh_info,
3801 struct aom_write_bit_buffer *saved_wb, size_t *curr_tg_data_size,
3802 uint8_t *curr_tg_start, uint32_t *const total_size,
3803 uint8_t **tile_data_start, int *const largest_tile_id,
3804 int *const is_first_tg, uint32_t obu_header_size, uint8_t obu_extn_header) {
Visheshc1516b92021-04-05 15:33:23 +05303805 // write current tile group size
3806 const uint32_t obu_payload_size =
3807 (uint32_t)(*curr_tg_data_size) - obu_header_size;
3808 const size_t length_field_size =
Vishesh5849e6d2021-04-23 19:15:29 +05303809 av1_obu_memmove(obu_header_size, obu_payload_size, curr_tg_start);
Visheshc1516b92021-04-05 15:33:23 +05303810 if (av1_write_uleb_obu_size(obu_header_size, obu_payload_size,
3811 curr_tg_start) != AOM_CODEC_OK) {
3812 assert(0);
3813 }
3814 *curr_tg_data_size += (int)length_field_size;
3815 *total_size += (uint32_t)length_field_size;
3816 *tile_data_start += length_field_size;
3817 if (cpi->num_tg == 1) {
3818 // if this tg is combined with the frame header then update saved
chiyotsai7881d8f2021-04-19 18:11:51 -07003819 // frame header base offset according to length field size
Visheshc1516b92021-04-05 15:33:23 +05303820 saved_wb->bit_buffer += length_field_size;
3821 }
3822
3823 if (!(*is_first_tg) && cpi->common.features.error_resilient_mode) {
3824 // Make room for a duplicate Frame Header OBU.
3825 memmove(curr_tg_start + fh_info->total_length, curr_tg_start,
3826 *curr_tg_data_size);
3827
3828 // Insert a copy of the Frame Header OBU.
3829 memcpy(curr_tg_start, fh_info->frame_header, fh_info->total_length);
3830
3831 // Force context update tile to be the first tile in error
chiyotsai7881d8f2021-04-19 18:11:51 -07003832 // resilient mode as the duplicate frame headers will have
Visheshc1516b92021-04-05 15:33:23 +05303833 // context_update_tile_id set to 0
3834 *largest_tile_id = 0;
3835
3836 // Rewrite the OBU header to change the OBU type to Redundant Frame
3837 // Header.
3838 av1_write_obu_header(&cpi->ppi->level_params, &cpi->frame_header_count,
3839 OBU_REDUNDANT_FRAME_HEADER, obu_extn_header,
3840 &curr_tg_start[fh_info->obu_header_byte_offset]);
3841
3842 *curr_tg_data_size += (int)(fh_info->total_length);
3843 *total_size += (uint32_t)(fh_info->total_length);
3844 }
3845 *is_first_tg = 0;
3846}
3847
Vishesh5849e6d2021-04-23 19:15:29 +05303848void av1_reset_pack_bs_thread_data(ThreadData *const td) {
Cherma Rajan Ad0f59a72021-04-13 12:43:54 +05303849 td->coefficient_size = 0;
3850 td->max_mv_magnitude = 0;
3851 av1_zero(td->interp_filter_selected);
3852}
3853
Vishesh5849e6d2021-04-23 19:15:29 +05303854void av1_accumulate_pack_bs_thread_data(AV1_COMP *const cpi,
3855 ThreadData const *td) {
Aasaipriya Chandranad281242021-05-25 20:25:39 +05303856 int do_max_mv_magnitude_update = 1;
Cherma Rajan Ad0f59a72021-04-13 12:43:54 +05303857 cpi->rc.coefficient_size += td->coefficient_size;
3858
Aasaipriya Chandranad281242021-05-25 20:25:39 +05303859 // Disable max_mv_magnitude update for parallel frames based on update flag.
3860 if (!cpi->do_frame_data_update) do_max_mv_magnitude_update = 0;
Aasaipriya Chandranad281242021-05-25 20:25:39 +05303861
3862 if (cpi->sf.mv_sf.auto_mv_step_size && do_max_mv_magnitude_update)
Cherma Rajan Ad0f59a72021-04-13 12:43:54 +05303863 cpi->mv_search_params.max_mv_magnitude =
3864 AOMMAX(cpi->mv_search_params.max_mv_magnitude, td->max_mv_magnitude);
3865
3866 for (InterpFilter filter = EIGHTTAP_REGULAR; filter < SWITCHABLE; filter++)
3867 cpi->common.cur_frame->interp_filter_selected[filter] +=
3868 td->interp_filter_selected[filter];
3869}
3870
Vishesh1c320df2021-03-01 09:46:30 +05303871// Store information related to each default tile in the OBU header.
3872static void write_tile_obu(
3873 AV1_COMP *const cpi, uint8_t *const dst, uint32_t *total_size,
Visheshb2c65b92021-03-12 11:46:45 +05303874 struct aom_write_bit_buffer *saved_wb, uint8_t obu_extn_header,
Vishesh1c320df2021-03-01 09:46:30 +05303875 const FrameHeaderInfo *fh_info, int *const largest_tile_id,
3876 unsigned int *max_tile_size, uint32_t *const obu_header_size,
3877 uint8_t **tile_data_start) {
3878 AV1_COMMON *const cm = &cpi->common;
Vishesh8bd59d92021-04-27 11:34:17 +05303879 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Vishesh1c320df2021-03-01 09:46:30 +05303880 const CommonTileParams *const tiles = &cm->tiles;
Vishesh1c320df2021-03-01 09:46:30 +05303881 const int tile_cols = tiles->cols;
3882 const int tile_rows = tiles->rows;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003883 // Fixed size tile groups for the moment
Urvang Joshi1de67aa2020-03-20 11:21:57 -07003884 const int num_tg_hdrs = cpi->num_tg;
Vishesh1c320df2021-03-01 09:46:30 +05303885 const int tg_size = (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003886 int tile_count = 0;
Visheshb2c65b92021-03-12 11:46:45 +05303887 size_t curr_tg_data_size = 0;
3888 uint8_t *tile_data_curr = dst;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003889 int new_tg = 1;
Visheshb2c65b92021-03-12 11:46:45 +05303890 int is_first_tg = 1;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003891
Vishesh5849e6d2021-04-23 19:15:29 +05303892 av1_reset_pack_bs_thread_data(&cpi->td);
Vishesh1c320df2021-03-01 09:46:30 +05303893 for (int tile_row = 0; tile_row < tile_rows; tile_row++) {
3894 for (int tile_col = 0; tile_col < tile_cols; tile_col++) {
James Zerna60ff582018-02-24 14:02:12 -08003895 const int tile_idx = tile_row * tile_cols + tile_col;
James Zerna60ff582018-02-24 14:02:12 -08003896 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
Visheshb2c65b92021-03-12 11:46:45 +05303897
James Zerna60ff582018-02-24 14:02:12 -08003898 int is_last_tile_in_tg = 0;
James Zerna60ff582018-02-24 14:02:12 -08003899 if (new_tg) {
Visheshb2c65b92021-03-12 11:46:45 +05303900 tile_data_curr = dst + *total_size;
James Zerna60ff582018-02-24 14:02:12 -08003901 tile_count = 0;
3902 }
3903 tile_count++;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003904
Visheshb2c65b92021-03-12 11:46:45 +05303905 if (tile_count == tg_size || tile_idx == (tile_cols * tile_rows - 1))
James Zerna60ff582018-02-24 14:02:12 -08003906 is_last_tile_in_tg = 1;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003907
Vishesh8bd59d92021-04-27 11:34:17 +05303908 xd->tile_ctx = &this_tile->tctx;
Soo-Chul Han13f0d9c2017-10-22 21:55:52 -04003909
Visheshb2c65b92021-03-12 11:46:45 +05303910 // PackBSParams stores all parameters required to pack tile and header
3911 // info.
3912 PackBSParams pack_bs_params;
3913 pack_bs_params.dst = dst;
3914 pack_bs_params.curr_tg_hdr_size = 0;
3915 pack_bs_params.is_last_tile_in_tg = is_last_tile_in_tg;
3916 pack_bs_params.new_tg = new_tg;
3917 pack_bs_params.obu_extn_header = obu_extn_header;
Visheshc1516b92021-04-05 15:33:23 +05303918 pack_bs_params.obu_header_size = 0;
Visheshb2c65b92021-03-12 11:46:45 +05303919 pack_bs_params.saved_wb = saved_wb;
3920 pack_bs_params.tile_col = tile_col;
3921 pack_bs_params.tile_row = tile_row;
3922 pack_bs_params.tile_data_curr = tile_data_curr;
3923 pack_bs_params.total_size = total_size;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003924
Vishesh268a08f2021-04-22 17:28:44 +05303925 if (new_tg)
Vishesh8bd59d92021-04-27 11:34:17 +05303926 av1_write_obu_tg_tile_headers(cpi, xd, &pack_bs_params, tile_idx);
Vishesh268a08f2021-04-22 17:28:44 +05303927
Vishesh5849e6d2021-04-23 19:15:29 +05303928 av1_pack_tile_info(cpi, &cpi->td, &pack_bs_params);
Visheshb2c65b92021-03-12 11:46:45 +05303929
3930 if (new_tg) {
3931 curr_tg_data_size = pack_bs_params.curr_tg_hdr_size;
3932 *tile_data_start += pack_bs_params.curr_tg_hdr_size;
Visheshc1516b92021-04-05 15:33:23 +05303933 *obu_header_size = pack_bs_params.obu_header_size;
Visheshb2c65b92021-03-12 11:46:45 +05303934 new_tg = 0;
3935 }
3936 if (is_last_tile_in_tg) new_tg = 1;
3937
3938 curr_tg_data_size +=
3939 (pack_bs_params.buf.size + (is_last_tile_in_tg ? 0 : 4));
3940
3941 if (pack_bs_params.buf.size > *max_tile_size) {
3942 *largest_tile_id = tile_idx;
3943 *max_tile_size = (unsigned int)pack_bs_params.buf.size;
James Zerna60ff582018-02-24 14:02:12 -08003944 }
Thomas Davies4822e142017-10-10 11:30:36 +01003945
Visheshc1516b92021-04-05 15:33:23 +05303946 if (is_last_tile_in_tg)
Vishesh5849e6d2021-04-23 19:15:29 +05303947 av1_write_last_tile_info(cpi, fh_info, saved_wb, &curr_tg_data_size,
3948 tile_data_curr, total_size, tile_data_start,
3949 largest_tile_id, &is_first_tg,
3950 *obu_header_size, obu_extn_header);
Visheshb2c65b92021-03-12 11:46:45 +05303951 *total_size += (uint32_t)pack_bs_params.buf.size;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003952 }
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04003953 }
Vishesh5849e6d2021-04-23 19:15:29 +05303954 av1_accumulate_pack_bs_thread_data(cpi, &cpi->td);
Vishesh1c320df2021-03-01 09:46:30 +05303955}
James Zerne7c82372018-03-18 20:43:55 -07003956
Vishesh1c320df2021-03-01 09:46:30 +05303957// Write total buffer size and related information into the OBU header for
3958// default tile case.
3959static void write_tile_obu_size(AV1_COMP *const cpi, uint8_t *const dst,
3960 struct aom_write_bit_buffer *saved_wb,
3961 int largest_tile_id, uint32_t *const total_size,
3962 unsigned int max_tile_size,
3963 uint32_t obu_header_size,
3964 uint8_t *tile_data_start) {
3965 const CommonTileParams *const tiles = &cpi->common.tiles;
James Zerne7c82372018-03-18 20:43:55 -07003966
Vishesh1c320df2021-03-01 09:46:30 +05303967 // Fill in context_update_tile_id indicating the tile to use for the
3968 // cdf update. The encoder currently sets it to the largest tile
3969 // (but is up to the encoder)
3970 aom_wb_overwrite_literal(saved_wb, largest_tile_id,
3971 (tiles->log2_cols + tiles->log2_rows));
3972 // If more than one tile group. tile_size_bytes takes the default value 4
3973 // and does not need to be set. For a single tile group it is set in the
3974 // section below.
3975 if (cpi->num_tg != 1) return;
3976 int tile_size_bytes = 4, unused;
3977 const uint32_t tile_data_offset = (uint32_t)(tile_data_start - dst);
3978 const uint32_t tile_data_size = *total_size - tile_data_offset;
James Zerne7c82372018-03-18 20:43:55 -07003979
Vishesh1c320df2021-03-01 09:46:30 +05303980 *total_size = remux_tiles(tiles, tile_data_start, tile_data_size,
3981 max_tile_size, 0, &tile_size_bytes, &unused);
3982 *total_size += tile_data_offset;
3983 assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
Dominic Symesccc38782018-05-04 18:24:00 +02003984
Vishesh1c320df2021-03-01 09:46:30 +05303985 aom_wb_overwrite_literal(saved_wb, tile_size_bytes - 1, 2);
Dominic Symesccc38782018-05-04 18:24:00 +02003986
Vishesh1c320df2021-03-01 09:46:30 +05303987 // Update the OBU length if remux_tiles() reduced the size.
3988 uint64_t payload_size;
3989 size_t length_field_size;
3990 int res =
3991 aom_uleb_decode(dst + obu_header_size, *total_size - obu_header_size,
3992 &payload_size, &length_field_size);
3993 assert(res == 0);
3994 (void)res;
3995
3996 const uint64_t new_payload_size =
3997 *total_size - obu_header_size - length_field_size;
3998 if (new_payload_size != payload_size) {
3999 size_t new_length_field_size;
4000 res = aom_uleb_encode(new_payload_size, length_field_size,
4001 dst + obu_header_size, &new_length_field_size);
4002 assert(res == 0);
4003 if (new_length_field_size < length_field_size) {
4004 const size_t src_offset = obu_header_size + length_field_size;
4005 const size_t dst_offset = obu_header_size + new_length_field_size;
4006 memmove(dst + dst_offset, dst + src_offset, (size_t)payload_size);
4007 *total_size -= (int)(length_field_size - new_length_field_size);
James Zerne7c82372018-03-18 20:43:55 -07004008 }
4009 }
Vishesh1c320df2021-03-01 09:46:30 +05304010}
4011
Vishesh6465ae62021-05-26 15:12:09 +05304012// As per the experiments, single-thread bitstream packing is better for
4013// frames with a smaller bitstream size. This behavior is due to setup time
4014// overhead of multithread function would be more than that of time required
Cherma Rajan A248e8d32021-06-15 09:06:38 +05304015// to pack the smaller bitstream of such frames. This function computes the
4016// number of required number of workers based on setup time overhead and job
4017// dispatch time overhead for given tiles and available workers.
4018int calc_pack_bs_mt_workers(const TileDataEnc *tile_data, int num_tiles,
Cherma Rajan Aa92f3c82022-02-17 20:14:47 +05304019 int avail_workers, bool pack_bs_mt_enabled) {
4020 if (!pack_bs_mt_enabled) return 1;
Vishesh6465ae62021-05-26 15:12:09 +05304021
Vishesh6465ae62021-05-26 15:12:09 +05304022 uint64_t frame_abs_sum_level = 0;
Cherma Rajan A248e8d32021-06-15 09:06:38 +05304023
Vishesh6465ae62021-05-26 15:12:09 +05304024 for (int idx = 0; idx < num_tiles; idx++)
4025 frame_abs_sum_level += tile_data[idx].abs_sum_level;
Cherma Rajan A248e8d32021-06-15 09:06:38 +05304026
Cherma Rajan A248e8d32021-06-15 09:06:38 +05304027 int ideal_num_workers = 1;
4028 const float job_disp_time_const = (float)num_tiles * JOB_DISP_TIME_OH_CONST;
4029 float max_sum = 0.0;
4030
4031 for (int num_workers = avail_workers; num_workers > 1; num_workers--) {
4032 const float fas_per_worker_const =
4033 ((float)(num_workers - 1) / num_workers) * frame_abs_sum_level;
4034 const float setup_time_const = (float)num_workers * SETUP_TIME_OH_CONST;
4035 const float this_sum = fas_per_worker_const - setup_time_const -
4036 job_disp_time_const / num_workers;
4037
4038 if (this_sum > max_sum) {
4039 max_sum = this_sum;
4040 ideal_num_workers = num_workers;
4041 }
4042 }
4043 return ideal_num_workers;
Vishesh6465ae62021-05-26 15:12:09 +05304044}
4045
Vishesh1c320df2021-03-01 09:46:30 +05304046static INLINE uint32_t pack_tiles_in_tg_obus(
4047 AV1_COMP *const cpi, uint8_t *const dst,
4048 struct aom_write_bit_buffer *saved_wb, uint8_t obu_extension_header,
4049 const FrameHeaderInfo *fh_info, int *const largest_tile_id) {
4050 const CommonTileParams *const tiles = &cpi->common.tiles;
4051 uint32_t total_size = 0;
4052 unsigned int max_tile_size = 0;
4053 uint32_t obu_header_size = 0;
4054 uint8_t *tile_data_start = dst;
Vishesh1c320df2021-03-01 09:46:30 +05304055 const int tile_cols = tiles->cols;
4056 const int tile_rows = tiles->rows;
Cherma Rajan Ace0c4232021-04-23 21:29:51 +05304057 const int num_tiles = tile_rows * tile_cols;
Vishesh1c320df2021-03-01 09:46:30 +05304058
Cherma Rajan A248e8d32021-06-15 09:06:38 +05304059 const int num_workers = calc_pack_bs_mt_workers(
Cherma Rajan Aa92f3c82022-02-17 20:14:47 +05304060 cpi->tile_data, num_tiles, cpi->mt_info.num_mod_workers[MOD_PACK_BS],
4061 cpi->mt_info.pack_bs_mt_enabled);
Cherma Rajan Ace0c4232021-04-23 21:29:51 +05304062
Cherma Rajan A248e8d32021-06-15 09:06:38 +05304063 if (num_workers > 1) {
Cherma Rajan Ace0c4232021-04-23 21:29:51 +05304064 av1_write_tile_obu_mt(cpi, dst, &total_size, saved_wb, obu_extension_header,
4065 fh_info, largest_tile_id, &max_tile_size,
Cherma Rajan A248e8d32021-06-15 09:06:38 +05304066 &obu_header_size, &tile_data_start, num_workers);
Cherma Rajan Ace0c4232021-04-23 21:29:51 +05304067 } else {
4068 write_tile_obu(cpi, dst, &total_size, saved_wb, obu_extension_header,
4069 fh_info, largest_tile_id, &max_tile_size, &obu_header_size,
4070 &tile_data_start);
4071 }
4072
4073 if (num_tiles > 1)
Vishesh1c320df2021-03-01 09:46:30 +05304074 write_tile_obu_size(cpi, dst, saved_wb, *largest_tile_id, &total_size,
4075 max_tile_size, obu_header_size, tile_data_start);
Yaowu Xu8cabab82018-03-24 21:49:43 -07004076 return total_size;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04004077}
4078
Vishesh1c320df2021-03-01 09:46:30 +05304079static uint32_t write_tiles_in_tg_obus(AV1_COMP *const cpi, uint8_t *const dst,
4080 struct aom_write_bit_buffer *saved_wb,
4081 uint8_t obu_extension_header,
4082 const FrameHeaderInfo *fh_info,
4083 int *const largest_tile_id) {
4084 AV1_COMMON *const cm = &cpi->common;
4085 const CommonTileParams *const tiles = &cm->tiles;
4086 *largest_tile_id = 0;
4087
Cherma Rajan Ab4be7e52021-04-14 19:28:53 +05304088 // Select the coding strategy (temporal or spatial)
Jingning Han564fe2c2022-03-08 23:35:01 -08004089 if (cm->seg.enabled && cm->seg.update_map) {
4090 if (cm->features.primary_ref_frame == PRIMARY_REF_NONE) {
4091 cm->seg.temporal_update = 0;
4092 } else {
4093 cm->seg.temporal_update = 1;
4094 if (cpi->td.rd_counts.seg_tmp_pred_cost[0] <
4095 cpi->td.rd_counts.seg_tmp_pred_cost[1])
4096 cm->seg.temporal_update = 0;
4097 }
4098 }
Cherma Rajan Ab4be7e52021-04-14 19:28:53 +05304099
Vishesh1c320df2021-03-01 09:46:30 +05304100 if (tiles->large_scale)
4101 return pack_large_scale_tiles_in_tg_obus(cpi, dst, saved_wb,
4102 largest_tile_id);
4103
4104 return pack_tiles_in_tg_obus(cpi, dst, saved_wb, obu_extension_header,
4105 fh_info, largest_tile_id);
4106}
4107
Daniel Max Valenzuela62eab672020-01-08 12:12:47 -08004108static size_t av1_write_metadata_obu(const aom_metadata_t *metadata,
4109 uint8_t *const dst) {
Daniel Max Valenzuela0a6c8082019-11-04 10:52:56 -08004110 size_t coded_metadata_size = 0;
4111 const uint64_t metadata_type = (uint64_t)metadata->type;
4112 if (aom_uleb_encode(metadata_type, sizeof(metadata_type), dst,
4113 &coded_metadata_size) != 0) {
4114 return 0;
4115 }
4116 memcpy(dst + coded_metadata_size, metadata->payload, metadata->sz);
4117 // Add trailing bits.
4118 dst[coded_metadata_size + metadata->sz] = 0x80;
4119 return (uint32_t)(coded_metadata_size + metadata->sz + 1);
4120}
4121
Tom Fineganda8da7f2019-11-14 13:02:55 -08004122static size_t av1_write_metadata_array(AV1_COMP *const cpi, uint8_t *dst) {
Daniel Max Valenzuela0a6c8082019-11-04 10:52:56 -08004123 if (!cpi->source) return 0;
Daniel Max Valenzuela62eab672020-01-08 12:12:47 -08004124 AV1_COMMON *const cm = &cpi->common;
Daniel Max Valenzuela0a6c8082019-11-04 10:52:56 -08004125 aom_metadata_array_t *arr = cpi->source->metadata;
4126 if (!arr) return 0;
Tom Fineganda8da7f2019-11-14 13:02:55 -08004127 size_t obu_header_size = 0;
4128 size_t obu_payload_size = 0;
4129 size_t total_bytes_written = 0;
Daniel Max Valenzuela0a6c8082019-11-04 10:52:56 -08004130 size_t length_field_size = 0;
4131 for (size_t i = 0; i < arr->sz; i++) {
4132 aom_metadata_t *current_metadata = arr->metadata_array[i];
4133 if (current_metadata && current_metadata->payload) {
Daniel Max Valenzuela62eab672020-01-08 12:12:47 -08004134 if ((cm->current_frame.frame_type == KEY_FRAME &&
4135 current_metadata->insert_flag == AOM_MIF_KEY_FRAME) ||
4136 (cm->current_frame.frame_type != KEY_FRAME &&
4137 current_metadata->insert_flag == AOM_MIF_NON_KEY_FRAME) ||
4138 current_metadata->insert_flag == AOM_MIF_ANY_FRAME) {
Tarundeep Singh3b3e9e12021-03-16 14:58:27 +05304139 obu_header_size = av1_write_obu_header(&cpi->ppi->level_params,
4140 &cpi->frame_header_count,
4141 OBU_METADATA, 0, dst);
Daniel Max Valenzuela62eab672020-01-08 12:12:47 -08004142 obu_payload_size =
4143 av1_write_metadata_obu(current_metadata, dst + obu_header_size);
Vishesh5849e6d2021-04-23 19:15:29 +05304144 length_field_size =
4145 av1_obu_memmove(obu_header_size, obu_payload_size, dst);
Daniel Max Valenzuela62eab672020-01-08 12:12:47 -08004146 if (av1_write_uleb_obu_size(obu_header_size, obu_payload_size, dst) ==
4147 AOM_CODEC_OK) {
4148 const size_t obu_size = obu_header_size + obu_payload_size;
4149 dst += obu_size + length_field_size;
4150 total_bytes_written += obu_size + length_field_size;
4151 } else {
Tarundeep Singhcfa5fb42021-04-23 18:25:32 +05304152 aom_internal_error(cpi->common.error, AOM_CODEC_ERROR,
Daniel Max Valenzuela62eab672020-01-08 12:12:47 -08004153 "Error writing metadata OBU size");
4154 }
Daniel Max Valenzuela0a6c8082019-11-04 10:52:56 -08004155 }
4156 }
4157 }
4158 return total_bytes_written;
4159}
4160
David Turner35cba132018-12-10 15:48:15 +00004161int av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size,
4162 int *const largest_tile_id) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004163 uint8_t *data = dst;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004164 uint32_t data_size;
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004165 AV1_COMMON *const cm = &cpi->common;
Tarundeep Singh3b3e9e12021-03-16 14:58:27 +05304166 AV1LevelParams *const level_params = &cpi->ppi->level_params;
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08004167 uint32_t obu_header_size = 0;
4168 uint32_t obu_payload_size = 0;
Tom Fineganf9273812018-03-14 09:49:45 -07004169 FrameHeaderInfo fh_info = { NULL, 0, 0 };
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004170 const uint8_t obu_extension_header =
Soo-Chul Hand2f317c2018-05-08 14:21:24 -04004171 cm->temporal_layer_id << 5 | cm->spatial_layer_id << 3 | 0;
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004172
Debargha Mukherjee472df412019-04-29 15:00:02 -07004173 // If no non-zero delta_q has been used, reset delta_q_present_flag
Yue Chenc87d7492019-05-30 17:22:49 -07004174 if (cm->delta_q_info.delta_q_present_flag && cpi->deltaq_used == 0) {
Debargha Mukherjee472df412019-04-29 15:00:02 -07004175 cm->delta_q_info.delta_q_present_flag = 0;
4176 }
4177
Angie Chiangb11aedf2017-03-10 17:31:46 -08004178#if CONFIG_BITSTREAM_DEBUG
4179 bitstream_queue_reset_write();
4180#endif
4181
Tarundeep Singhd37807f2021-03-31 17:30:55 +05304182 cpi->frame_header_count = 0;
Hui Su4fd11762019-03-26 16:05:07 -07004183
Soo-Chul Han38427e82017-09-27 15:06:13 -04004184 // The TD is now written outside the frame encode loop
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04004185
Marco Paniconidfcdc7b2022-02-11 08:42:58 -08004186 // write sequence header obu at each key frame or intra_only frame,
4187 // preceded by 4-byte size
4188 if (cm->current_frame.frame_type == INTRA_ONLY_FRAME ||
Jingning Han2b922a52022-11-15 14:36:29 -08004189 cm->current_frame.frame_type == KEY_FRAME) {
Tarundeep Singhd37807f2021-03-31 17:30:55 +05304190 obu_header_size = av1_write_obu_header(
4191 level_params, &cpi->frame_header_count, OBU_SEQUENCE_HEADER, 0, data);
Yaowu Xu797674b2019-05-01 17:38:11 -07004192 obu_payload_size =
Tarundeep Singh4243e622021-04-20 16:10:22 +05304193 av1_write_sequence_header_obu(cm->seq_params, data + obu_header_size);
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08004194 const size_t length_field_size =
Vishesh5849e6d2021-04-23 19:15:29 +05304195 av1_obu_memmove(obu_header_size, obu_payload_size, data);
Yaowu Xu797674b2019-05-01 17:38:11 -07004196 if (av1_write_uleb_obu_size(obu_header_size, obu_payload_size, data) !=
Tom Finegan9d9ec1f2018-03-14 17:55:18 -07004197 AOM_CODEC_OK) {
Tom Finegan41150ad2018-01-23 11:42:55 -08004198 return AOM_CODEC_ERROR;
Tom Finegan9d9ec1f2018-03-14 17:55:18 -07004199 }
Tom Finegan41150ad2018-01-23 11:42:55 -08004200
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08004201 data += obu_header_size + obu_payload_size + length_field_size;
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04004202 }
4203
Daniel Max Valenzuela0a6c8082019-11-04 10:52:56 -08004204 // write metadata obus before the frame obu that has the show_frame flag set
4205 if (cm->show_frame) data += av1_write_metadata_array(cpi, data);
4206
Sarah Parker33005522018-07-27 14:46:25 -07004207 const int write_frame_header =
Urvang Joshi1de67aa2020-03-20 11:21:57 -07004208 (cpi->num_tg > 1 || encode_show_existing_frame(cm));
Jayasanker J4d1dfa32020-07-10 21:25:24 +05304209 struct aom_write_bit_buffer saved_wb = { NULL, 0 };
Wan-Teh Chang6e9ea9d2022-04-08 18:18:09 +00004210 size_t length_field = 0;
Vignesh Venkatasubramanianb2ce34e2018-03-05 16:57:40 -08004211 if (write_frame_header) {
4212 // Write Frame Header OBU.
4213 fh_info.frame_header = data;
Tarundeep Singhd37807f2021-03-31 17:30:55 +05304214 obu_header_size =
4215 av1_write_obu_header(level_params, &cpi->frame_header_count,
4216 OBU_FRAME_HEADER, obu_extension_header, data);
Vishesh8bd59d92021-04-27 11:34:17 +05304217 obu_payload_size = write_frame_header_obu(cpi, &cpi->td.mb.e_mbd, &saved_wb,
Cherma Rajan A8ba48242021-04-12 18:13:01 +05304218 data + obu_header_size, 1);
Tom Finegan41150ad2018-01-23 11:42:55 -08004219
Wan-Teh Chang6e9ea9d2022-04-08 18:18:09 +00004220 length_field = av1_obu_memmove(obu_header_size, obu_payload_size, data);
Yaowu Xu797674b2019-05-01 17:38:11 -07004221 if (av1_write_uleb_obu_size(obu_header_size, obu_payload_size, data) !=
Vignesh Venkatasubramanianb2ce34e2018-03-05 16:57:40 -08004222 AOM_CODEC_OK) {
4223 return AOM_CODEC_ERROR;
4224 }
Tom Finegan41150ad2018-01-23 11:42:55 -08004225
Tom Fineganf9273812018-03-14 09:49:45 -07004226 fh_info.obu_header_byte_offset = 0;
Jayasanker J4d1dfa32020-07-10 21:25:24 +05304227 fh_info.total_length = obu_header_size + obu_payload_size + length_field;
Vignesh Venkatasubramanianb2ce34e2018-03-05 16:57:40 -08004228 data += fh_info.total_length;
Vignesh Venkatasubramanianb2ce34e2018-03-05 16:57:40 -08004229 }
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04004230
Sarah Parker33005522018-07-27 14:46:25 -07004231 if (encode_show_existing_frame(cm)) {
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04004232 data_size = 0;
4233 } else {
Wan-Teh Chang6e9ea9d2022-04-08 18:18:09 +00004234 // Since length_field is determined adaptively after frame header
4235 // encoding, saved_wb must be adjusted accordingly.
4236 if (saved_wb.bit_buffer != NULL) {
4237 saved_wb.bit_buffer += length_field;
4238 }
4239
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04004240 // Each tile group obu will be preceded by 4-byte size of the tile group
4241 // obu
David Turner35cba132018-12-10 15:48:15 +00004242 data_size = write_tiles_in_tg_obus(
4243 cpi, data, &saved_wb, obu_extension_header, &fh_info, largest_tile_id);
Soo-Chul Han65c00ae12017-09-07 13:12:35 -04004244 }
Yaowu Xud29ea972018-02-22 09:50:58 -08004245 data += data_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004246 *size = data - dst;
Tom Finegane4099e32018-01-23 12:01:51 -08004247 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004248}