/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <limits.h>
#include <stdio.h>

#include "aom/aom_encoder.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/binary_codes_writer.h"
#include "aom_dsp/bitwriter_buffer.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_ports/system_state.h"
#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#include "av1/common/cdef.h"
#if CONFIG_CFL
#include "av1/common/cfl.h"
#endif
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/mvref_common.h"
#include "av1/common/odintrin.h"
#include "av1/common/pred_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#include "av1/common/seg_common.h"
#include "av1/common/tile_common.h"

#include "av1/encoder/bitstream.h"
#include "av1/encoder/cost.h"
#include "av1/encoder/encodemv.h"
#include "av1/encoder/encodetxb.h"
#include "av1/encoder/mcomp.h"
#include "av1/encoder/palette.h"
#include "av1/encoder/segmentation.h"
#include "av1/encoder/tokenize.h"

#define ENC_MISMATCH_DEBUG 0

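// Writes a value v in [0, n) with a truncated binary code: the first
// (1 << l) - n values take l - 1 bits and the remaining values take l bits,
// where l = get_unsigned_bits(n). For example, with n = 5 the values 0..2
// use 2 bits and 3..4 use 3 bits.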
static INLINE void write_uniform(aom_writer *w, int n, int v) {
  const int l = get_unsigned_bits(n);
  const int m = (1 << l) - n;
  if (l == 0) return;
  if (v < m) {
    aom_write_literal(w, v, l - 1);
  } else {
    aom_write_literal(w, m + ((v - m) >> 1), l - 1);
    aom_write_literal(w, (v - m) & 1, 1);
  }
}

#if CONFIG_LOOP_RESTORATION
static void loop_restoration_write_sb_coeffs(const AV1_COMMON *const cm,
                                             MACROBLOCKD *xd,
                                             const RestorationUnitInfo *rui,
                                             aom_writer *const w, int plane);
#endif  // CONFIG_LOOP_RESTORATION
#if CONFIG_OBU
static void write_uncompressed_header_obu(AV1_COMP *cpi,
#if CONFIG_EXT_TILE
                                          struct aom_write_bit_buffer *saved_wb,
#endif
                                          struct aom_write_bit_buffer *wb);
#else
static void write_uncompressed_header_frame(AV1_COMP *cpi,
                                            struct aom_write_bit_buffer *wb);
#endif

#if !CONFIG_OBU || CONFIG_EXT_TILE
static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
                       const uint32_t data_size, const uint32_t max_tile_size,
                       const uint32_t max_tile_col_size,
                       int *const tile_size_bytes,
                       int *const tile_col_size_bytes);
#endif

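// Writes the luma intra mode of a key-frame block, using a CDF conditioned
// on the modes of the above and left neighbors.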
static void write_intra_mode_kf(FRAME_CONTEXT *frame_ctx, const MODE_INFO *mi,
                                const MODE_INFO *above_mi,
                                const MODE_INFO *left_mi, PREDICTION_MODE mode,
                                aom_writer *w) {
#if CONFIG_INTRABC
  assert(!is_intrabc_block(&mi->mbmi));
#endif  // CONFIG_INTRABC
  (void)mi;
  aom_write_symbol(w, mode, get_y_mode_cdf(frame_ctx, above_mi, left_mi),
                   INTRA_MODES);
}

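// Signals a single-reference inter mode as a cascade of binary decisions:
// NEWMV vs. the rest, then GLOBALMV vs. the rest, then NEARESTMV vs. NEARMV,
// each with its own context derived from mode_ctx.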
static void write_inter_mode(aom_writer *w, PREDICTION_MODE mode,
                             FRAME_CONTEXT *ec_ctx, const int16_t mode_ctx) {
  const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;

  aom_write_symbol(w, mode != NEWMV, ec_ctx->newmv_cdf[newmv_ctx], 2);

  if (mode != NEWMV) {
    const int16_t zeromv_ctx =
        (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
    aom_write_symbol(w, mode != GLOBALMV, ec_ctx->zeromv_cdf[zeromv_ctx], 2);

    if (mode != GLOBALMV) {
      int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
      aom_write_symbol(w, mode != NEARESTMV, ec_ctx->refmv_cdf[refmv_ctx], 2);
    }
  }
}

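// Writes the dynamic reference MV list (DRL) index as a sequence of binary
// symbols, one per candidate position that is actually available in the
// reference MV stack.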
static void write_drl_idx(FRAME_CONTEXT *ec_ctx, const MB_MODE_INFO *mbmi,
                          const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

  assert(mbmi->ref_mv_idx < 3);

  const int new_mv = mbmi->mode == NEWMV || mbmi->mode == NEW_NEWMV;
  if (new_mv) {
    int idx;
    for (idx = 0; idx < 2; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);

        aom_write_symbol(w, mbmi->ref_mv_idx != idx, ec_ctx->drl_cdf[drl_ctx],
                         2);
        if (mbmi->ref_mv_idx == idx) return;
      }
    }
    return;
  }

  if (have_nearmv_in_inter_mode(mbmi->mode)) {
    int idx;
    // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
    for (idx = 1; idx < 3; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_write_symbol(w, mbmi->ref_mv_idx != (idx - 1),
                         ec_ctx->drl_cdf[drl_ctx], 2);
        if (mbmi->ref_mv_idx == (idx - 1)) return;
      }
    }
    return;
  }
}

static void write_inter_compound_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                      aom_writer *w, PREDICTION_MODE mode,
                                      const int16_t mode_ctx) {
  assert(is_inter_compound_mode(mode));
  (void)cm;
  aom_write_symbol(w, INTER_COMPOUND_OFFSET(mode),
                   xd->tile_ctx->inter_compound_mode_cdf[mode_ctx],
                   INTER_COMPOUND_MODES);
}

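// Recursively signals the transform partitioning of an inter block: a 0
// symbol keeps the current tx_size, a 1 symbol splits it and recurses into
// the sub-transforms, stopping at MAX_VARTX_DEPTH or TX_4X4.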
static void write_tx_size_vartx(const AV1_COMMON *cm, MACROBLOCKD *xd,
                                const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
                                int depth, int blk_row, int blk_col,
                                aom_writer *w) {
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
  const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
  const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (depth == MAX_VARTX_DEPTH) {
    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    return;
  }

  const int ctx = txfm_partition_context(xd->above_txfm_context + blk_col,
                                         xd->left_txfm_context + blk_row,
                                         mbmi->sb_type, tx_size);
  const int txb_size_index =
      av1_get_txb_size_index(mbmi->sb_type, blk_row, blk_col);
  const int write_txfm_partition =
      tx_size == mbmi->inter_tx_size[txb_size_index];
  if (write_txfm_partition) {
    aom_write_symbol(w, 0, ec_ctx->txfm_partition_cdf[ctx], 2);

    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    // TODO(yuec): set correct txfm partition update for qttx
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[1][tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];

    aom_write_symbol(w, 1, ec_ctx->txfm_partition_cdf[ctx], 2);

    if (sub_txs == TX_4X4) {
      txfm_partition_update(xd->above_txfm_context + blk_col,
                            xd->left_txfm_context + blk_row, sub_txs, tx_size);
      return;
    }

    assert(bsw > 0 && bsh > 0);
    for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh)
      for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
        int offsetr = blk_row + row;
        int offsetc = blk_col + col;
        write_tx_size_vartx(cm, xd, mbmi, sub_txs, depth + 1, offsetr, offsetc,
                            w);
      }
  }
}

static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
  if (block_signals_txsize(bsize)) {
    const TX_SIZE tx_size = mbmi->tx_size;
    const int tx_size_ctx = get_tx_size_context(xd, 0);
    const int depth = tx_size_to_depth(tx_size, bsize, 0);
    const int max_depths = bsize_to_max_depth(bsize, 0);
    const int32_t tx_size_cat = bsize_to_tx_size_cat(bsize, 0);

    assert(depth >= 0 && depth <= max_depths);
    assert(!is_inter_block(mbmi));
    assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));

    aom_write_symbol(w, depth, ec_ctx->tx_size_cdf[tx_size_cat][tx_size_ctx],
                     max_depths + 1);
  }
}

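// Writes the skip flag for the block. When the segment-level skip feature is
// active, nothing is written and 1 is returned.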
static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                      int segment_id, const MODE_INFO *mi, aom_writer *w) {
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip = mi->mbmi.skip;
    const int ctx = av1_get_skip_context(xd);
    FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
    aom_write_symbol(w, skip, ec_ctx->skip_cdfs[ctx], 2);
    return skip;
  }
}

#if CONFIG_EXT_SKIP
static int write_skip_mode(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                           int segment_id, const MODE_INFO *mi, aom_writer *w) {
  if (!cm->skip_mode_flag) return 0;
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 0;
  }
  const int skip_mode = mi->mbmi.skip_mode;
  if (!is_comp_ref_allowed(mi->mbmi.sb_type)) {
    assert(!skip_mode);
    return 0;
  }
  const int ctx = av1_get_skip_mode_context(xd);
  aom_write_symbol(w, skip_mode, xd->tile_ctx->skip_mode_cdfs[ctx], 2);
  return skip_mode;
}
#endif  // CONFIG_EXT_SKIP

static void write_is_inter(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                           int segment_id, aom_writer *w, const int is_inter) {
  if (!segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)
#if CONFIG_SEGMENT_GLOBALMV
        || segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV)
#endif
        )
      if (!av1_is_valid_scale(&cm->frame_refs[0].sf))
        return;  // LAST_FRAME not valid for reference

    const int ctx = av1_get_intra_inter_context(xd);
    FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
    aom_write_symbol(w, is_inter, ec_ctx->intra_inter_cdf[ctx], 2);
  }
}

static void write_motion_mode(const AV1_COMMON *cm, MACROBLOCKD *xd,
                              const MODE_INFO *mi, aom_writer *w) {
  const MB_MODE_INFO *mbmi = &mi->mbmi;

  MOTION_MODE last_motion_mode_allowed =
      motion_mode_allowed(cm->global_motion, xd, mi);
  switch (last_motion_mode_allowed) {
    case SIMPLE_TRANSLATION: break;
    case OBMC_CAUSAL:
      aom_write_symbol(w, mbmi->motion_mode == OBMC_CAUSAL,
                       xd->tile_ctx->obmc_cdf[mbmi->sb_type], 2);
      break;
    default:
      aom_write_symbol(w, mbmi->motion_mode,
                       xd->tile_ctx->motion_mode_cdf[mbmi->sb_type],
                       MOTION_MODES);
  }
}

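// Codes a delta-q value as a magnitude followed by a sign bit: small
// magnitudes map directly to a symbol, while larger ones send an explicit
// bit-length followed by the remainder.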
static void write_delta_qindex(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                               int delta_qindex, aom_writer *w) {
  int sign = delta_qindex < 0;
  int abs = sign ? -delta_qindex : delta_qindex;
  int rem_bits, thr;
  int smallval = abs < DELTA_Q_SMALL ? 1 : 0;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;

  aom_write_symbol(w, AOMMIN(abs, DELTA_Q_SMALL), ec_ctx->delta_q_cdf,
                   DELTA_Q_PROBS + 1);

  if (!smallval) {
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits - 1, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}

#if CONFIG_EXT_DELTA_Q
static void write_delta_lflevel(const AV1_COMMON *cm, const MACROBLOCKD *xd,
#if CONFIG_LOOPFILTER_LEVEL
                                int lf_id,
#endif
                                int delta_lflevel, aom_writer *w) {
  int sign = delta_lflevel < 0;
  int abs = sign ? -delta_lflevel : delta_lflevel;
  int rem_bits, thr;
  int smallval = abs < DELTA_LF_SMALL ? 1 : 0;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;

#if CONFIG_LOOPFILTER_LEVEL
  if (cm->delta_lf_multi) {
    assert(lf_id >= 0 && lf_id < FRAME_LF_COUNT);
    aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL),
                     ec_ctx->delta_lf_multi_cdf[lf_id], DELTA_LF_PROBS + 1);
  } else {
    aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
                     DELTA_LF_PROBS + 1);
  }
#else
  aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
                   DELTA_LF_PROBS + 1);
#endif  // CONFIG_LOOPFILTER_LEVEL

  if (!smallval) {
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits - 1, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}
#endif  // CONFIG_EXT_DELTA_Q

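// Writes the palette color-index map: the first index with a truncated
// binary code, the remaining indices with their per-token CDFs.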
static void pack_map_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
                            int num) {
  const TOKENEXTRA *p = *tp;
  write_uniform(w, n, p->token);  // The first color index.
  ++p;
  --num;
  for (int i = 0; i < num; ++i) {
    aom_write_symbol(w, p->token, p->color_map_cdf, n);
    ++p;
  }
  *tp = p;
}

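// Recursively walks the transform tree of one plane and writes the quantized
// coefficients of every leaf transform block via av1_write_coeffs_txb().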
static void pack_txb_tokens(aom_writer *w, AV1_COMMON *cm, MACROBLOCK *const x,
                            const TOKENEXTRA **tp,
                            const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
                            MB_MODE_INFO *mbmi, int plane,
                            BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
                            int block, int blk_row, int blk_col,
                            TX_SIZE tx_size, TOKEN_STATS *token_stats) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  const TX_SIZE plane_tx_size =
      plane ? av1_get_uv_tx_size(mbmi, pd->subsampling_x, pd->subsampling_y)
            : mbmi->inter_tx_size[av1_get_txb_size_index(plane_bsize, blk_row,
                                                         blk_col)];

  if (tx_size == plane_tx_size || plane) {
    TOKEN_STATS tmp_token_stats;
    init_token_stats(&tmp_token_stats);

    tran_low_t *tcoeff = BLOCK_OFFSET(x->mbmi_ext->tcoeff[plane], block);
    uint16_t eob = x->mbmi_ext->eobs[plane][block];
    TXB_CTX txb_ctx = { x->mbmi_ext->txb_skip_ctx[plane][block],
                        x->mbmi_ext->dc_sign_ctx[plane][block] };
    av1_write_coeffs_txb(cm, xd, w, blk_row, blk_col, plane, tx_size, tcoeff,
                         eob, &txb_ctx);
#if CONFIG_RD_DEBUG
    token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
    token_stats->cost += tmp_token_stats.cost;
#endif
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[1][tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];

    assert(bsw > 0 && bsh > 0);

    for (int r = 0; r < tx_size_high_unit[tx_size]; r += bsh) {
      for (int c = 0; c < tx_size_wide_unit[tx_size]; c += bsw) {
        const int offsetr = blk_row + r;
        const int offsetc = blk_col + c;
        const int step = bsh * bsw;

        if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

        pack_txb_tokens(w, cm, x, tp, tok_end, xd, mbmi, plane, plane_bsize,
                        bit_depth, block, offsetr, offsetc, sub_txs,
                        token_stats);
        block += step;
      }
    }
  }
}

#if CONFIG_SPATIAL_SEGMENTATION
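// Maps a segment id x onto a non-negative code relative to its spatial
// prediction ref, so that values close to the prediction get small codes;
// the boundary cases at 0 and max - 1 are folded to keep the code in range.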
static int neg_interleave(int x, int ref, int max) {
  const int diff = x - ref;
  if (!ref) return x;
  if (ref >= (max - 1)) return -diff;
  if (2 * ref < max) {
    if (abs(diff) <= ref) {
      if (diff > 0)
        return (diff << 1) - 1;
      else
        return ((-diff) << 1);
    }
    return x;
  } else {
    if (abs(diff) < (max - ref)) {
      if (diff > 0)
        return (diff << 1) - 1;
      else
        return ((-diff) << 1);
    }
    return (max - x) - 1;
  }
}

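// Spatial segmentation: predicts the segment id from the above-left, above
// and left neighbors, then codes the neg_interleave()d id with a CDF chosen
// from that neighbor context.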
static void write_segment_id(AV1_COMP *cpi, const MB_MODE_INFO *const mbmi,
                             aom_writer *w, const struct segmentation *seg,
                             struct segmentation_probs *segp, int mi_row,
                             int mi_col, int skip) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  int prev_ul = -1; /* Top left segment_id */
  int prev_l = -1;  /* Current left segment_id */
  int prev_u = -1;  /* Current top segment_id */

  if (!seg->enabled || !seg->update_map) return;

  if ((xd->up_available) && (xd->left_available))
    prev_ul = get_segment_id(cm, cm->current_frame_seg_map, BLOCK_4X4,
                             mi_row - 1, mi_col - 1);

  if (xd->up_available)
    prev_u = get_segment_id(cm, cm->current_frame_seg_map, BLOCK_4X4,
                            mi_row - 1, mi_col - 0);

  if (xd->left_available)
    prev_l = get_segment_id(cm, cm->current_frame_seg_map, BLOCK_4X4,
                            mi_row - 0, mi_col - 1);

  int cdf_num = pick_spatial_seg_cdf(prev_ul, prev_u, prev_l);
  int pred = pick_spatial_seg_pred(prev_ul, prev_u, prev_l);

  if (skip) {
    set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type, mi_row,
                           mi_col, pred);
    set_spatial_segment_id(cm, cpi->segmentation_map, mbmi->sb_type, mi_row,
                           mi_col, pred);
    /* mbmi is read only but we need to update segment_id */
    ((MB_MODE_INFO *)mbmi)->segment_id = pred;
    return;
  }

  int coded_id =
      neg_interleave(mbmi->segment_id, pred, cm->last_active_segid + 1);

  aom_cdf_prob *pred_cdf = segp->spatial_pred_seg_cdf[cdf_num];
  aom_write_symbol(w, coded_id, pred_cdf, 8);

  set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type, mi_row,
                         mi_col, mbmi->segment_id);
}
#else
static void write_segment_id(aom_writer *w, const struct segmentation *seg,
                             struct segmentation_probs *segp, int segment_id) {
  if (seg->enabled && seg->update_map) {
    aom_write_symbol(w, segment_id, segp->tree_cdf, MAX_SEGMENTS);
  }
}
#endif

#define WRITE_REF_BIT(bname, pname) \
  aom_write_symbol(w, bname, av1_get_pred_cdf_##pname(xd), 2)

// This function encodes the reference frame
static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int is_compound = has_second_ref(mbmi);
  const int segment_id = mbmi->segment_id;

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
           get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
  }
#if CONFIG_SEGMENT_GLOBALMV
  else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP) ||
           segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV))
#else
  else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP))
#endif
  {
    assert(!is_compound);
    assert(mbmi->ref_frame[0] == LAST_FRAME);
  } else {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      if (is_comp_ref_allowed(mbmi->sb_type))
        aom_write_symbol(w, is_compound, av1_get_reference_mode_cdf(cm, xd), 2);
    } else {
      assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (is_compound) {
#if CONFIG_EXT_COMP_REFS
      const COMP_REFERENCE_TYPE comp_ref_type = has_uni_comp_refs(mbmi)
                                                    ? UNIDIR_COMP_REFERENCE
                                                    : BIDIR_COMP_REFERENCE;
      aom_write_symbol(w, comp_ref_type, av1_get_comp_reference_type_cdf(xd),
                       2);

      if (comp_ref_type == UNIDIR_COMP_REFERENCE) {
        const int bit = mbmi->ref_frame[0] == BWDREF_FRAME;
        WRITE_REF_BIT(bit, uni_comp_ref_p);

        if (!bit) {
          assert(mbmi->ref_frame[0] == LAST_FRAME);
          const int bit1 = mbmi->ref_frame[1] == LAST3_FRAME ||
                           mbmi->ref_frame[1] == GOLDEN_FRAME;
          WRITE_REF_BIT(bit1, uni_comp_ref_p1);
          if (bit1) {
            const int bit2 = mbmi->ref_frame[1] == GOLDEN_FRAME;
            WRITE_REF_BIT(bit2, uni_comp_ref_p2);
          }
        } else {
          assert(mbmi->ref_frame[1] == ALTREF_FRAME);
        }

        return;
      }

      assert(comp_ref_type == BIDIR_COMP_REFERENCE);
#endif  // CONFIG_EXT_COMP_REFS

      const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
                       mbmi->ref_frame[0] == LAST3_FRAME);
      WRITE_REF_BIT(bit, comp_ref_p);

      if (!bit) {
        const int bit1 = mbmi->ref_frame[0] == LAST2_FRAME;
        WRITE_REF_BIT(bit1, comp_ref_p1);
      } else {
        const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
        WRITE_REF_BIT(bit2, comp_ref_p2);
      }

      const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
      WRITE_REF_BIT(bit_bwd, comp_bwdref_p);

      if (!bit_bwd) {
        WRITE_REF_BIT(mbmi->ref_frame[1] == ALTREF2_FRAME, comp_bwdref_p1);
      }

    } else {
      const int bit0 = (mbmi->ref_frame[0] <= ALTREF_FRAME &&
                        mbmi->ref_frame[0] >= BWDREF_FRAME);
      WRITE_REF_BIT(bit0, single_ref_p1);

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
        WRITE_REF_BIT(bit1, single_ref_p2);

        if (!bit1) {
          WRITE_REF_BIT(mbmi->ref_frame[0] == ALTREF2_FRAME, single_ref_p6);
        }
      } else {
        const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
                          mbmi->ref_frame[0] == GOLDEN_FRAME);
        WRITE_REF_BIT(bit2, single_ref_p3);

        if (!bit2) {
          const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
          WRITE_REF_BIT(bit3, single_ref_p4);
        } else {
          const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
          WRITE_REF_BIT(bit4, single_ref_p5);
        }
      }
    }
  }
}

#if CONFIG_FILTER_INTRA
static void write_filter_intra_mode_info(const MACROBLOCKD *xd,
                                         const MB_MODE_INFO *const mbmi,
                                         aom_writer *w) {
  if (mbmi->mode == DC_PRED && mbmi->palette_mode_info.palette_size[0] == 0 &&
      av1_filter_intra_allowed_txsize(mbmi->tx_size)) {
    aom_write_symbol(w, mbmi->filter_intra_mode_info.use_filter_intra,
                     xd->tile_ctx->filter_intra_cdfs[mbmi->tx_size], 2);
    if (mbmi->filter_intra_mode_info.use_filter_intra) {
      const FILTER_INTRA_MODE mode =
          mbmi->filter_intra_mode_info.filter_intra_mode;
      aom_write_symbol(w, mode, xd->tile_ctx->filter_intra_mode_cdf,
                       FILTER_INTRA_MODES);
    }
  }
}
#endif  // CONFIG_FILTER_INTRA

static void write_angle_delta(aom_writer *w, int angle_delta,
                              aom_cdf_prob *cdf) {
#if CONFIG_EXT_INTRA_MOD
  aom_write_symbol(w, angle_delta + MAX_ANGLE_DELTA, cdf,
                   2 * MAX_ANGLE_DELTA + 1);
#else
  (void)cdf;
  write_uniform(w, 2 * MAX_ANGLE_DELTA + 1, MAX_ANGLE_DELTA + angle_delta);
#endif  // CONFIG_EXT_INTRA_MOD
}

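// Signals the switchable interpolation filter(s) for an inter block. Nothing
// is written when the block does not need an interpolation filter
// (av1_is_interp_needed() returns 0) or when the frame-level filter is not
// SWITCHABLE.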
static void write_mb_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;

  if (!av1_is_interp_needed(xd)) {
    assert(mbmi->interp_filters ==
           av1_broadcast_interp_filter(
               av1_unswitchable_filter(cm->interp_filter)));
    return;
  }
  if (cm->interp_filter == SWITCHABLE) {
#if CONFIG_DUAL_FILTER
    int dir;
    for (dir = 0; dir < 2; ++dir) {
      if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
          (mbmi->ref_frame[1] > INTRA_FRAME &&
           has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
        const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
        InterpFilter filter =
            av1_extract_interp_filter(mbmi->interp_filters, dir);
        aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
                         SWITCHABLE_FILTERS);
        ++cpi->interp_filter_selected[0][filter];
      } else {
        assert(av1_extract_interp_filter(mbmi->interp_filters, dir) ==
               EIGHTTAP_REGULAR);
      }
    }
#else
    {
      const int ctx = av1_get_pred_context_switchable_interp(xd);
      InterpFilter filter = av1_extract_interp_filter(mbmi->interp_filters, 0);
      aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
                       SWITCHABLE_FILTERS);
      ++cpi->interp_filter_selected[0][filter];
    }
#endif  // CONFIG_DUAL_FILTER
  }
}

// Transmit color values with delta encoding. Write the first value as
// literal, and the deltas between each value and the previous one. "min_val"
// is the smallest possible value of the deltas.
static void delta_encode_palette_colors(const int *colors, int num,
                                        int bit_depth, int min_val,
                                        aom_writer *w) {
  if (num <= 0) return;
  assert(colors[0] < (1 << bit_depth));
  aom_write_literal(w, colors[0], bit_depth);
  if (num == 1) return;
  int max_delta = 0;
  int deltas[PALETTE_MAX_SIZE];
  memset(deltas, 0, sizeof(deltas));
  for (int i = 1; i < num; ++i) {
    assert(colors[i] < (1 << bit_depth));
    const int delta = colors[i] - colors[i - 1];
    deltas[i - 1] = delta;
    assert(delta >= min_val);
    if (delta > max_delta) max_delta = delta;
  }
  const int min_bits = bit_depth - 3;
  int bits = AOMMAX(av1_ceil_log2(max_delta + 1 - min_val), min_bits);
  assert(bits <= bit_depth);
  int range = (1 << bit_depth) - colors[0] - min_val;
  aom_write_literal(w, bits - min_bits, 2);
  for (int i = 0; i < num - 1; ++i) {
    aom_write_literal(w, deltas[i] - min_val, bits);
    range -= deltas[i];
    bits = AOMMIN(bits, av1_ceil_log2(range));
  }
}

// Transmit luma palette color values. First signal if each color in the color
// cache is used. Those colors that are not in the cache are transmitted with
// delta encoding.
static void write_palette_colors_y(const MACROBLOCKD *const xd,
                                   const PALETTE_MODE_INFO *const pmi,
                                   int bit_depth, aom_writer *w) {
  const int n = pmi->palette_size[0];
  uint16_t color_cache[2 * PALETTE_MAX_SIZE];
  const int n_cache = av1_get_palette_cache(xd, 0, color_cache);
  int out_cache_colors[PALETTE_MAX_SIZE];
  uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
  const int n_out_cache =
      av1_index_color_cache(color_cache, n_cache, pmi->palette_colors, n,
                            cache_color_found, out_cache_colors);
  int n_in_cache = 0;
  for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
    const int found = cache_color_found[i];
    aom_write_bit(w, found);
    n_in_cache += found;
  }
  assert(n_in_cache + n_out_cache == n);
  delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 1, w);
}

// Write chroma palette color values. U channel is handled similarly to the
// luma channel. For v channel, either use delta encoding or transmit raw
// values directly, whichever costs less.
static void write_palette_colors_uv(const MACROBLOCKD *const xd,
                                    const PALETTE_MODE_INFO *const pmi,
                                    int bit_depth, aom_writer *w) {
  const int n = pmi->palette_size[1];
  const uint16_t *colors_u = pmi->palette_colors + PALETTE_MAX_SIZE;
  const uint16_t *colors_v = pmi->palette_colors + 2 * PALETTE_MAX_SIZE;
  // U channel colors.
  uint16_t color_cache[2 * PALETTE_MAX_SIZE];
  const int n_cache = av1_get_palette_cache(xd, 1, color_cache);
  int out_cache_colors[PALETTE_MAX_SIZE];
  uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
  const int n_out_cache = av1_index_color_cache(
      color_cache, n_cache, colors_u, n, cache_color_found, out_cache_colors);
  int n_in_cache = 0;
  for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
    const int found = cache_color_found[i];
    aom_write_bit(w, found);
    n_in_cache += found;
  }
  delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 0, w);

  // V channel colors. Don't use color cache as the colors are not sorted.
  const int max_val = 1 << bit_depth;
  int zero_count = 0, min_bits_v = 0;
  int bits_v =
      av1_get_palette_delta_bits_v(pmi, bit_depth, &zero_count, &min_bits_v);
  const int rate_using_delta =
      2 + bit_depth + (bits_v + 1) * (n - 1) - zero_count;
  const int rate_using_raw = bit_depth * n;
  if (rate_using_delta < rate_using_raw) {  // delta encoding
    assert(colors_v[0] < (1 << bit_depth));
    aom_write_bit(w, 1);
    aom_write_literal(w, bits_v - min_bits_v, 2);
    aom_write_literal(w, colors_v[0], bit_depth);
    for (int i = 1; i < n; ++i) {
      assert(colors_v[i] < (1 << bit_depth));
      if (colors_v[i] == colors_v[i - 1]) {  // No need to signal sign bit.
        aom_write_literal(w, 0, bits_v);
        continue;
      }
      const int delta = abs((int)colors_v[i] - colors_v[i - 1]);
      const int sign_bit = colors_v[i] < colors_v[i - 1];
      if (delta <= max_val - delta) {
        aom_write_literal(w, delta, bits_v);
        aom_write_bit(w, sign_bit);
      } else {
        aom_write_literal(w, max_val - delta, bits_v);
        aom_write_bit(w, !sign_bit);
      }
    }
  } else {  // Transmit raw values.
    aom_write_bit(w, 0);
    for (int i = 0; i < n; ++i) {
      assert(colors_v[i] < (1 << bit_depth));
      aom_write_literal(w, colors_v[i], bit_depth);
    }
  }
}

static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                    const MODE_INFO *const mi, int mi_row,
                                    int mi_col, aom_writer *w) {
  const int num_planes = av1_num_planes(cm);
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  assert(av1_allow_palette(cm->allow_screen_content_tools, bsize));
  const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
  const int bsize_ctx = av1_get_palette_bsize_ctx(bsize);

  if (mbmi->mode == DC_PRED) {
    const int n = pmi->palette_size[0];
    const int palette_y_mode_ctx = av1_get_palette_mode_ctx(xd);
    aom_write_symbol(
        w, n > 0,
        xd->tile_ctx->palette_y_mode_cdf[bsize_ctx][palette_y_mode_ctx], 2);
    if (n > 0) {
      aom_write_symbol(w, n - PALETTE_MIN_SIZE,
                       xd->tile_ctx->palette_y_size_cdf[bsize_ctx],
                       PALETTE_SIZES);
      write_palette_colors_y(xd, pmi, cm->bit_depth, w);
    }
  }

  const int uv_dc_pred =
      num_planes > 1 && mbmi->uv_mode == UV_DC_PRED &&
      is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
                          xd->plane[1].subsampling_y);
  if (uv_dc_pred) {
    const int n = pmi->palette_size[1];
    const int palette_uv_mode_ctx = (pmi->palette_size[0] > 0);
    aom_write_symbol(w, n > 0,
                     xd->tile_ctx->palette_uv_mode_cdf[palette_uv_mode_ctx], 2);
    if (n > 0) {
      aom_write_symbol(w, n - PALETTE_MIN_SIZE,
                       xd->tile_ctx->palette_uv_size_cdf[bsize_ctx],
                       PALETTE_SIZES);
      write_palette_colors_uv(xd, pmi, cm->bit_depth, w);
    }
  }
}

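// Writes the transform type when the extended transform set for this block
// has more than one candidate; the set depends on transform size, block size,
// intra/inter, and the reduced-tx-set flag. Nothing is written for skipped
// blocks or when the effective qindex is 0.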
void av1_write_tx_type(const AV1_COMMON *const cm, const MACROBLOCKD *xd,
#if CONFIG_TXK_SEL
                       int blk_row, int blk_col, int plane, TX_SIZE tx_size,
#endif
                       aom_writer *w) {
  MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
  const int is_inter = is_inter_block(mbmi);
#if !CONFIG_TXK_SEL
  const TX_SIZE mtx_size =
      get_max_rect_tx_size(xd->mi[0]->mbmi.sb_type, is_inter);
  const TX_SIZE tx_size =
      is_inter ? TXSIZEMAX(sub_tx_size_map[1][mtx_size], mbmi->min_tx_size)
               : mbmi->tx_size;
#endif  // !CONFIG_TXK_SEL
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;

#if !CONFIG_TXK_SEL
  TX_TYPE tx_type = mbmi->tx_type;
#else
  // Only y plane's tx_type is transmitted
  if (plane > 0) return;
  PLANE_TYPE plane_type = get_plane_type(plane);
  TX_TYPE tx_type = av1_get_tx_type(plane_type, xd, blk_row, blk_col, tx_size,
                                    cm->reduced_tx_set_used);
#endif

  const TX_SIZE square_tx_size = txsize_sqr_map[tx_size];
  const BLOCK_SIZE bsize = mbmi->sb_type;
  if (get_ext_tx_types(tx_size, bsize, is_inter, cm->reduced_tx_set_used) > 1 &&
      ((!cm->seg.enabled && cm->base_qindex > 0) ||
       (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
      !mbmi->skip &&
      !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
    const TxSetType tx_set_type =
        get_ext_tx_set_type(tx_size, bsize, is_inter, cm->reduced_tx_set_used);
    const int eset =
        get_ext_tx_set(tx_size, bsize, is_inter, cm->reduced_tx_set_used);
    // eset == 0 should correspond to a set with only DCT_DCT and there
    // is no need to send the tx_type
    assert(eset > 0);
    assert(av1_ext_tx_used[tx_set_type][tx_type]);
    if (is_inter) {
      aom_write_symbol(w, av1_ext_tx_ind[tx_set_type][tx_type],
                       ec_ctx->inter_ext_tx_cdf[eset][square_tx_size],
                       av1_num_ext_tx_set[tx_set_type]);
    } else {
#if CONFIG_FILTER_INTRA
      PREDICTION_MODE intra_dir;
      if (mbmi->filter_intra_mode_info.use_filter_intra)
        intra_dir =
            fimode_to_intradir[mbmi->filter_intra_mode_info.filter_intra_mode];
      else
        intra_dir = mbmi->mode;
      aom_write_symbol(
          w, av1_ext_tx_ind[tx_set_type][tx_type],
          ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][intra_dir],
          av1_num_ext_tx_set[tx_set_type]);
#else
      aom_write_symbol(
          w, av1_ext_tx_ind[tx_set_type][tx_type],
          ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][mbmi->mode],
          av1_num_ext_tx_set[tx_set_type]);
#endif
    }
  }
}

static void write_intra_mode(FRAME_CONTEXT *frame_ctx, BLOCK_SIZE bsize,
                             PREDICTION_MODE mode, aom_writer *w) {
  aom_write_symbol(w, mode, frame_ctx->y_mode_cdf[size_group_lookup[bsize]],
                   INTRA_MODES);
}

static void write_intra_uv_mode(FRAME_CONTEXT *frame_ctx,
                                UV_PREDICTION_MODE uv_mode,
                                PREDICTION_MODE y_mode,
#if CONFIG_CFL
                                CFL_ALLOWED_TYPE cfl_allowed,
#endif
                                aom_writer *w) {
#if CONFIG_CFL
  aom_write_symbol(w, uv_mode, frame_ctx->uv_mode_cdf[cfl_allowed][y_mode],
                   UV_INTRA_MODES - !cfl_allowed);
#else
  uv_mode = get_uv_mode(uv_mode);
  aom_write_symbol(w, uv_mode, frame_ctx->uv_mode_cdf[y_mode], UV_INTRA_MODES);
#endif
}

#if CONFIG_CFL
static void write_cfl_alphas(FRAME_CONTEXT *const ec_ctx, int idx,
                             int joint_sign, aom_writer *w) {
  aom_write_symbol(w, joint_sign, ec_ctx->cfl_sign_cdf, CFL_JOINT_SIGNS);
  // Magnitudes are only signaled for nonzero codes.
  if (CFL_SIGN_U(joint_sign) != CFL_SIGN_ZERO) {
    aom_cdf_prob *cdf_u = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_U(joint_sign)];
    aom_write_symbol(w, CFL_IDX_U(idx), cdf_u, CFL_ALPHABET_SIZE);
  }
  if (CFL_SIGN_V(joint_sign) != CFL_SIGN_ZERO) {
    aom_cdf_prob *cdf_v = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_V(joint_sign)];
    aom_write_symbol(w, CFL_IDX_V(idx), cdf_v, CFL_ALPHABET_SIZE);
  }
}
#endif

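// Signals the CDEF strength index. It is written at most once per 64x64
// filter unit, at the first non-skipped block inside that unit.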
static void write_cdef(AV1_COMMON *cm, aom_writer *w, int skip, int mi_col,
                       int mi_row) {
  if (cm->all_lossless) return;

  const int m = ~((1 << (6 - MI_SIZE_LOG2)) - 1);
  const MB_MODE_INFO *mbmi =
      &cm->mi_grid_visible[(mi_row & m) * cm->mi_stride + (mi_col & m)]->mbmi;
  // Initialise when at top left part of the superblock
  if (!(mi_row & (cm->seq_params.mib_size - 1)) &&
      !(mi_col & (cm->seq_params.mib_size - 1))) {  // Top left?
#if CONFIG_EXT_PARTITION
    cm->cdef_preset[0] = cm->cdef_preset[1] = cm->cdef_preset[2] =
        cm->cdef_preset[3] = -1;
#else
    cm->cdef_preset = -1;
#endif
  }

// Emit CDEF param at first non-skip coding block
#if CONFIG_EXT_PARTITION
  const int mask = 1 << (6 - MI_SIZE_LOG2);
  const int index = cm->seq_params.sb_size == BLOCK_128X128
                        ? !!(mi_col & mask) + 2 * !!(mi_row & mask)
                        : 0;
  if (cm->cdef_preset[index] == -1 && !skip) {
    aom_write_literal(w, mbmi->cdef_strength, cm->cdef_bits);
    cm->cdef_preset[index] = mbmi->cdef_strength;
  }
#else
  if (cm->cdef_preset == -1 && !skip) {
    aom_write_literal(w, mbmi->cdef_strength, cm->cdef_bits);
    cm->cdef_preset = mbmi->cdef_strength;
  }
#endif
}

Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001008static void write_inter_segment_id(AV1_COMP *cpi, aom_writer *w,
1009 const struct segmentation *const seg,
1010 struct segmentation_probs *const segp,
1011 int mi_row, int mi_col, int skip,
1012 int preskip) {
1013 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1014 const MODE_INFO *mi = xd->mi[0];
1015 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1016#if CONFIG_SPATIAL_SEGMENTATION
1017 AV1_COMMON *const cm = &cpi->common;
1018#else
1019 (void)mi_row;
1020 (void)mi_col;
1021 (void)skip;
1022 (void)preskip;
1023#endif
1024
1025 if (seg->update_map) {
1026#if CONFIG_SPATIAL_SEGMENTATION
1027 if (preskip) {
1028 if (!cm->preskip_segid) return;
1029 } else {
1030 if (cm->preskip_segid) return;
1031 if (skip) {
Rostislav Pehlivanov2d4322b2018-01-11 17:19:58 +00001032 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 1);
Hui Su52b1ba22017-12-27 14:25:25 -08001033 if (seg->temporal_update) ((MB_MODE_INFO *)mbmi)->seg_id_predicted = 0;
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001034 return;
1035 }
1036 }
1037#endif
1038 if (seg->temporal_update) {
1039 const int pred_flag = mbmi->seg_id_predicted;
1040 aom_cdf_prob *pred_cdf = av1_get_pred_cdf_seg_id(segp, xd);
1041 aom_write_symbol(w, pred_flag, pred_cdf, 2);
1042 if (!pred_flag) {
1043#if CONFIG_SPATIAL_SEGMENTATION
1044 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
1045#else
1046 write_segment_id(w, seg, segp, mbmi->segment_id);
1047#endif
1048 }
1049#if CONFIG_SPATIAL_SEGMENTATION
1050 if (pred_flag) {
1051 set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type,
1052 mi_row, mi_col, mbmi->segment_id);
1053 }
1054#endif
1055 } else {
1056#if CONFIG_SPATIAL_SEGMENTATION
1057 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
1058#else
1059 write_segment_id(w, seg, segp, mbmi->segment_id);
1060#endif
1061 }
1062 }
1063}
1064
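// Writes all mode info for a block in an inter (non-intra-only) frame:
// segment id, skip / skip_mode, CDEF strength, delta-q and delta-LF,
// transform sizes, and then either the intra mode info or the inter
// parameters (reference frames, prediction mode, DRL index, motion vectors,
// inter-intra, compound type, motion mode, interpolation filter) and,
// without CONFIG_TXK_SEL, the transform type.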
Angie Chiangc31ea682017-04-13 16:20:54 -07001065static void pack_inter_mode_mvs(AV1_COMP *cpi, const int mi_row,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001066 const int mi_col, aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001067 AV1_COMMON *const cm = &cpi->common;
Arild Fuldseth07441162016-08-15 15:07:52 +02001068 MACROBLOCK *const x = &cpi->td.mb;
1069 MACROBLOCKD *const xd = &x->e_mbd;
Thomas Davies24523292017-01-11 16:56:47 +00001070 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Angie Chiangc31ea682017-04-13 16:20:54 -07001071 const MODE_INFO *mi = xd->mi[0];
Thomas Davies24523292017-01-11 16:56:47 +00001072
Yaowu Xuc27fc142016-08-22 16:08:15 -07001073 const struct segmentation *const seg = &cm->seg;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001074 struct segmentation_probs *const segp = &ec_ctx->seg;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001075 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1076 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
1077 const PREDICTION_MODE mode = mbmi->mode;
1078 const int segment_id = mbmi->segment_id;
1079 const BLOCK_SIZE bsize = mbmi->sb_type;
1080 const int allow_hp = cm->allow_high_precision_mv;
1081 const int is_inter = is_inter_block(mbmi);
1082 const int is_compound = has_second_ref(mbmi);
1083 int skip, ref;
David Barker45390c12017-02-20 14:44:40 +00001084 (void)mi_row;
1085 (void)mi_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001086
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001087 write_inter_segment_id(cpi, w, seg, segp, mi_row, mi_col, 0, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001088
Zoe Liuf40a9572017-10-13 12:37:19 -07001089#if CONFIG_EXT_SKIP
1090 write_skip_mode(cm, xd, segment_id, mi, w);
1091
1092 if (mbmi->skip_mode) {
1093 skip = mbmi->skip;
1094 assert(skip);
1095 } else {
1096#endif // CONFIG_EXT_SKIP
1097 skip = write_skip(cm, xd, segment_id, mi, w);
1098#if CONFIG_EXT_SKIP
1099 }
1100#endif // CONFIG_EXT_SKIP
1101
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001102#if CONFIG_SPATIAL_SEGMENTATION
1103 write_inter_segment_id(cpi, w, seg, segp, mi_row, mi_col, skip, 0);
1104#endif
Zoe Liuf40a9572017-10-13 12:37:19 -07001105
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001106 write_cdef(cm, w, skip, mi_col, mi_row);
1107
Arild Fuldseth07441162016-08-15 15:07:52 +02001108 if (cm->delta_q_present_flag) {
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001109 int super_block_upper_left =
1110 ((mi_row & (cm->seq_params.mib_size - 1)) == 0) &&
1111 ((mi_col & (cm->seq_params.mib_size - 1)) == 0);
1112 if ((bsize != cm->seq_params.sb_size || skip == 0) &&
1113 super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01001114 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01001115 int reduced_delta_qindex =
1116 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00001117 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001118 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07001119#if CONFIG_EXT_DELTA_Q
Cheng Chena97394f2017-09-27 15:05:14 -07001120#if CONFIG_LOOPFILTER_LEVEL
1121 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07001122 if (cm->delta_lf_multi) {
1123 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id) {
1124 int reduced_delta_lflevel =
1125 (mbmi->curr_delta_lf[lf_id] - xd->prev_delta_lf[lf_id]) /
1126 cm->delta_lf_res;
1127 write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
1128 xd->prev_delta_lf[lf_id] = mbmi->curr_delta_lf[lf_id];
1129 }
1130 } else {
Cheng Chena97394f2017-09-27 15:05:14 -07001131 int reduced_delta_lflevel =
Cheng Chen880166a2017-10-02 17:48:48 -07001132 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
Cheng Chena97394f2017-09-27 15:05:14 -07001133 cm->delta_lf_res;
Cheng Chen880166a2017-10-02 17:48:48 -07001134 write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
1135 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
Cheng Chena97394f2017-09-27 15:05:14 -07001136 }
1137 }
1138#else
Fangwen Fu231fe422017-04-24 17:52:29 -07001139 if (cm->delta_lf_present_flag) {
1140 int reduced_delta_lflevel =
1141 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
1142 cm->delta_lf_res;
1143 write_delta_lflevel(cm, xd, reduced_delta_lflevel, w);
1144 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
1145 }
Cheng Chena97394f2017-09-27 15:05:14 -07001146#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07001147#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02001148 }
1149 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001150
Zoe Liuf40a9572017-10-13 12:37:19 -07001151#if CONFIG_EXT_SKIP
1152 if (!mbmi->skip_mode)
1153#endif // CONFIG_EXT_SKIP
1154 write_is_inter(cm, xd, mbmi->segment_id, w, is_inter);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001155
Debargha Mukherjee4def76a2017-10-19 13:38:35 -07001156 if (cm->tx_mode == TX_MODE_SELECT && block_signals_txsize(bsize) &&
Yaowu Xuc27fc142016-08-22 16:08:15 -07001157 !(is_inter && skip) && !xd->lossless[segment_id]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001158 if (is_inter) { // This implies skip flag is 0.
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001159 const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, bsize, 0);
Jingning Hanf64062f2016-11-02 16:22:18 -07001160 const int bh = tx_size_high_unit[max_tx_size];
1161 const int bw = tx_size_wide_unit[max_tx_size];
Jingning Han9ca05b72017-01-03 14:41:36 -08001162 const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
1163 const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001164 int idx, idy;
Jingning Hanfe45b212016-11-22 10:30:23 -08001165 for (idy = 0; idy < height; idy += bh)
1166 for (idx = 0; idx < width; idx += bw)
Debargha Mukherjeeedc73462017-10-31 15:13:32 -07001167 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, 0, idy, idx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001168 } else {
Jingning Han1b1dc932016-11-09 10:55:30 -08001169 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001170 write_selected_tx_size(cm, xd, w);
1171 }
1172 } else {
Jingning Han1b1dc932016-11-09 10:55:30 -08001173 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001174 }
1175
Zoe Liuf40a9572017-10-13 12:37:19 -07001176#if CONFIG_EXT_SKIP
Zoe Liu56644192017-12-19 13:16:18 -08001177 if (mbmi->skip_mode) return;
Zoe Liuf40a9572017-10-13 12:37:19 -07001178#endif // CONFIG_EXT_SKIP
1179
Yaowu Xuc27fc142016-08-22 16:08:15 -07001180 if (!is_inter) {
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001181 write_intra_mode(ec_ctx, bsize, mode, w);
Luc Trudeau866da792018-02-12 11:13:34 -05001182 const int use_angle_delta = av1_use_angle_delta(bsize);
1183
1184 if (use_angle_delta && av1_is_directional_mode(mode, bsize)) {
1185 write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_Y],
1186 ec_ctx->angle_delta_cdf[mode - V_PRED]);
1187 }
1188
David Barkerc2a680e2018-02-07 15:53:53 +00001189#if CONFIG_MONO_VIDEO
1190 if (!cm->seq_params.monochrome &&
1191 is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
1192 xd->plane[1].subsampling_y))
1193#else
Jingning Hand3a64432017-04-06 17:04:17 -07001194 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
David Barkerc2a680e2018-02-07 15:53:53 +00001195 xd->plane[1].subsampling_y))
1196#endif // CONFIG_MONO_VIDEO
1197 {
Luc Trudeau866da792018-02-12 11:13:34 -05001198 const UV_PREDICTION_MODE uv_mode = mbmi->uv_mode;
David Michael Barrcb3a8ef2018-01-06 15:48:49 +09001199#if !CONFIG_CFL
Luc Trudeau866da792018-02-12 11:13:34 -05001200 write_intra_uv_mode(ec_ctx, uv_mode, mode, w);
David Michael Barrcb3a8ef2018-01-06 15:48:49 +09001201#else
Luc Trudeau866da792018-02-12 11:13:34 -05001202 write_intra_uv_mode(ec_ctx, uv_mode, mode, is_cfl_allowed(mbmi), w);
1203 if (uv_mode == UV_CFL_PRED)
David Michael Barr23198662017-06-19 23:19:48 +09001204 write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001205#endif
Luc Trudeau866da792018-02-12 11:13:34 -05001206 if (use_angle_delta &&
1207 av1_is_directional_mode(get_uv_mode(uv_mode), bsize)) {
1208 write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_UV],
1209 ec_ctx->angle_delta_cdf[uv_mode - V_PRED]);
1210 }
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001211 }
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001212
Hui Sue87fb232017-10-05 15:00:15 -07001213 if (av1_allow_palette(cm->allow_screen_content_tools, bsize))
Hui Su8b618f62017-12-20 12:03:35 -08001214 write_palette_mode_info(cm, xd, mi, mi_row, mi_col, w);
hui su5db97432016-10-14 16:10:14 -07001215#if CONFIG_FILTER_INTRA
Yue Chen4eba69b2017-11-09 22:37:35 -08001216 write_filter_intra_mode_info(xd, mbmi, w);
hui su5db97432016-10-14 16:10:14 -07001217#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001218 } else {
Yaowu Xub0d0d002016-11-22 09:26:43 -08001219 int16_t mode_ctx;
Zoe Liufa8bad12018-01-23 14:32:31 -08001220
1221 av1_collect_neighbors_ref_counts(xd);
1222
Yaowu Xuc27fc142016-08-22 16:08:15 -07001223 write_ref_frames(cm, xd, w);
1224
Jingning Han7ae50fd2018-02-05 16:33:40 -08001225#if CONFIG_OPT_REF_MV
1226 mode_ctx =
1227 av1_mode_context_analyzer(mbmi_ext->mode_context, mbmi->ref_frame);
1228#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001229 if (is_compound)
1230 mode_ctx = mbmi_ext->compound_mode_context[mbmi->ref_frame[0]];
1231 else
Luc Trudeau15a18e32017-12-13 14:15:25 -05001232 mode_ctx =
1233 av1_mode_context_analyzer(mbmi_ext->mode_context, mbmi->ref_frame);
Jingning Han7ae50fd2018-02-05 16:33:40 -08001234#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001235
1236 // If segment skip is not enabled code the mode.
1237 if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001238 if (is_inter_compound_mode(mode))
1239 write_inter_compound_mode(cm, xd, w, mode, mode_ctx);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001240 else if (is_inter_singleref_mode(mode))
1241 write_inter_mode(w, mode, ec_ctx, mode_ctx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001242
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001243 if (mode == NEWMV || mode == NEW_NEWMV || have_nearmv_in_inter_mode(mode))
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001244 write_drl_idx(ec_ctx, mbmi, mbmi_ext, w);
1245 else
1246 assert(mbmi->ref_mv_idx == 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001247 }
1248
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001249 if (mode == NEWMV || mode == NEW_NEWMV) {
1250 int_mv ref_mv;
1251 for (ref = 0; ref < 1 + is_compound; ++ref) {
1252 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1253 int nmv_ctx =
1254 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1255 mbmi_ext->ref_mv_stack[rf_type], ref, mbmi->ref_mv_idx);
1256 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1257 ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0];
1258 av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv, nmvc,
1259 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001260 }
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001261 } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
1262 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
Imdad Sardharwallac23ad632017-11-28 14:12:38 +00001263 int nmv_ctx = av1_nmv_ctx(
1264 mbmi_ext->ref_mv_count[rf_type], mbmi_ext->ref_mv_stack[rf_type], 1,
1265 mbmi->ref_mv_idx + (mode == NEAR_NEWMV ? 1 : 0));
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001266 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1267 av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv,
1268 &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv, nmvc,
1269 allow_hp);
1270 } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
1271 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
Imdad Sardharwallac23ad632017-11-28 14:12:38 +00001272 int nmv_ctx = av1_nmv_ctx(
1273 mbmi_ext->ref_mv_count[rf_type], mbmi_ext->ref_mv_stack[rf_type], 0,
1274 mbmi->ref_mv_idx + (mode == NEW_NEARMV ? 1 : 0));
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001275 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1276 av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv,
1277 &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv, nmvc,
1278 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001279 }
1280
Yaowu Xuc27fc142016-08-22 16:08:15 -07001281 if (cpi->common.reference_mode != COMPOUND_REFERENCE &&
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001282 cpi->common.allow_interintra_compound && is_interintra_allowed(mbmi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001283 const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
1284 const int bsize_group = size_group_lookup[bsize];
Thomas Daviescff91712017-07-07 11:49:55 +01001285 aom_write_symbol(w, interintra, ec_ctx->interintra_cdf[bsize_group], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001286 if (interintra) {
Thomas Davies299ff042017-06-27 13:41:59 +01001287 aom_write_symbol(w, mbmi->interintra_mode,
1288 ec_ctx->interintra_mode_cdf[bsize_group],
1289 INTERINTRA_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001290 if (is_interintra_wedge_used(bsize)) {
Thomas Daviescff91712017-07-07 11:49:55 +01001291 aom_write_symbol(w, mbmi->use_wedge_interintra,
1292 ec_ctx->wedge_interintra_cdf[bsize], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001293 if (mbmi->use_wedge_interintra) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001294 aom_write_literal(w, mbmi->interintra_wedge_index,
1295 get_wedge_bits_lookup(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001296 assert(mbmi->interintra_wedge_sign == 0);
1297 }
1298 }
1299 }
1300 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001301
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001302 if (mbmi->ref_frame[1] != INTRA_FRAME) write_motion_mode(cm, xd, mi, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001303
Cheng Chen33a13d92017-11-28 16:49:59 -08001304#if CONFIG_JNT_COMP
 1305  // First, write idx to indicate the current compound inter prediction mode group
1306 // Group A (0): jnt_comp, compound_average
1307 // Group B (1): interintra, compound_segment, wedge
1308 if (has_second_ref(mbmi)) {
Zoe Liu5f11e912017-12-05 23:23:56 -08001309 const int masked_compound_used =
1310 is_any_masked_compound_used(bsize) && cm->allow_masked_compound;
Cheng Chen5a881722017-11-30 17:05:10 -08001311
Zoe Liu5f11e912017-12-05 23:23:56 -08001312 if (masked_compound_used) {
Cheng Chen5a881722017-11-30 17:05:10 -08001313 const int ctx_comp_group_idx = get_comp_group_idx_context(xd);
1314 aom_write_symbol(w, mbmi->comp_group_idx,
1315 ec_ctx->comp_group_idx_cdf[ctx_comp_group_idx], 2);
Zoe Liu5f11e912017-12-05 23:23:56 -08001316 } else {
1317 assert(mbmi->comp_group_idx == 0);
Cheng Chen5a881722017-11-30 17:05:10 -08001318 }
Cheng Chen33a13d92017-11-28 16:49:59 -08001319
1320 if (mbmi->comp_group_idx == 0) {
1321 if (mbmi->compound_idx)
1322 assert(mbmi->interinter_compound_type == COMPOUND_AVERAGE);
1323
1324 const int comp_index_ctx = get_comp_index_context(cm, xd);
1325 aom_write_symbol(w, mbmi->compound_idx,
1326 ec_ctx->compound_index_cdf[comp_index_ctx], 2);
1327 } else {
Zoe Liu5f11e912017-12-05 23:23:56 -08001328 assert(cpi->common.reference_mode != SINGLE_REFERENCE &&
1329 is_inter_compound_mode(mbmi->mode) &&
1330 mbmi->motion_mode == SIMPLE_TRANSLATION);
1331 assert(masked_compound_used);
1332 // compound_segment, wedge
Cheng Chen33a13d92017-11-28 16:49:59 -08001333 assert(mbmi->interinter_compound_type == COMPOUND_WEDGE ||
1334 mbmi->interinter_compound_type == COMPOUND_SEG);
Cheng Chen33a13d92017-11-28 16:49:59 -08001335
Zoe Liu5f11e912017-12-05 23:23:56 -08001336 if (is_interinter_compound_used(COMPOUND_WEDGE, bsize))
1337 aom_write_symbol(w, mbmi->interinter_compound_type - 1,
1338 ec_ctx->compound_type_cdf[bsize],
1339 COMPOUND_TYPES - 1);
1340
1341 if (mbmi->interinter_compound_type == COMPOUND_WEDGE) {
1342 assert(is_interinter_compound_used(COMPOUND_WEDGE, bsize));
1343 aom_write_literal(w, mbmi->wedge_index, get_wedge_bits_lookup(bsize));
1344 aom_write_bit(w, mbmi->wedge_sign);
1345 } else {
1346 assert(mbmi->interinter_compound_type == COMPOUND_SEG);
1347 aom_write_literal(w, mbmi->mask_type, MAX_SEG_MASK_BITS);
Cheng Chen33a13d92017-11-28 16:49:59 -08001348 }
1349 }
1350 }
1351#else // CONFIG_JNT_COMP
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001352 if (cpi->common.reference_mode != SINGLE_REFERENCE &&
Zoe Liu85b66462017-04-20 14:28:19 -07001353 is_inter_compound_mode(mbmi->mode) &&
Zoe Liu85b66462017-04-20 14:28:19 -07001354 mbmi->motion_mode == SIMPLE_TRANSLATION &&
Zoe Liu85b66462017-04-20 14:28:19 -07001355 is_any_masked_compound_used(bsize)) {
Cheng Chen33a13d92017-11-28 16:49:59 -08001356 if (cm->allow_masked_compound) {
Sarah Parker680b9b12017-08-16 18:55:34 -07001357 if (!is_interinter_compound_used(COMPOUND_WEDGE, bsize))
1358 aom_write_bit(w, mbmi->interinter_compound_type == COMPOUND_AVERAGE);
1359 else
Cheng Chenbdd6ca82017-10-23 22:34:25 -07001360 aom_write_symbol(w, mbmi->interinter_compound_type,
1361 ec_ctx->compound_type_cdf[bsize], COMPOUND_TYPES);
Sarah Parker680b9b12017-08-16 18:55:34 -07001362 if (is_interinter_compound_used(COMPOUND_WEDGE, bsize) &&
1363 mbmi->interinter_compound_type == COMPOUND_WEDGE) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001364 aom_write_literal(w, mbmi->wedge_index, get_wedge_bits_lookup(bsize));
1365 aom_write_bit(w, mbmi->wedge_sign);
1366 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001367 if (mbmi->interinter_compound_type == COMPOUND_SEG) {
Cheng Chenbdd6ca82017-10-23 22:34:25 -07001368 aom_write_literal(w, mbmi->mask_type, MAX_SEG_MASK_BITS);
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001369 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001370 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001371 }
Cheng Chen33a13d92017-11-28 16:49:59 -08001372#endif // CONFIG_JNT_COMP
Yaowu Xuc27fc142016-08-22 16:08:15 -07001373
Debargha Mukherjee0df711f2017-05-02 16:00:20 -07001374 write_mb_interp_filter(cpi, xd, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001375 }
1376
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001377#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001378 av1_write_tx_type(cm, xd, w);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001379#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07001380}
1381
Hui Suc2232cf2017-10-11 17:32:56 -07001382#if CONFIG_INTRABC
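// Signals whether the block uses intra block copy. For intrabc blocks the
// luma and chroma modes are DC_PRED, the (variable) transform sizes are
// written when the block is not skipped, and the displacement vector is
// coded relative to its reference DV.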
1383static void write_intrabc_info(AV1_COMMON *cm, MACROBLOCKD *xd,
1384 const MB_MODE_INFO_EXT *mbmi_ext,
1385 int enable_tx_size, aom_writer *w) {
1386 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
1387 int use_intrabc = is_intrabc_block(mbmi);
1388 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1389 aom_write_symbol(w, use_intrabc, ec_ctx->intrabc_cdf, 2);
1390 if (use_intrabc) {
1391 assert(mbmi->mode == DC_PRED);
1392 assert(mbmi->uv_mode == UV_DC_PRED);
Hui Su12546aa2017-10-13 16:10:01 -07001393 if ((enable_tx_size && !mbmi->skip)) {
Hui Su12546aa2017-10-13 16:10:01 -07001394 const BLOCK_SIZE bsize = mbmi->sb_type;
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001395 const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, bsize, 0);
Hui Su12546aa2017-10-13 16:10:01 -07001396 const int bh = tx_size_high_unit[max_tx_size];
1397 const int bw = tx_size_wide_unit[max_tx_size];
1398 const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
1399 const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
Hui Su12546aa2017-10-13 16:10:01 -07001400 int idx, idy;
1401 for (idy = 0; idy < height; idy += bh) {
1402 for (idx = 0; idx < width; idx += bw) {
Debargha Mukherjeeedc73462017-10-31 15:13:32 -07001403 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, 0, idy, idx, w);
Hui Su12546aa2017-10-13 16:10:01 -07001404 }
1405 }
Hui Su12546aa2017-10-13 16:10:01 -07001406 } else {
Hui Su12546aa2017-10-13 16:10:01 -07001407 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, mbmi->skip, xd);
Hui Su12546aa2017-10-13 16:10:01 -07001408 }
Hui Suc2232cf2017-10-11 17:32:56 -07001409 int_mv dv_ref = mbmi_ext->ref_mvs[INTRA_FRAME][0];
1410 av1_encode_dv(w, &mbmi->mv[0].as_mv, &dv_ref.as_mv, &ec_ctx->ndvc);
Sebastien Alaiwan3bac9922017-11-02 12:34:41 +01001411#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001412 av1_write_tx_type(cm, xd, w);
Sebastien Alaiwan3bac9922017-11-02 12:34:41 +01001413#endif // !CONFIG_TXK_SEL
Hui Suc2232cf2017-10-11 17:32:56 -07001414 }
1415}
1416#endif // CONFIG_INTRABC
1417
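// Key-frame / intra-only counterpart of pack_inter_mode_mvs: writes the
// segment id, skip flag, CDEF strength, delta-q and delta-LF, optional intra
// block copy info, transform size, the luma and chroma intra modes with
// their angle deltas and CfL parameters, palette and filter-intra info, and
// (without CONFIG_TXK_SEL) the transform type.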
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001418static void write_mb_modes_kf(AV1_COMP *cpi, MACROBLOCKD *xd,
Alex Converse44c2bad2017-05-11 09:36:10 -07001419#if CONFIG_INTRABC
1420 const MB_MODE_INFO_EXT *mbmi_ext,
1421#endif // CONFIG_INTRABC
Jingning Han36fe3202017-02-20 22:31:49 -08001422 const int mi_row, const int mi_col,
Angie Chiangc31ea682017-04-13 16:20:54 -07001423 aom_writer *w) {
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001424 AV1_COMMON *const cm = &cpi->common;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001425 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001426 const struct segmentation *const seg = &cm->seg;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001427 struct segmentation_probs *const segp = &ec_ctx->seg;
Angie Chiangc31ea682017-04-13 16:20:54 -07001428 const MODE_INFO *const mi = xd->mi[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001429 const MODE_INFO *const above_mi = xd->above_mi;
1430 const MODE_INFO *const left_mi = xd->left_mi;
1431 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1432 const BLOCK_SIZE bsize = mbmi->sb_type;
Luc Trudeau866da792018-02-12 11:13:34 -05001433 const PREDICTION_MODE mode = mbmi->mode;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001434
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001435#if CONFIG_SPATIAL_SEGMENTATION
1436 if (cm->preskip_segid && seg->update_map)
1437 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
1438#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001439 if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id);
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001440#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001441
Alex Converse619576b2017-05-10 15:14:18 -07001442 const int skip = write_skip(cm, xd, mbmi->segment_id, mi, w);
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001443
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001444#if CONFIG_SPATIAL_SEGMENTATION
1445 if (!cm->preskip_segid && seg->update_map)
1446 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, skip);
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +01001447#endif
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001448
1449 write_cdef(cm, w, skip, mi_col, mi_row);
1450
Arild Fuldseth07441162016-08-15 15:07:52 +02001451 if (cm->delta_q_present_flag) {
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001452 int super_block_upper_left =
1453 ((mi_row & (cm->seq_params.mib_size - 1)) == 0) &&
1454 ((mi_col & (cm->seq_params.mib_size - 1)) == 0);
1455 if ((bsize != cm->seq_params.sb_size || skip == 0) &&
1456 super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01001457 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01001458 int reduced_delta_qindex =
1459 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00001460 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001461 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07001462#if CONFIG_EXT_DELTA_Q
Cheng Chena97394f2017-09-27 15:05:14 -07001463#if CONFIG_LOOPFILTER_LEVEL
1464 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07001465 if (cm->delta_lf_multi) {
1466 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id) {
1467 int reduced_delta_lflevel =
1468 (mbmi->curr_delta_lf[lf_id] - xd->prev_delta_lf[lf_id]) /
1469 cm->delta_lf_res;
1470 write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
1471 xd->prev_delta_lf[lf_id] = mbmi->curr_delta_lf[lf_id];
1472 }
1473 } else {
Cheng Chena97394f2017-09-27 15:05:14 -07001474 int reduced_delta_lflevel =
Cheng Chen880166a2017-10-02 17:48:48 -07001475 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
Cheng Chena97394f2017-09-27 15:05:14 -07001476 cm->delta_lf_res;
Cheng Chen880166a2017-10-02 17:48:48 -07001477 write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
1478 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
Cheng Chena97394f2017-09-27 15:05:14 -07001479 }
1480 }
1481#else
Fangwen Fu231fe422017-04-24 17:52:29 -07001482 if (cm->delta_lf_present_flag) {
1483 int reduced_delta_lflevel =
1484 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
1485 cm->delta_lf_res;
1486 write_delta_lflevel(cm, xd, reduced_delta_lflevel, w);
1487 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
1488 }
Cheng Chena97394f2017-09-27 15:05:14 -07001489#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07001490#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02001491 }
1492 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001493
Alex Conversef71808c2017-06-06 12:21:17 -07001494 int enable_tx_size = cm->tx_mode == TX_MODE_SELECT &&
Rupert Swarbrickfcff0b22017-10-05 09:26:04 +01001495 block_signals_txsize(bsize) &&
Alex Conversef71808c2017-06-06 12:21:17 -07001496 !xd->lossless[mbmi->segment_id];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001497
Alex Converse28744302017-04-13 14:46:22 -07001498#if CONFIG_INTRABC
Hui Sueb2fd5c2017-12-15 14:38:01 -08001499 if (av1_allow_intrabc(cm)) {
Hui Suc2232cf2017-10-11 17:32:56 -07001500 write_intrabc_info(cm, xd, mbmi_ext, enable_tx_size, w);
1501 if (is_intrabc_block(mbmi)) return;
Alex Converse28744302017-04-13 14:46:22 -07001502 }
1503#endif // CONFIG_INTRABC
Hui Suc2232cf2017-10-11 17:32:56 -07001504
Alex Conversef71808c2017-06-06 12:21:17 -07001505 if (enable_tx_size) write_selected_tx_size(cm, xd, w);
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001506#if CONFIG_INTRABC
Hui Su12546aa2017-10-13 16:10:01 -07001507 if (cm->allow_screen_content_tools)
1508 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, mbmi->skip, xd);
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001509#endif // CONFIG_INTRABC
Alex Converse28744302017-04-13 14:46:22 -07001510
Luc Trudeau866da792018-02-12 11:13:34 -05001511 write_intra_mode_kf(ec_ctx, mi, above_mi, left_mi, mode, w);
1512
1513 const int use_angle_delta = av1_use_angle_delta(bsize);
1514 if (use_angle_delta && av1_is_directional_mode(mode, bsize)) {
1515 write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_Y],
1516 ec_ctx->angle_delta_cdf[mode - V_PRED]);
1517 }
Jingning Han0b7cbe62017-03-08 10:22:47 -08001518
David Barkerc2a680e2018-02-07 15:53:53 +00001519#if CONFIG_MONO_VIDEO
1520 if (!cm->seq_params.monochrome &&
1521 is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
1522 xd->plane[1].subsampling_y))
1523#else
Jingning Hand3a64432017-04-06 17:04:17 -07001524 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
David Barkerc2a680e2018-02-07 15:53:53 +00001525 xd->plane[1].subsampling_y))
1526#endif // CONFIG_MONO_VIDEO
1527 {
Luc Trudeau866da792018-02-12 11:13:34 -05001528 const UV_PREDICTION_MODE uv_mode = mbmi->uv_mode;
David Michael Barrcb3a8ef2018-01-06 15:48:49 +09001529#if !CONFIG_CFL
Luc Trudeau866da792018-02-12 11:13:34 -05001530 write_intra_uv_mode(ec_ctx, uv_mode, mode, w);
David Michael Barrcb3a8ef2018-01-06 15:48:49 +09001531#else
Luc Trudeau866da792018-02-12 11:13:34 -05001532 write_intra_uv_mode(ec_ctx, uv_mode, mode, is_cfl_allowed(mbmi), w);
1533 if (uv_mode == UV_CFL_PRED)
David Michael Barr23198662017-06-19 23:19:48 +09001534 write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
Luc Trudeauf5334002017-04-25 12:21:26 -04001535#endif
Luc Trudeau866da792018-02-12 11:13:34 -05001536 if (use_angle_delta &&
1537 av1_is_directional_mode(get_uv_mode(uv_mode), bsize)) {
1538 write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_UV],
1539 ec_ctx->angle_delta_cdf[uv_mode - V_PRED]);
1540 }
Luc Trudeau2c317902017-04-28 11:06:50 -04001541 }
Debargha Mukherjee6ea917e2017-10-19 09:31:29 -07001542
Hui Sue87fb232017-10-05 15:00:15 -07001543 if (av1_allow_palette(cm->allow_screen_content_tools, bsize))
Hui Su8b618f62017-12-20 12:03:35 -08001544 write_palette_mode_info(cm, xd, mi, mi_row, mi_col, w);
hui su5db97432016-10-14 16:10:14 -07001545#if CONFIG_FILTER_INTRA
Yue Chen4eba69b2017-11-09 22:37:35 -08001546 write_filter_intra_mode_info(xd, mbmi, w);
hui su5db97432016-10-14 16:10:14 -07001547#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001548
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001549#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001550 av1_write_tx_type(cm, xd, w);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001551#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07001552}
1553
Angie Chiangd4022822016-11-02 18:30:25 -07001554#if CONFIG_RD_DEBUG
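// Debug helpers: dump_mode_info prints the key fields of a MODE_INFO, and
// rd_token_stats_mismatch reports a per-plane mismatch between the RD-time
// and pack-time coefficient cost maps.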
1555static void dump_mode_info(MODE_INFO *mi) {
1556 printf("\nmi->mbmi.mi_row == %d\n", mi->mbmi.mi_row);
1557 printf("&& mi->mbmi.mi_col == %d\n", mi->mbmi.mi_col);
1558 printf("&& mi->mbmi.sb_type == %d\n", mi->mbmi.sb_type);
1559 printf("&& mi->mbmi.tx_size == %d\n", mi->mbmi.tx_size);
Jingning Han2fac8a42017-12-14 16:26:00 -08001560 printf("&& mi->mbmi.mode == %d\n", mi->mbmi.mode);
Angie Chiangd4022822016-11-02 18:30:25 -07001561}
Angie Chiangd02001d2016-11-06 15:31:49 -08001562static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats,
1563 int plane) {
1564 if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) {
1565 int r, c;
1566 printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n",
1567 plane, rd_stats->txb_coeff_cost[plane], token_stats->cost);
Angie Chiangd02001d2016-11-06 15:31:49 -08001568 printf("rd txb_coeff_cost_map\n");
1569 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
1570 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
1571 printf("%d ", rd_stats->txb_coeff_cost_map[plane][r][c]);
1572 }
1573 printf("\n");
1574 }
1575
1576 printf("pack txb_coeff_cost_map\n");
1577 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
1578 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
1579 printf("%d ", token_stats->txb_coeff_cost_map[r][c]);
1580 }
1581 printf("\n");
1582 }
Angie Chiangd02001d2016-11-06 15:31:49 -08001583 return 1;
1584 }
1585 return 0;
1586}
Angie Chiangd4022822016-11-02 18:30:25 -07001587#endif
1588
Di Chen56586622017-06-09 13:49:44 -07001589#if ENC_MISMATCH_DEBUG
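// Prints the mode decisions of inter blocks for a chosen frame so that an
// encoder/decoder mismatch can be localised.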
1590static void enc_dump_logs(AV1_COMP *cpi, int mi_row, int mi_col) {
1591 AV1_COMMON *const cm = &cpi->common;
1592 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1593 MODE_INFO *m;
1594 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
1595 m = xd->mi[0];
1596 if (is_inter_block(&m->mbmi)) {
Zoe Liuf40a9572017-10-13 12:37:19 -07001597#define FRAME_TO_CHECK 11
Zoe Liu17af2742017-10-06 10:36:42 -07001598 if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
Di Chen56586622017-06-09 13:49:44 -07001599 const MB_MODE_INFO *const mbmi = &m->mbmi;
1600 const BLOCK_SIZE bsize = mbmi->sb_type;
1601
1602 int_mv mv[2];
1603 int is_comp_ref = has_second_ref(&m->mbmi);
1604 int ref;
1605
1606 for (ref = 0; ref < 1 + is_comp_ref; ++ref)
1607 mv[ref].as_mv = m->mbmi.mv[ref].as_mv;
1608
1609 if (!is_comp_ref) {
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001610 mv[1].as_int = 0;
Di Chen56586622017-06-09 13:49:44 -07001611 }
Di Chen56586622017-06-09 13:49:44 -07001612
Di Chen56586622017-06-09 13:49:44 -07001613 MACROBLOCK *const x = &cpi->td.mb;
Di Chen56586622017-06-09 13:49:44 -07001614 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
Zoe Liuf40a9572017-10-13 12:37:19 -07001615 const int16_t mode_ctx =
1616 is_comp_ref ? mbmi_ext->compound_mode_context[mbmi->ref_frame[0]]
1617 : av1_mode_context_analyzer(mbmi_ext->mode_context,
Luc Trudeau15a18e32017-12-13 14:15:25 -05001618 mbmi->ref_frame);
Zoe Liuf40a9572017-10-13 12:37:19 -07001619
Di Chen56586622017-06-09 13:49:44 -07001620 const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
1621 int16_t zeromv_ctx = -1;
1622 int16_t refmv_ctx = -1;
Zoe Liuf40a9572017-10-13 12:37:19 -07001623
Di Chen56586622017-06-09 13:49:44 -07001624 if (mbmi->mode != NEWMV) {
Sarah Parker2b9ec2e2017-10-30 17:34:08 -07001625 zeromv_ctx = (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
Jingning Han59b12632018-02-12 10:44:52 -08001626 if (mbmi->mode != GLOBALMV)
Di Chen56586622017-06-09 13:49:44 -07001627 refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
Di Chen56586622017-06-09 13:49:44 -07001628 }
1629
Zoe Liuf40a9572017-10-13 12:37:19 -07001630#if CONFIG_EXT_SKIP
1631 printf(
1632 "=== ENCODER ===: "
1633 "Frame=%d, (mi_row,mi_col)=(%d,%d), skip_mode=%d, mode=%d, bsize=%d, "
1634 "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
1635 "ref[1]=%d, motion_mode=%d, mode_ctx=%d, "
1636 "newmv_ctx=%d, zeromv_ctx=%d, refmv_ctx=%d, tx_size=%d\n",
1637 cm->current_video_frame, mi_row, mi_col, mbmi->skip_mode, mbmi->mode,
1638 bsize, cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col,
1639 mv[1].as_mv.row, mv[1].as_mv.col, mbmi->ref_frame[0],
1640 mbmi->ref_frame[1], mbmi->motion_mode, mode_ctx, newmv_ctx,
1641 zeromv_ctx, refmv_ctx, mbmi->tx_size);
1642#else
Di Chen56586622017-06-09 13:49:44 -07001643 printf(
1644 "=== ENCODER ===: "
1645 "Frame=%d, (mi_row,mi_col)=(%d,%d), mode=%d, bsize=%d, "
1646 "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
Zoe Liuf40a9572017-10-13 12:37:19 -07001647 "ref[1]=%d, motion_mode=%d, mode_ctx=%d, "
1648 "newmv_ctx=%d, zeromv_ctx=%d, refmv_ctx=%d, tx_size=%d\n",
Di Chen56586622017-06-09 13:49:44 -07001649 cm->current_video_frame, mi_row, mi_col, mbmi->mode, bsize,
1650 cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col, mv[1].as_mv.row,
1651 mv[1].as_mv.col, mbmi->ref_frame[0], mbmi->ref_frame[1],
Zoe Liuf40a9572017-10-13 12:37:19 -07001652 mbmi->motion_mode, mode_ctx, newmv_ctx, zeromv_ctx, refmv_ctx,
1653 mbmi->tx_size);
1654#endif // CONFIG_EXT_SKIP
Di Chen56586622017-06-09 13:49:44 -07001655 }
1656 }
1657}
1658#endif // ENC_MISMATCH_DEBUG
1659
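// Points xd at the block at (mi_row, mi_col), sets up its mi context and,
// depending on the frame type, writes its mode info via write_mb_modes_kf or
// pack_inter_mode_mvs.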
Yue Chen64550b62017-01-12 12:18:22 -08001660static void write_mbmi_b(AV1_COMP *cpi, const TileInfo *const tile,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001661 aom_writer *w, int mi_row, int mi_col) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001662 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001663 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1664 MODE_INFO *m;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001665 int bh, bw;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001666 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
1667 m = xd->mi[0];
1668
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001669 assert(m->mbmi.sb_type <= cm->seq_params.sb_size ||
Rupert Swarbrick72678572017-08-02 12:05:26 +01001670 (m->mbmi.sb_type >= BLOCK_SIZES && m->mbmi.sb_type < BLOCK_SIZES_ALL));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001671
Jingning Hanc709e1f2016-12-06 14:48:09 -08001672 bh = mi_size_high[m->mbmi.sb_type];
1673 bw = mi_size_wide[m->mbmi.sb_type];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001674
1675 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1676
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001677 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001678#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001679 cm->dependent_horz_tiles,
1680#endif // CONFIG_DEPENDENT_HORZTILES
1681 cm->mi_rows, cm->mi_cols);
Yushin Cho77bba8d2016-11-04 16:36:56 -07001682
Yaowu Xuc27fc142016-08-22 16:08:15 -07001683 if (frame_is_intra_only(cm)) {
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001684#if CONFIG_INTRABC
Hui Su12546aa2017-10-13 16:10:01 -07001685 if (cm->allow_screen_content_tools) {
1686 xd->above_txfm_context =
1687 cm->above_txfm_context + (mi_col << TX_UNIT_WIDE_LOG2);
1688 xd->left_txfm_context = xd->left_txfm_context_buffer +
1689 ((mi_row & MAX_MIB_MASK) << TX_UNIT_HIGH_LOG2);
1690 }
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001691#endif // CONFIG_INTRABC
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001692 write_mb_modes_kf(cpi, xd,
Alex Converse44c2bad2017-05-11 09:36:10 -07001693#if CONFIG_INTRABC
1694 cpi->td.mb.mbmi_ext,
1695#endif // CONFIG_INTRABC
1696 mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001697 } else {
Jingning Han331662e2017-05-30 17:03:32 -07001698 xd->above_txfm_context =
1699 cm->above_txfm_context + (mi_col << TX_UNIT_WIDE_LOG2);
1700 xd->left_txfm_context = xd->left_txfm_context_buffer +
1701 ((mi_row & MAX_MIB_MASK) << TX_UNIT_HIGH_LOG2);
Angie Chiang38edf682017-02-21 15:13:09 -08001702 // has_subpel_mv_component needs the ref frame buffers set up to look
1703 // up if they are scaled. has_subpel_mv_component is in turn needed by
Yaowu Xuc27fc142016-08-22 16:08:15 -07001704 // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
1705 set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
Zoe Liu85b66462017-04-20 14:28:19 -07001706
Di Chen56586622017-06-09 13:49:44 -07001707#if ENC_MISMATCH_DEBUG
Di Chen56586622017-06-09 13:49:44 -07001708 enc_dump_logs(cpi, mi_row, mi_col);
1709#endif // ENC_MISMATCH_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07001710
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001711 pack_inter_mode_mvs(cpi, mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001712 }
Yue Chen64550b62017-01-12 12:18:22 -08001713}
Yaowu Xuc27fc142016-08-22 16:08:15 -07001714
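// Packs the coefficient tokens of one plane of an inter block: the plane is
// covered in max-transform-size steps within the 64x64 luma unit whose
// origin is given by (row, col), calling pack_txb_tokens for each transform
// block.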
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001715static void write_inter_txb_coeff(AV1_COMMON *const cm, MACROBLOCK *const x,
1716 MB_MODE_INFO *const mbmi, aom_writer *w,
1717 const TOKENEXTRA **tok,
1718 const TOKENEXTRA *const tok_end,
1719 TOKEN_STATS *token_stats, const int row,
1720 const int col, int *block, const int plane) {
1721 MACROBLOCKD *const xd = &x->e_mbd;
1722 const struct macroblockd_plane *const pd = &xd->plane[plane];
Debargha Mukherjee19619882017-11-22 13:13:14 -08001723 const BLOCK_SIZE bsize = mbmi->sb_type;
1724 const BLOCK_SIZE bsizec =
1725 scale_chroma_bsize(bsize, pd->subsampling_x, pd->subsampling_y);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001726
Debargha Mukherjee5d149e12017-12-14 12:49:51 -08001727 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsizec, pd);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001728
Debargha Mukherjee19619882017-11-22 13:13:14 -08001729 TX_SIZE max_tx_size = get_vartx_max_txsize(
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001730 xd, plane_bsize, pd->subsampling_x || pd->subsampling_y);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001731 const int step =
1732 tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
1733 const int bkw = tx_size_wide_unit[max_tx_size];
1734 const int bkh = tx_size_high_unit[max_tx_size];
1735
1736 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
1737 int mu_blocks_wide = block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
1738 int mu_blocks_high = block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
1739
1740 int blk_row, blk_col;
1741
1742 const int num_4x4_w = block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
1743 const int num_4x4_h = block_size_high[plane_bsize] >> tx_size_wide_log2[0];
1744
Jingning Hancdbc47f2018-01-12 16:21:07 -08001745 const int unit_height =
1746 AOMMIN(mu_blocks_high + (row >> pd->subsampling_y), num_4x4_h);
1747 const int unit_width =
1748 AOMMIN(mu_blocks_wide + (col >> pd->subsampling_x), num_4x4_w);
1749 for (blk_row = row >> pd->subsampling_y; blk_row < unit_height;
1750 blk_row += bkh) {
1751 for (blk_col = col >> pd->subsampling_x; blk_col < unit_width;
1752 blk_col += bkw) {
Sebastien Alaiwancad5ebc2018-02-20 16:18:20 +01001753 pack_txb_tokens(w, cm, x, tok, tok_end, xd, mbmi, plane, plane_bsize,
1754 cm->bit_depth, *block, blk_row, blk_col, max_tx_size,
1755 token_stats);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001756 *block += step;
1757 }
1758 }
1759}
1760
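// Writes the residual data of a block: palette colour-index tokens for any
// plane coded with a palette and, when the block is not skipped, its
// transform coefficients (directly for intra blocks, through
// write_inter_txb_coeff over 64x64 units for inter blocks).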
Yue Chen64550b62017-01-12 12:18:22 -08001761static void write_tokens_b(AV1_COMP *cpi, const TileInfo *const tile,
1762 aom_writer *w, const TOKENEXTRA **tok,
1763 const TOKENEXTRA *const tok_end, int mi_row,
1764 int mi_col) {
1765 AV1_COMMON *const cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00001766 const int num_planes = av1_num_planes(cm);
Yue Chen64550b62017-01-12 12:18:22 -08001767 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Wei-Ting Lin1d46d902017-06-26 15:57:18 -07001768 const int mi_offset = mi_row * cm->mi_stride + mi_col;
1769 MODE_INFO *const m = *(cm->mi_grid_visible + mi_offset);
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001770 MB_MODE_INFO *const mbmi = &m->mbmi;
Yue Chen64550b62017-01-12 12:18:22 -08001771 int plane;
1772 int bh, bw;
Yushin Cho258a0242017-03-06 13:53:01 -08001773 MACROBLOCK *const x = &cpi->td.mb;
Yue Chen64550b62017-01-12 12:18:22 -08001774 (void)tok;
1775 (void)tok_end;
Wei-Ting Lin1d46d902017-06-26 15:57:18 -07001776 xd->mi = cm->mi_grid_visible + mi_offset;
Yue Chen64550b62017-01-12 12:18:22 -08001777
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001778 assert(mbmi->sb_type <= cm->seq_params.sb_size ||
Rupert Swarbrick72678572017-08-02 12:05:26 +01001779 (mbmi->sb_type >= BLOCK_SIZES && mbmi->sb_type < BLOCK_SIZES_ALL));
Yue Chen64550b62017-01-12 12:18:22 -08001780
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001781 bh = mi_size_high[mbmi->sb_type];
1782 bw = mi_size_wide[mbmi->sb_type];
Yue Chen64550b62017-01-12 12:18:22 -08001783 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1784
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001785 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001786#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001787 cm->dependent_horz_tiles,
1788#endif // CONFIG_DEPENDENT_HORZTILES
1789 cm->mi_rows, cm->mi_cols);
Yue Chen64550b62017-01-12 12:18:22 -08001790
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00001791 for (plane = 0; plane < AOMMIN(2, num_planes); ++plane) {
Fangwen Fub3be9262017-03-06 15:34:28 -08001792 const uint8_t palette_size_plane =
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001793 mbmi->palette_mode_info.palette_size[plane];
Zoe Liuf40a9572017-10-13 12:37:19 -07001794#if CONFIG_EXT_SKIP
1795 assert(!mbmi->skip_mode || !palette_size_plane);
1796#endif // CONFIG_EXT_SKIP
Fangwen Fub3be9262017-03-06 15:34:28 -08001797 if (palette_size_plane > 0) {
Alex Converseed37d012017-04-24 11:15:24 -07001798#if CONFIG_INTRABC
1799 assert(mbmi->use_intrabc == 0);
1800#endif
Hui Su8b618f62017-12-20 12:03:35 -08001801 assert(av1_allow_palette(cm->allow_screen_content_tools, mbmi->sb_type));
Fangwen Fub3be9262017-03-06 15:34:28 -08001802 int rows, cols;
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001803 av1_get_block_dimensions(mbmi->sb_type, plane, xd, NULL, NULL, &rows,
Fangwen Fub3be9262017-03-06 15:34:28 -08001804 &cols);
1805 assert(*tok < tok_end);
Sarah Parker99e7daa2017-08-29 10:30:13 -07001806 pack_map_tokens(w, tok, palette_size_plane, rows * cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001807 }
Fangwen Fub3be9262017-03-06 15:34:28 -08001808 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001809
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001810 if (!mbmi->skip) {
Jingning Hanad54a982018-01-12 14:40:29 -08001811 if (!is_inter_block(mbmi))
1812 av1_write_coeffs_mb(cm, x, mi_row, mi_col, w, mbmi->sb_type);
1813
Jingning Hancdbc47f2018-01-12 16:21:07 -08001814 if (is_inter_block(mbmi)) {
1815 int block[MAX_MB_PLANE] = { 0 };
1816 const struct macroblockd_plane *const y_pd = &xd->plane[0];
1817 const BLOCK_SIZE plane_bsize = get_plane_block_size(mbmi->sb_type, y_pd);
Jingning Han42a0fb32016-10-31 10:43:31 -07001818 const int num_4x4_w =
1819 block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
1820 const int num_4x4_h =
1821 block_size_high[plane_bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001822 int row, col;
Angie Chiangd4022822016-11-02 18:30:25 -07001823 TOKEN_STATS token_stats;
Angie Chiangd02001d2016-11-06 15:31:49 -08001824 init_token_stats(&token_stats);
Angie Chiangd4022822016-11-02 18:30:25 -07001825
Jingning Hancdbc47f2018-01-12 16:21:07 -08001826 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, y_pd);
Jingning Hanc2b797f2017-07-19 09:37:11 -07001827 int mu_blocks_wide =
1828 block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
1829 int mu_blocks_high =
1830 block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
1831
1832 mu_blocks_wide = AOMMIN(num_4x4_w, mu_blocks_wide);
1833 mu_blocks_high = AOMMIN(num_4x4_h, mu_blocks_high);
1834
Jingning Hancdbc47f2018-01-12 16:21:07 -08001835 for (row = 0; row < num_4x4_h; row += mu_blocks_high) {
1836 for (col = 0; col < num_4x4_w; col += mu_blocks_wide) {
1837 for (plane = 0; plane < num_planes && is_inter_block(mbmi); ++plane) {
1838 const struct macroblockd_plane *const pd = &xd->plane[plane];
1839 if (!is_chroma_reference(mi_row, mi_col, mbmi->sb_type,
1840 pd->subsampling_x, pd->subsampling_y)) {
Jingning Hancdbc47f2018-01-12 16:21:07 -08001841 continue;
1842 }
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001843 write_inter_txb_coeff(cm, x, mbmi, w, tok, tok_end, &token_stats,
Jingning Hancdbc47f2018-01-12 16:21:07 -08001844 row, col, &block[plane], plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001845 }
1846 }
Angie Chiangd02001d2016-11-06 15:31:49 -08001847#if CONFIG_RD_DEBUG
Angie Chiang3963d632016-11-10 18:41:40 -08001848 if (mbmi->sb_type >= BLOCK_8X8 &&
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001849 rd_token_stats_mismatch(&mbmi->rd_stats, &token_stats, plane)) {
Angie Chiangd02001d2016-11-06 15:31:49 -08001850 dump_mode_info(m);
1851 assert(0);
1852 }
Jingning Hanfe45b212016-11-22 10:30:23 -08001853#endif // CONFIG_RD_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07001854 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001855 }
1856 }
1857}
1858
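// Writes one coding block: its mode info followed by its tokens.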
Yue Chen64550b62017-01-12 12:18:22 -08001859static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
1860 aom_writer *w, const TOKENEXTRA **tok,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001861 const TOKENEXTRA *const tok_end, int mi_row,
1862 int mi_col) {
1863 write_mbmi_b(cpi, tile, w, mi_row, mi_col);
Jingning Hanf5a4d3b2017-08-27 23:01:19 -07001864
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001865 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
Yue Chen64550b62017-01-12 12:18:22 -08001866}
1867
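// Signals the partition type at a partition point (BLOCK_8X8 and above).
// Blocks extending past both the right and bottom frame edges are implicitly
// PARTITION_SPLIT and nothing is coded; blocks past only one edge code a
// single split / no-split decision from a CDF gathered over the allowed
// partitions.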
Yaowu Xuf883b422016-08-30 14:01:10 -07001868static void write_partition(const AV1_COMMON *const cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001869 const MACROBLOCKD *const xd, int hbs, int mi_row,
1870 int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize,
Yaowu Xuf883b422016-08-30 14:01:10 -07001871 aom_writer *w) {
Alex Converse55c6bde2017-01-12 15:55:31 -08001872 const int is_partition_point = bsize >= BLOCK_8X8;
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00001873
Jingning Hanbf9c6b72016-12-14 14:50:45 -08001874 if (!is_partition_point) return;
1875
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001876 const int has_rows = (mi_row + hbs) < cm->mi_rows;
1877 const int has_cols = (mi_col + hbs) < cm->mi_cols;
1878 const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
1879 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1880
1881 if (!has_rows && !has_cols) {
1882 assert(p == PARTITION_SPLIT);
1883 return;
1884 }
1885
Yaowu Xuc27fc142016-08-22 16:08:15 -07001886 if (has_rows && has_cols) {
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001887 aom_write_symbol(w, p, ec_ctx->partition_cdf[ctx],
1888 partition_cdf_length(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001889 } else if (!has_rows && has_cols) {
1890 assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001891 assert(bsize > BLOCK_8X8);
1892 aom_cdf_prob cdf[2];
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001893 partition_gather_vert_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001894 aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001895 } else {
1896 assert(has_rows && !has_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001897 assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001898 assert(bsize > BLOCK_8X8);
1899 aom_cdf_prob cdf[2];
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001900 partition_gather_horz_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001901 aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001902 }
1903}
1904
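// Recursively writes one superblock subtree: loop-restoration coefficients
// for any restoration units anchored in this block, the partition type, then
// either the single coding block or the child partitions, and finally the
// partition context update.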
Yaowu Xuf883b422016-08-30 14:01:10 -07001905static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile,
1906 aom_writer *const w, const TOKENEXTRA **tok,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001907 const TOKENEXTRA *const tok_end, int mi_row,
1908 int mi_col, BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001909 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001910 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Jingning Hanc709e1f2016-12-06 14:48:09 -08001911 const int hbs = mi_size_wide[bsize] / 2;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001912#if CONFIG_EXT_PARTITION_TYPES
1913 const int quarter_step = mi_size_wide[bsize] / 4;
1914 int i;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01001915#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07001916 const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
1917 const BLOCK_SIZE subsize = get_subsize(bsize, partition);
Jingning Han52261842016-12-14 12:17:49 -08001918
Yaowu Xuc27fc142016-08-22 16:08:15 -07001919 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
1920
Imdad Sardharwalla7d2e5c92018-01-05 18:41:00 +00001921#if CONFIG_LOOP_RESTORATION
Debargha Mukherjeea78c8f52018-01-31 11:14:38 -08001922 const int num_planes = av1_num_planes(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00001923 for (int plane = 0; plane < num_planes; ++plane) {
Imdad Sardharwalla7d2e5c92018-01-05 18:41:00 +00001924 int rcol0, rcol1, rrow0, rrow1, tile_tl_idx;
1925 if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
1926 &rcol0, &rcol1, &rrow0, &rrow1,
1927 &tile_tl_idx)) {
1928 const int rstride = cm->rst_info[plane].horz_units_per_tile;
1929 for (int rrow = rrow0; rrow < rrow1; ++rrow) {
1930 for (int rcol = rcol0; rcol < rcol1; ++rcol) {
1931 const int rtile_idx = tile_tl_idx + rcol + rrow * rstride;
1932 const RestorationUnitInfo *rui =
1933 &cm->rst_info[plane].unit_info[rtile_idx];
1934 loop_restoration_write_sb_coeffs(cm, xd, rui, w, plane);
1935 }
1936 }
1937 }
1938 }
1939#endif
1940
Yaowu Xuc27fc142016-08-22 16:08:15 -07001941 write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001942 switch (partition) {
1943 case PARTITION_NONE:
1944 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1945 break;
1946 case PARTITION_HORZ:
1947 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1948 if (mi_row + hbs < cm->mi_rows)
1949 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
1950 break;
1951 case PARTITION_VERT:
1952 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1953 if (mi_col + hbs < cm->mi_cols)
1954 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
1955 break;
1956 case PARTITION_SPLIT:
1957 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
1958 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs, subsize);
1959 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col, subsize);
1960 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs,
1961 subsize);
1962 break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001963#if CONFIG_EXT_PARTITION_TYPES
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001964 case PARTITION_HORZ_A:
1965 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1966 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
1967 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
1968 break;
1969 case PARTITION_HORZ_B:
1970 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1971 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
1972 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
1973 break;
1974 case PARTITION_VERT_A:
1975 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1976 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
1977 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
1978 break;
1979 case PARTITION_VERT_B:
1980 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1981 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
1982 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
1983 break;
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001984 case PARTITION_HORZ_4:
1985 for (i = 0; i < 4; ++i) {
1986 int this_mi_row = mi_row + i * quarter_step;
1987 if (i > 0 && this_mi_row >= cm->mi_rows) break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001988
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001989 write_modes_b(cpi, tile, w, tok, tok_end, this_mi_row, mi_col);
1990 }
1991 break;
1992 case PARTITION_VERT_4:
1993 for (i = 0; i < 4; ++i) {
1994 int this_mi_col = mi_col + i * quarter_step;
1995 if (i > 0 && this_mi_col >= cm->mi_cols) break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001996
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001997 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, this_mi_col);
1998 }
1999 break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002000#endif // CONFIG_EXT_PARTITION_TYPES
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002001 default: assert(0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002002 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002003
2004// update partition context
2005#if CONFIG_EXT_PARTITION_TYPES
2006 update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
2007#else
2008 if (bsize >= BLOCK_8X8 &&
2009 (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
2010 update_partition_context(xd, mi_row, mi_col, subsize, bsize);
David Barkerf8935c92016-10-26 14:54:06 +01002011#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002012}
2013
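// Writes every superblock of one tile in raster order. The above context and
// the delta-q / delta-LF trackers are reset at the start of the tile, and
// the left context is reset at the start of each superblock row.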
Yaowu Xuf883b422016-08-30 14:01:10 -07002014static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile,
2015 aom_writer *const w, const TOKENEXTRA **tok,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002016 const TOKENEXTRA *const tok_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002017 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002018 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
2019 const int mi_row_start = tile->mi_row_start;
2020 const int mi_row_end = tile->mi_row_end;
2021 const int mi_col_start = tile->mi_col_start;
2022 const int mi_col_end = tile->mi_col_end;
2023 int mi_row, mi_col;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002024
2025#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08002026 if (!cm->dependent_horz_tiles || mi_row_start == 0 ||
2027 tile->tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002028 av1_zero_above_context(cm, mi_col_start, mi_col_end);
2029 }
2030#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002031 av1_zero_above_context(cm, mi_col_start, mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002032#endif
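  // Reset the delta-q (and, under CONFIG_EXT_DELTA_Q, delta loop-filter)
  // references at the start of each tile so the first coded delta is taken
  // relative to the frame-level base values.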
Arild Fuldseth07441162016-08-15 15:07:52 +02002033 if (cpi->common.delta_q_present_flag) {
2034 xd->prev_qindex = cpi->common.base_qindex;
Fangwen Fu231fe422017-04-24 17:52:29 -07002035#if CONFIG_EXT_DELTA_Q
2036 if (cpi->common.delta_lf_present_flag) {
Cheng Chena97394f2017-09-27 15:05:14 -07002037#if CONFIG_LOOPFILTER_LEVEL
2038 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
2039 xd->prev_delta_lf[lf_id] = 0;
2040#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07002041 xd->prev_delta_lf_from_base = 0;
2042 }
2043#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02002044 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002045
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002046 for (mi_row = mi_row_start; mi_row < mi_row_end;
2047 mi_row += cm->seq_params.mib_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002048 av1_zero_left_context(xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002049
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002050 for (mi_col = mi_col_start; mi_col < mi_col_end;
2051 mi_col += cm->seq_params.mib_size) {
2052 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col,
2053 cm->seq_params.sb_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002054 }
2055 }
2056}
2057
Yaowu Xuc27fc142016-08-22 16:08:15 -07002058#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002059static void encode_restoration_mode(AV1_COMMON *cm,
2060 struct aom_write_bit_buffer *wb) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002061 const int num_planes = av1_num_planes(cm);
Hui Su27df8342017-11-07 15:16:05 -08002062#if CONFIG_INTRABC
Imdad Sardharwallab1dce0a2018-02-12 16:43:59 +00002063
Hui Su27df8342017-11-07 15:16:05 -08002064 if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
2065#endif // CONFIG_INTRABC
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002066 int all_none = 1, chroma_none = 1;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002067 for (int p = 0; p < num_planes; ++p) {
Rupert Swarbrick4596deb2017-11-07 18:06:38 +00002068 RestorationInfo *rsi = &cm->rst_info[p];
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002069 if (rsi->frame_restoration_type != RESTORE_NONE) {
2070 all_none = 0;
2071 chroma_none &= p == 0;
2072 }
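    // The frame-level restoration type for each plane is signalled with two
    // bits: (0,0) NONE, (1,0) WIENER, (1,1) SGRPROJ, (0,1) SWITCHABLE.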
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002073 switch (rsi->frame_restoration_type) {
Debargha Mukherjeea3d4fe52017-05-19 16:22:54 -07002074 case RESTORE_NONE:
2075 aom_wb_write_bit(wb, 0);
2076 aom_wb_write_bit(wb, 0);
2077 break;
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07002078 case RESTORE_WIENER:
2079 aom_wb_write_bit(wb, 1);
2080 aom_wb_write_bit(wb, 0);
2081 break;
2082 case RESTORE_SGRPROJ:
2083 aom_wb_write_bit(wb, 1);
2084 aom_wb_write_bit(wb, 1);
2085 break;
Debargha Mukherjeea3d4fe52017-05-19 16:22:54 -07002086 case RESTORE_SWITCHABLE:
2087 aom_wb_write_bit(wb, 0);
2088 aom_wb_write_bit(wb, 1);
2089 break;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002090 default: assert(0);
2091 }
2092 }
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002093 if (!all_none) {
Yue Chen8628ae42018-02-13 11:05:20 -08002094#if CONFIG_EXT_PARTITION
Imdad Sardharwallab1dce0a2018-02-12 16:43:59 +00002095 assert(cm->seq_params.sb_size == BLOCK_64X64 ||
2096 cm->seq_params.sb_size == BLOCK_128X128);
2097 const int sb_size = cm->seq_params.sb_size == BLOCK_128X128 ? 128 : 64;
Yue Chen8628ae42018-02-13 11:05:20 -08002098#else
2099 assert(cm->seq_params.sb_size == BLOCK_64X64);
2100 const int sb_size = 64;
2101#endif
Imdad Sardharwallab1dce0a2018-02-12 16:43:59 +00002102
Rupert Swarbrick4596deb2017-11-07 18:06:38 +00002103 RestorationInfo *rsi = &cm->rst_info[0];
Imdad Sardharwallab1dce0a2018-02-12 16:43:59 +00002104
2105 assert(rsi->restoration_unit_size >= sb_size);
2106 assert(RESTORATION_TILESIZE_MAX == 256);
2107
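    // Signal the luma restoration unit size relative to the superblock size:
    // for 64x64 superblocks a first bit says whether the unit exceeds 64,
    // and a second bit (whenever the unit exceeds 64) says whether it
    // exceeds 128, giving unit sizes of 64, 128 or 256.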
2108 if (sb_size == 64) {
2109 aom_wb_write_bit(wb, rsi->restoration_unit_size > 64);
2110 }
2111 if (rsi->restoration_unit_size > 64) {
2112 aom_wb_write_bit(wb, rsi->restoration_unit_size > 128);
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08002113 }
2114 }
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002115
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002116 if (num_planes > 1) {
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002117 int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
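    // For subsampled chroma with restoration enabled, one bit signals whether
    // the chroma restoration unit size matches the luma unit size or is the
    // luma size shifted down by the subsampling factor; without subsampling
    // the chroma unit size must equal the luma unit size.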
2118 if (s && !chroma_none) {
Johannb0ef6ff2018-02-08 14:32:21 -08002119 aom_wb_write_bit(wb, cm->rst_info[1].restoration_unit_size !=
2120 cm->rst_info[0].restoration_unit_size);
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002121 assert(cm->rst_info[1].restoration_unit_size ==
2122 cm->rst_info[0].restoration_unit_size ||
2123 cm->rst_info[1].restoration_unit_size ==
2124 (cm->rst_info[0].restoration_unit_size >> s));
2125 assert(cm->rst_info[2].restoration_unit_size ==
2126 cm->rst_info[1].restoration_unit_size);
2127 } else if (!s) {
2128 assert(cm->rst_info[1].restoration_unit_size ==
2129 cm->rst_info[0].restoration_unit_size);
2130 assert(cm->rst_info[2].restoration_unit_size ==
2131 cm->rst_info[1].restoration_unit_size);
2132 }
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07002133 }
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002134}
2135
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002136static void write_wiener_filter(int wiener_win, const WienerInfo *wiener_info,
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002137 WienerInfo *ref_wiener_info, aom_writer *wb) {
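  // Each Wiener tap is coded as a finite subexponential delta from the
  // corresponding tap of the previously coded unit (ref_wiener_info). For the
  // reduced chroma window the outermost taps are not coded and must be zero.
  // The reference filter is updated to this unit's filter at the end.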
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002138 if (wiener_win == WIENER_WIN)
2139 aom_write_primitive_refsubexpfin(
2140 wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2141 WIENER_FILT_TAP0_SUBEXP_K,
2142 ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV,
2143 wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV);
2144 else
2145 assert(wiener_info->vfilter[0] == 0 &&
2146 wiener_info->vfilter[WIENER_WIN - 1] == 0);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002147 aom_write_primitive_refsubexpfin(
2148 wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2149 WIENER_FILT_TAP1_SUBEXP_K,
2150 ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV,
2151 wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV);
2152 aom_write_primitive_refsubexpfin(
2153 wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2154 WIENER_FILT_TAP2_SUBEXP_K,
2155 ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV,
2156 wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV);
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002157 if (wiener_win == WIENER_WIN)
2158 aom_write_primitive_refsubexpfin(
2159 wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2160 WIENER_FILT_TAP0_SUBEXP_K,
2161 ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV,
2162 wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV);
2163 else
2164 assert(wiener_info->hfilter[0] == 0 &&
2165 wiener_info->hfilter[WIENER_WIN - 1] == 0);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002166 aom_write_primitive_refsubexpfin(
2167 wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2168 WIENER_FILT_TAP1_SUBEXP_K,
2169 ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV,
2170 wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV);
2171 aom_write_primitive_refsubexpfin(
2172 wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2173 WIENER_FILT_TAP2_SUBEXP_K,
2174 ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV,
2175 wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV);
2176 memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002177}
2178
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002179static void write_sgrproj_filter(const SgrprojInfo *sgrproj_info,
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002180 SgrprojInfo *ref_sgrproj_info,
2181 aom_writer *wb) {
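  // The self-guided parameter set index (ep) is coded as a raw literal; the
  // two projection coefficients are coded as subexponential deltas from the
  // previously coded unit, which then becomes the new reference.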
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002182 aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002183 aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1,
2184 SGRPROJ_PRJ_SUBEXP_K,
2185 ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
2186 sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0);
2187 aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1,
2188 SGRPROJ_PRJ_SUBEXP_K,
2189 ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
2190 sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1);
2191 memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002192}
2193
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002194static void loop_restoration_write_sb_coeffs(const AV1_COMMON *const cm,
2195 MACROBLOCKD *xd,
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002196 const RestorationUnitInfo *rui,
2197 aom_writer *const w, int plane) {
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002198 const RestorationInfo *rsi = cm->rst_info + plane;
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002199 RestorationType frame_rtype = rsi->frame_restoration_type;
2200 if (frame_rtype == RESTORE_NONE) return;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002201
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002202 const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
2203 WienerInfo *wiener_info = xd->wiener_info + plane;
2204 SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002205 RestorationType unit_rtype = rui->restoration_type;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002206
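  // Per restoration unit: when the frame-level type is SWITCHABLE, the unit's
  // type is coded as a symbol followed by its filter parameters; when the
  // frame-level type is WIENER or SGRPROJ, only an on/off flag is coded,
  // followed by the parameters if the unit is not RESTORE_NONE.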
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002207 if (frame_rtype == RESTORE_SWITCHABLE) {
2208 aom_write_symbol(w, unit_rtype, xd->tile_ctx->switchable_restore_cdf,
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07002209 RESTORE_SWITCHABLE_TYPES);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002210 switch (unit_rtype) {
2211 case RESTORE_WIENER:
2212 write_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, w);
2213 break;
2214 case RESTORE_SGRPROJ:
2215 write_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, w);
2216 break;
2217 default: assert(unit_rtype == RESTORE_NONE); break;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002218 }
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002219 } else if (frame_rtype == RESTORE_WIENER) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002220 aom_write_symbol(w, unit_rtype != RESTORE_NONE,
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07002221 xd->tile_ctx->wiener_restore_cdf, 2);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002222 if (unit_rtype != RESTORE_NONE) {
2223 write_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, w);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002224 }
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002225 } else if (frame_rtype == RESTORE_SGRPROJ) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002226 aom_write_symbol(w, unit_rtype != RESTORE_NONE,
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07002227 xd->tile_ctx->sgrproj_restore_cdf, 2);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002228 if (unit_rtype != RESTORE_NONE) {
2229 write_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, w);
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002230 }
2231 }
2232}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002233#endif // CONFIG_LOOP_RESTORATION
2234
Yaowu Xuf883b422016-08-30 14:01:10 -07002235static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002236 const int num_planes = av1_num_planes(cm);
Cheng Chen07365c92017-12-21 16:37:33 -08002237#if CONFIG_INTRABC
Hui Su27df8342017-11-07 15:16:05 -08002238 if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
Cheng Chen07365c92017-12-21 16:37:33 -08002239#endif // CONFIG_INTRABC
Yaowu Xuc27fc142016-08-22 16:08:15 -07002240 int i;
2241 struct loopfilter *lf = &cm->lf;
2242
Cheng Chen179479f2017-08-04 10:56:39 -07002243// Encode the loop filter levels and sharpness
Cheng Chen13fc8192017-08-19 11:49:28 -07002244#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen76224b02017-12-15 12:21:01 -08002245 aom_wb_write_literal(wb, lf->filter_level[0], 6);
2246 aom_wb_write_literal(wb, lf->filter_level[1], 6);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002247 if (num_planes > 1) {
Cheng Chen76224b02017-12-15 12:21:01 -08002248 if (lf->filter_level[0] || lf->filter_level[1]) {
2249 aom_wb_write_literal(wb, lf->filter_level_u, 6);
2250 aom_wb_write_literal(wb, lf->filter_level_v, 6);
Cheng Chen765e34e2017-12-11 11:43:35 -08002251 }
Cheng Chene94df5c2017-07-19 17:25:33 -07002252 }
Cheng Chena7345512017-12-05 15:36:05 -08002253#else
Cheng Chen179479f2017-08-04 10:56:39 -07002254 aom_wb_write_literal(wb, lf->filter_level, 6);
Cheng Chena7345512017-12-05 15:36:05 -08002255#endif // CONFIG_LOOPFILTER_LEVEL
Yaowu Xuf883b422016-08-30 14:01:10 -07002256 aom_wb_write_literal(wb, lf->sharpness_level, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002257
2258 // Write out loop filter deltas applied at the MB level based on mode or
2259 // ref frame (if they are enabled).
Yaowu Xuf883b422016-08-30 14:01:10 -07002260 aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002261
2262 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002263 aom_wb_write_bit(wb, lf->mode_ref_delta_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002264 if (lf->mode_ref_delta_update) {
2265 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
2266 const int delta = lf->ref_deltas[i];
2267 const int changed = delta != lf->last_ref_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07002268 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002269 if (changed) {
2270 lf->last_ref_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07002271 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002272 }
2273 }
2274
2275 for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
2276 const int delta = lf->mode_deltas[i];
2277 const int changed = delta != lf->last_mode_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07002278 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002279 if (changed) {
2280 lf->last_mode_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07002281 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002282 }
2283 }
2284 }
2285 }
2286}
2287
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01002288static void encode_cdef(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002289 const int num_planes = av1_num_planes(cm);
Hui Su27df8342017-11-07 15:16:05 -08002290#if CONFIG_INTRABC
2291 if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
2292#endif // CONFIG_INTRABC
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002293 int i;
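  // Damping is coded as an offset from 3 in two bits; the encoder keeps the
  // primary and secondary damping values equal. cdef_bits gives the size of
  // the per-block strength index, and one luma strength (plus a chroma
  // strength when there is more than one plane) is coded per preset.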
Steinar Midtskogen59782122017-07-20 08:49:43 +02002294 aom_wb_write_literal(wb, cm->cdef_pri_damping - 3, 2);
2295 assert(cm->cdef_pri_damping == cm->cdef_sec_damping);
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002296 aom_wb_write_literal(wb, cm->cdef_bits, 2);
2297 for (i = 0; i < cm->nb_cdef_strengths; i++) {
2298 aom_wb_write_literal(wb, cm->cdef_strengths[i], CDEF_STRENGTH_BITS);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002299 if (num_planes > 1)
Steinar Midtskogen1c1161f2017-09-08 15:03:51 +02002300 aom_wb_write_literal(wb, cm->cdef_uv_strengths[i], CDEF_STRENGTH_BITS);
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002301 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002302}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002303
Yaowu Xuf883b422016-08-30 14:01:10 -07002304static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002305 if (delta_q != 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002306 aom_wb_write_bit(wb, 1);
2307 aom_wb_write_inv_signed_literal(wb, delta_q, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002308 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002309 aom_wb_write_bit(wb, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002310 }
2311}
2312
Yaowu Xuf883b422016-08-30 14:01:10 -07002313static void encode_quantization(const AV1_COMMON *const cm,
2314 struct aom_write_bit_buffer *wb) {
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002315 const int num_planes = av1_num_planes(cm);
2316
Yaowu Xuf883b422016-08-30 14:01:10 -07002317 aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002318 write_delta_q(wb, cm->y_dc_delta_q);
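  // For chroma, when separate U/V delta-q is allowed a flag signals whether
  // the V deltas differ from the U deltas; the V deltas are only coded in
  // that case.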
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00002319 if (num_planes > 1) {
Debargha Mukherjee18f4fb22017-12-14 14:26:27 -08002320 int diff_uv_delta = (cm->u_dc_delta_q != cm->v_dc_delta_q) ||
2321 (cm->u_ac_delta_q != cm->v_ac_delta_q);
Debargha Mukherjee18f4fb22017-12-14 14:26:27 -08002322 if (cm->separate_uv_delta_q) aom_wb_write_bit(wb, diff_uv_delta);
Debargha Mukherjee18f4fb22017-12-14 14:26:27 -08002323 write_delta_q(wb, cm->u_dc_delta_q);
2324 write_delta_q(wb, cm->u_ac_delta_q);
2325 if (diff_uv_delta) {
2326 write_delta_q(wb, cm->v_dc_delta_q);
2327 write_delta_q(wb, cm->v_ac_delta_q);
2328 }
Yaowu Xu6fc47e52017-12-04 15:07:48 -08002329 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002330#if CONFIG_AOM_QM
Yaowu Xuf883b422016-08-30 14:01:10 -07002331 aom_wb_write_bit(wb, cm->using_qmatrix);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002332 if (cm->using_qmatrix) {
Yaowu Xuf7a12422018-01-31 15:29:20 -08002333#if CONFIG_AOM_QM_EXT
2334 aom_wb_write_literal(wb, cm->qm_y, QM_LEVEL_BITS);
2335 aom_wb_write_literal(wb, cm->qm_u, QM_LEVEL_BITS);
Yaowu Xuf7a12422018-01-31 15:29:20 -08002336 if (!cm->separate_uv_delta_q)
2337 assert(cm->qm_u == cm->qm_v);
2338 else
Yaowu Xuf7a12422018-01-31 15:29:20 -08002339 aom_wb_write_literal(wb, cm->qm_v, QM_LEVEL_BITS);
2340#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002341 aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS);
2342 aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS);
Yaowu Xuf7a12422018-01-31 15:29:20 -08002343#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002344 }
2345#endif
2346}
2347
Yaowu Xuf883b422016-08-30 14:01:10 -07002348static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
2349 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002350 int i, j;
2351 const struct segmentation *seg = &cm->seg;
2352
Yaowu Xuf883b422016-08-30 14:01:10 -07002353 aom_wb_write_bit(wb, seg->enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002354 if (!seg->enabled) return;
2355
2356 // Segmentation map
2357 if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002358 aom_wb_write_bit(wb, seg->update_map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002359 } else {
2360 assert(seg->update_map == 1);
2361 }
2362 if (seg->update_map) {
2363 // Select the coding strategy (temporal or spatial)
Yushin Choe8d88792018-01-25 12:09:15 -08002364 if (!cm->error_resilient_mode) av1_choose_segmap_coding_method(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002365
2366 // Write out the chosen coding method.
2367 if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002368 aom_wb_write_bit(wb, seg->temporal_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002369 } else {
2370 assert(seg->temporal_update == 0);
2371 }
2372 }
2373
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00002374#if CONFIG_SPATIAL_SEGMENTATION
2375 cm->preskip_segid = 0;
2376#endif
2377
Yaowu Xuc27fc142016-08-22 16:08:15 -07002378 // Segmentation data
Yaowu Xuf883b422016-08-30 14:01:10 -07002379 aom_wb_write_bit(wb, seg->update_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002380 if (seg->update_data) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002381 for (i = 0; i < MAX_SEGMENTS; i++) {
2382 for (j = 0; j < SEG_LVL_MAX; j++) {
2383 const int active = segfeature_active(seg, i, j);
Yaowu Xuf883b422016-08-30 14:01:10 -07002384 aom_wb_write_bit(wb, active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002385 if (active) {
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00002386#if CONFIG_SPATIAL_SEGMENTATION
2387 cm->preskip_segid |= j >= SEG_LVL_REF_FRAME;
2388 cm->last_active_segid = i;
2389#endif
Sebastien Alaiwanca14b472017-12-11 11:46:00 +01002390 const int data_max = av1_seg_feature_data_max(j);
2391 const int data_min = -data_max;
2392 const int ubits = get_unsigned_bits(data_max);
2393 const int data = clamp(get_segdata(seg, i, j), data_min, data_max);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002394
Yaowu Xuf883b422016-08-30 14:01:10 -07002395 if (av1_is_segfeature_signed(j)) {
Sebastien Alaiwanca14b472017-12-11 11:46:00 +01002396 aom_wb_write_inv_signed_literal(wb, data, ubits);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002397 } else {
Sebastien Alaiwanca14b472017-12-11 11:46:00 +01002398 aom_wb_write_literal(wb, data, ubits);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002399 }
2400 }
2401 }
2402 }
2403 }
2404}
2405
Thomas Daedef636d5c2017-06-29 13:48:27 -07002406static void write_tx_mode(AV1_COMMON *cm, TX_MODE *mode,
Yue Cheneeacc4c2017-01-17 17:29:17 -08002407 struct aom_write_bit_buffer *wb) {
Thomas Daedef636d5c2017-06-29 13:48:27 -07002408 if (cm->all_lossless) {
Yue Cheneeacc4c2017-01-17 17:29:17 -08002409 *mode = ONLY_4X4;
2410 return;
2411 }
Debargha Mukherjee923b73d2017-10-31 18:11:34 -07002412 aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002413}
2414
Angie Chiang5678ad92016-11-21 09:38:40 -08002415static void write_frame_interp_filter(InterpFilter filter,
2416 struct aom_write_bit_buffer *wb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002417 aom_wb_write_bit(wb, filter == SWITCHABLE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002418 if (filter != SWITCHABLE)
Angie Chiang6305abe2016-10-24 12:24:44 -07002419 aom_wb_write_literal(wb, filter, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002420}
2421
Yaowu Xuf883b422016-08-30 14:01:10 -07002422static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002423 if (cm->interp_filter == SWITCHABLE) {
2424 // Check to see if only one of the filters is actually used
2425 int count[SWITCHABLE_FILTERS];
2426 int i, j, c = 0;
2427 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2428 count[i] = 0;
2429 for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
2430 count[i] += counts->switchable_interp[j][i];
2431 c += (count[i] > 0);
2432 }
2433 if (c == 1) {
2434 // Only one filter is used. So set the filter at frame level
2435 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2436 if (count[i]) {
Sarah Parker4c10a3c2017-04-10 19:37:59 -07002437 if (i == EIGHTTAP_REGULAR || WARP_WM_NEIGHBORS_WITH_OBMC)
Debargha Mukherjee604d8462017-04-06 15:27:00 -07002438 cm->interp_filter = i;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002439 break;
2440 }
2441 }
2442 }
2443 }
2444}
2445
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002446#if CONFIG_MAX_TILE
2447
2448// Same function as write_uniform but writing to uncompresses header wb
2449static void wb_write_uniform(struct aom_write_bit_buffer *wb, int n, int v) {
2450 const int l = get_unsigned_bits(n);
2451 const int m = (1 << l) - n;
2452 if (l == 0) return;
2453 if (v < m) {
2454 aom_wb_write_literal(wb, v, l - 1);
2455 } else {
2456 aom_wb_write_literal(wb, m + ((v - m) >> 1), l - 1);
2457 aom_wb_write_literal(wb, (v - m) & 1, 1);
2458 }
2459}
2460
2461static void write_tile_info_max_tile(const AV1_COMMON *const cm,
2462 struct aom_write_bit_buffer *wb) {
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002463 int width_mi = ALIGN_POWER_OF_TWO(cm->mi_cols, cm->seq_params.mib_size_log2);
2464 int height_mi = ALIGN_POWER_OF_TWO(cm->mi_rows, cm->seq_params.mib_size_log2);
2465 int width_sb = width_mi >> cm->seq_params.mib_size_log2;
2466 int height_sb = height_mi >> cm->seq_params.mib_size_log2;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002467 int size_sb, i;
2468
2469 aom_wb_write_bit(wb, cm->uniform_tile_spacing_flag);
2470
2471 if (cm->uniform_tile_spacing_flag) {
2472 // Uniform spaced tiles with power-of-two number of rows and columns
2473    // Uniformly spaced tiles with a power-of-two number of rows and columns
2474 int ones = cm->log2_tile_cols - cm->min_log2_tile_cols;
2475 while (ones--) {
2476 aom_wb_write_bit(wb, 1);
2477 }
2478 if (cm->log2_tile_cols < cm->max_log2_tile_cols) {
2479 aom_wb_write_bit(wb, 0);
2480 }
2481
2482 // rows
2483 ones = cm->log2_tile_rows - cm->min_log2_tile_rows;
2484 while (ones--) {
2485 aom_wb_write_bit(wb, 1);
2486 }
2487 if (cm->log2_tile_rows < cm->max_log2_tile_rows) {
2488 aom_wb_write_bit(wb, 0);
2489 }
2490 } else {
2491 // Explicit tiles with configurable tile widths and heights
2492 // columns
2493 for (i = 0; i < cm->tile_cols; i++) {
2494 size_sb = cm->tile_col_start_sb[i + 1] - cm->tile_col_start_sb[i];
2495 wb_write_uniform(wb, AOMMIN(width_sb, MAX_TILE_WIDTH_SB), size_sb - 1);
2496 width_sb -= size_sb;
2497 }
2498 assert(width_sb == 0);
2499
2500 // rows
2501 for (i = 0; i < cm->tile_rows; i++) {
2502 size_sb = cm->tile_row_start_sb[i + 1] - cm->tile_row_start_sb[i];
2503 wb_write_uniform(wb, AOMMIN(height_sb, cm->max_tile_height_sb),
2504 size_sb - 1);
2505 height_sb -= size_sb;
2506 }
2507 assert(height_sb == 0);
2508 }
2509}
2510#endif
2511
Yaowu Xuf883b422016-08-30 14:01:10 -07002512static void write_tile_info(const AV1_COMMON *const cm,
2513 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002514#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002515 if (cm->large_scale_tile) {
2516 const int tile_width =
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002517 ALIGN_POWER_OF_TWO(cm->tile_width, cm->seq_params.mib_size_log2) >>
2518 cm->seq_params.mib_size_log2;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002519 const int tile_height =
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002520 ALIGN_POWER_OF_TWO(cm->tile_height, cm->seq_params.mib_size_log2) >>
2521 cm->seq_params.mib_size_log2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002522
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002523 assert(tile_width > 0);
2524 assert(tile_height > 0);
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08002525
Yaowu Xuc27fc142016-08-22 16:08:15 -07002526// Write the tile sizes
2527#if CONFIG_EXT_PARTITION
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002528 if (cm->seq_params.sb_size == BLOCK_128X128) {
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002529 assert(tile_width <= 32);
2530 assert(tile_height <= 32);
2531 aom_wb_write_literal(wb, tile_width - 1, 5);
2532 aom_wb_write_literal(wb, tile_height - 1, 5);
2533 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002534#endif // CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002535 assert(tile_width <= 64);
2536 assert(tile_height <= 64);
2537 aom_wb_write_literal(wb, tile_width - 1, 6);
2538 aom_wb_write_literal(wb, tile_height - 1, 6);
2539#if CONFIG_EXT_PARTITION
2540 }
2541#endif // CONFIG_EXT_PARTITION
2542 } else {
2543#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002544
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002545#if CONFIG_MAX_TILE
2546 write_tile_info_max_tile(cm, wb);
2547#else
2548 int min_log2_tile_cols, max_log2_tile_cols, ones;
2549 av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002550
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002551 // columns
2552 ones = cm->log2_tile_cols - min_log2_tile_cols;
2553 while (ones--) aom_wb_write_bit(wb, 1);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002554
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002555 if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0);
2556
2557 // rows
2558 aom_wb_write_bit(wb, cm->log2_tile_rows != 0);
2559 if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1);
2560#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002561#if CONFIG_DEPENDENT_HORZTILES
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002562 if (cm->tile_rows > 1) aom_wb_write_bit(wb, cm->dependent_horz_tiles);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002563#endif
2564#if CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002565 }
Fangwen Fu70bcb892017-05-06 17:05:19 -07002566#endif // CONFIG_EXT_TILE
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002567
Ryan Lei9b02b0e2017-01-30 15:52:20 -08002568#if CONFIG_LOOPFILTERING_ACROSS_TILES
Lei7bb501d2017-12-13 15:10:34 -08002569#if CONFIG_LOOPFILTERING_ACROSS_TILES_EXT
2570 if (cm->tile_cols > 1) {
2571 aom_wb_write_bit(wb, cm->loop_filter_across_tiles_v_enabled);
2572 }
2573 if (cm->tile_rows > 1) {
2574 aom_wb_write_bit(wb, cm->loop_filter_across_tiles_h_enabled);
2575 }
2576#else
Yunqing Wang42015d12017-10-17 15:43:49 -07002577 if (cm->tile_cols * cm->tile_rows > 1)
2578 aom_wb_write_bit(wb, cm->loop_filter_across_tiles_enabled);
Lei7bb501d2017-12-13 15:10:34 -08002579#endif // CONFIG_LOOPFILTERING_ACROSS_TILES_EXT
Ryan Lei9b02b0e2017-01-30 15:52:20 -08002580#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Cyril Concolato3b5afc12017-12-15 12:54:15 -08002581
2582#if CONFIG_TILE_INFO_FIRST
2583 // write the tile length code (Always 4 bytes for now)
2584 aom_wb_write_literal(wb, 3, 2);
2585#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002586}
2587
Zoe Liu8dd1c982017-09-11 10:14:35 -07002588#if USE_GF16_MULTI_LAYER
2589static int get_refresh_mask_gf16(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002590 int refresh_mask = 0;
2591
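  // In the 16-frame golden-frame group structure a single virtual buffer
  // index (refresh_fb_idx) is refreshed whenever any of the reference
  // refresh flags is set.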
Zoe Liu8dd1c982017-09-11 10:14:35 -07002592 if (cpi->refresh_last_frame || cpi->refresh_golden_frame ||
2593 cpi->refresh_bwd_ref_frame || cpi->refresh_alt2_ref_frame ||
2594 cpi->refresh_alt_ref_frame) {
2595 assert(cpi->refresh_fb_idx >= 0 && cpi->refresh_fb_idx < REF_FRAMES);
2596 refresh_mask |= (1 << cpi->refresh_fb_idx);
2597 }
2598
2599 return refresh_mask;
2600}
2601#endif // USE_GF16_MULTI_LAYER
Zoe Liu8dd1c982017-09-11 10:14:35 -07002602
2603static int get_refresh_mask(AV1_COMP *cpi) {
Yi Luo2e6a9ab2017-09-15 08:13:59 -07002604 int refresh_mask = 0;
Zoe Liu8dd1c982017-09-11 10:14:35 -07002605#if USE_GF16_MULTI_LAYER
2606 if (cpi->rc.baseline_gf_interval == 16) return get_refresh_mask_gf16(cpi);
2607#endif // USE_GF16_MULTI_LAYER
2608
Yaowu Xuc27fc142016-08-22 16:08:15 -07002609 // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
2610 // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
2611 // the 3 LAST reference frames will be updated accordingly, i.e.:
2612 // (1) The original virtual index for LAST3_FRAME will become the new virtual
2613 // index for LAST_FRAME; and
2614 // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
2615 // shifted and become the new virtual indexes for LAST2_FRAME and
2616 // LAST3_FRAME.
2617 refresh_mask |=
2618 (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]);
Zoe Liue9b15e22017-07-19 15:53:01 -07002619
Zoe Liue9b15e22017-07-19 15:53:01 -07002620 refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
2621 refresh_mask |= (cpi->refresh_alt2_ref_frame << cpi->alt2_fb_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002622
Yaowu Xuf883b422016-08-30 14:01:10 -07002623 if (av1_preserve_existing_gf(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002624 // We have decided to preserve the previously existing golden frame as our
2625 // new ARF frame. However, in the short term we leave it in the GF slot and,
2626 // if we're updating the GF with the current decoded frame, we save it
2627 // instead to the ARF slot.
Yaowu Xuf883b422016-08-30 14:01:10 -07002628 // Later, in the function av1_encoder.c:av1_update_reference_frames() we
Yaowu Xuc27fc142016-08-22 16:08:15 -07002629 // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
2630 // there so that it can be done outside of the recode loop.
2631 // Note: This is highly specific to the use of ARF as a forward reference,
2632 // and this needs to be generalized as other uses are implemented
2633 // (like RTC/temporal scalability).
2634 return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx);
2635 } else {
Zoe Liue9b15e22017-07-19 15:53:01 -07002636 const int arf_idx = cpi->alt_fb_idx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002637 return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
2638 (cpi->refresh_alt_ref_frame << arf_idx);
2639 }
2640}
2641
2642#if CONFIG_EXT_TILE
2643static INLINE int find_identical_tile(
2644 const int tile_row, const int tile_col,
2645 TileBufferEnc (*const tile_buffers)[1024]) {
2646 const MV32 candidate_offset[1] = { { 1, 0 } };
2647 const uint8_t *const cur_tile_data =
2648 tile_buffers[tile_row][tile_col].data + 4;
Jingning Han99ffce62017-04-25 15:48:41 -07002649 const size_t cur_tile_size = tile_buffers[tile_row][tile_col].size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002650
2651 int i;
2652
2653 if (tile_row == 0) return 0;
2654
2655  // TODO(yunqingwang): For now, only the above tile is checked and used.
2656  // More candidates, such as the left tile, can be added later.
2657 for (i = 0; i < 1; i++) {
2658 int row_offset = candidate_offset[0].row;
2659 int col_offset = candidate_offset[0].col;
2660 int row = tile_row - row_offset;
2661 int col = tile_col - col_offset;
2662 uint8_t tile_hdr;
2663 const uint8_t *tile_data;
2664 TileBufferEnc *candidate;
2665
2666 if (row < 0 || col < 0) continue;
2667
2668 tile_hdr = *(tile_buffers[row][col].data);
2669
2670 // Read out tcm bit
2671 if ((tile_hdr >> 7) == 1) {
2672 // The candidate is a copy tile itself
2673 row_offset += tile_hdr & 0x7f;
2674 row = tile_row - row_offset;
2675 }
2676
2677 candidate = &tile_buffers[row][col];
2678
2679 if (row_offset >= 128 || candidate->size != cur_tile_size) continue;
2680
2681 tile_data = candidate->data + 4;
2682
2683 if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;
2684
2685 // Identical tile found
2686 assert(row_offset > 0);
2687 return row_offset;
2688 }
2689
2690 // No identical tile found
2691 return 0;
2692}
2693#endif // CONFIG_EXT_TILE
2694
Jingning Handa11e692017-12-19 08:45:08 -08002695#if !CONFIG_OBU
Yaowu Xuf883b422016-08-30 14:01:10 -07002696static uint32_t write_tiles(AV1_COMP *const cpi, uint8_t *const dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002697 unsigned int *max_tile_size,
2698 unsigned int *max_tile_col_size) {
Thomas Davies4822e142017-10-10 11:30:36 +01002699 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuf883b422016-08-30 14:01:10 -07002700 aom_writer mode_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002701 int tile_row, tile_col;
2702 TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
clang-format67948d32016-09-07 22:40:40 -07002703 TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
James Zern71a37de2017-04-20 16:03:13 -07002704 uint32_t total_size = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002705 const int tile_cols = cm->tile_cols;
2706 const int tile_rows = cm->tile_rows;
Thomas Daviesaf6df172016-11-09 14:04:18 +00002707 unsigned int tile_size = 0;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002708 const int have_tiles = tile_cols * tile_rows > 1;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002709 struct aom_write_bit_buffer wb = { dst, 0 };
Thomas Davies80188d12016-10-26 16:08:35 -07002710 const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
Thomas Davies80188d12016-10-26 16:08:35 -07002711 // Fixed size tile groups for the moment
2712 const int num_tg_hdrs = cm->num_tg;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002713 const int tg_size =
2714#if CONFIG_EXT_TILE
2715 (cm->large_scale_tile)
2716 ? 1
2717 :
2718#endif // CONFIG_EXT_TILE
2719 (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
Thomas Davies80188d12016-10-26 16:08:35 -07002720 int tile_count = 0;
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00002721 int tg_count = 1;
2722 int tile_size_bytes = 4;
2723 int tile_col_size_bytes;
James Zern71a37de2017-04-20 16:03:13 -07002724 uint32_t uncompressed_hdr_size = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07002725 struct aom_write_bit_buffer tg_params_wb;
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00002726 struct aom_write_bit_buffer tile_size_bytes_wb;
James Zern71a37de2017-04-20 16:03:13 -07002727 uint32_t saved_offset;
Thomas Daviesaf6df172016-11-09 14:04:18 +00002728 int mtu_size = cpi->oxcf.mtu;
2729 int curr_tg_data_size = 0;
2730 int hdr_size;
Yaowu Xua8975df2018-01-23 09:32:49 -08002731 const int num_planes = av1_num_planes(cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002732
2733 *max_tile_size = 0;
2734 *max_tile_col_size = 0;
2735
Frederic Barbierf57a9372018-01-05 09:35:03 +01002736  // All tile size fields are written using 4 bytes. A call to remux_tiles will
2737 // later compact the data if smaller headers are adequate.
Yaowu Xuc27fc142016-08-22 16:08:15 -07002738
Thomas Davies4822e142017-10-10 11:30:36 +01002739 cm->largest_tile_id = 0;
Thomas Davies4822e142017-10-10 11:30:36 +01002740
Yaowu Xuc27fc142016-08-22 16:08:15 -07002741#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002742 if (cm->large_scale_tile) {
2743 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
2744 TileInfo tile_info;
2745 const int is_last_col = (tile_col == tile_cols - 1);
2746 const uint32_t col_offset = total_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002747
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002748 av1_tile_set_col(&tile_info, cm, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002749
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002750 // The last column does not have a column header
2751 if (!is_last_col) total_size += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002752
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002753 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
2754 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
2755 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
2756 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
2757 const int data_offset = have_tiles ? 4 : 0;
2758 const int tile_idx = tile_row * tile_cols + tile_col;
2759 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
2760 av1_tile_set_row(&tile_info, cm, tile_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002761
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002762 buf->data = dst + total_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002763
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002764        // If CONFIG_EXT_TILE = 1, every tile in the row has a header,
2765 // even for the last one, unless no tiling is used at all.
2766 total_size += data_offset;
2767 // Initialise tile context from the frame context
2768 this_tile->tctx = *cm->fc;
2769 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07002770 mode_bc.allow_update_cdf = !cm->large_scale_tile;
Rupert Swarbrick7546b302017-10-26 10:45:26 +01002771#if CONFIG_LOOP_RESTORATION
Yaowu Xua8975df2018-01-23 09:32:49 -08002772 av1_reset_loop_restoration(&cpi->td.mb.e_mbd, num_planes);
Rupert Swarbrick7546b302017-10-26 10:45:26 +01002773#endif // CONFIG_LOOP_RESTORATION
2774
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002775 aom_start_encode(&mode_bc, buf->data + data_offset);
2776 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
2777 assert(tok == tok_end);
2778 aom_stop_encode(&mode_bc);
2779 tile_size = mode_bc.pos;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002780 buf->size = tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002781
Thomas Davies4822e142017-10-10 11:30:36 +01002782 if (tile_size > *max_tile_size) {
2783 cm->largest_tile_id = tile_cols * tile_row + tile_col;
2784 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002785 // Record the maximum tile size we see, so we can compact headers later.
2786 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002787
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002788 if (have_tiles) {
2789 // tile header: size of this tile, or copy offset
2790 uint32_t tile_header = tile_size;
2791 const int tile_copy_mode =
2792 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256)
2793 ? 1
2794 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002795
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002796 // If tile_copy_mode = 1, check if this tile is a copy tile.
2797          // Copy tiles are very unlikely on key frames, so skip the search
2798          // there to avoid unnecessary work.
2799 if (cm->frame_type != KEY_FRAME && tile_copy_mode) {
2800 const int idendical_tile_offset =
2801 find_identical_tile(tile_row, tile_col, tile_buffers);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002802
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002803            if (identical_tile_offset > 0) {
2804              tile_size = 0;
2805              tile_header = identical_tile_offset | 0x80;
2806 tile_header <<= 24;
2807 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002808 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002809
2810 mem_put_le32(buf->data, tile_header);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002811 }
2812
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002813 total_size += tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002814 }
2815
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002816 if (!is_last_col) {
2817 uint32_t col_size = total_size - col_offset - 4;
2818 mem_put_le32(dst + col_offset, col_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002819
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002820 // If it is not final packing, record the maximum tile column size we
2821 // see, otherwise, check if the tile size is out of the range.
2822 *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
2823 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002824 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002825 } else {
2826#endif // CONFIG_EXT_TILE
Soo-Chul Han38427e82017-09-27 15:06:13 -04002827
2828#if !CONFIG_OBU
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002829 write_uncompressed_header_frame(cpi, &wb);
Soo-Chul Han38427e82017-09-27 15:06:13 -04002830#else
2831 write_uncompressed_header_obu(cpi, &wb);
2832#endif
Thomas Davies80188d12016-10-26 16:08:35 -07002833
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002834 if (cm->show_existing_frame) {
2835 total_size = aom_wb_bytes_written(&wb);
2836 return (uint32_t)total_size;
2837 }
Jingning Hand3f441c2017-03-06 09:12:54 -08002838
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002839 // Write the tile length code
2840 tile_size_bytes_wb = wb;
2841 aom_wb_write_literal(&wb, 3, 2);
Thomas Davies80188d12016-10-26 16:08:35 -07002842
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002843 /* Write a placeholder for the number of tiles in each tile group */
2844 tg_params_wb = wb;
2845 saved_offset = wb.bit_offset;
2846 if (have_tiles) {
Imdad Sardharwalla857c99b2017-11-21 15:53:31 +00002847 aom_wb_write_literal(&wb, 3, n_log2_tiles);
2848 aom_wb_write_literal(&wb, (1 << n_log2_tiles) - 1, n_log2_tiles);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002849 }
Thomas Davies80188d12016-10-26 16:08:35 -07002850
Yunqing Wange7142e12018-01-17 11:20:12 -08002851 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
2852 hdr_size = uncompressed_hdr_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002853 total_size += hdr_size;
Thomas Davies80188d12016-10-26 16:08:35 -07002854
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002855 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
2856 TileInfo tile_info;
2857 const int is_last_row = (tile_row == tile_rows - 1);
2858 av1_tile_set_row(&tile_info, cm, tile_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002859
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002860 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
2861 const int tile_idx = tile_row * tile_cols + tile_col;
2862 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
2863 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
2864 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
2865 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
2866 const int is_last_col = (tile_col == tile_cols - 1);
2867 const int is_last_tile = is_last_col && is_last_row;
Thomas Daviesaf6df172016-11-09 14:04:18 +00002868
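        // Start a new tile group either when the fixed group size is exceeded
        // (no MTU limit configured) or when the accumulated payload of the
        // current group reaches the MTU; the uncompressed header is then
        // re-inserted and the tile counts in the previous group header are
        // patched.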
Thomas Daviesb25ba502017-07-18 10:18:24 +01002869 if ((!mtu_size && tile_count > tg_size) ||
2870 (mtu_size && tile_count && curr_tg_data_size >= mtu_size)) {
2871 // New tile group
2872 tg_count++;
2873 // We've exceeded the packet size
2874 if (tile_count > 1) {
2875 /* The last tile exceeded the packet size. The tile group size
2876 should therefore be tile_count-1.
2877             Move the last tile and insert headers before it.
2878 */
2879 uint32_t old_total_size = total_size - tile_size - 4;
2880 memmove(dst + old_total_size + hdr_size, dst + old_total_size,
2881 (tile_size + 4) * sizeof(uint8_t));
2882 // Copy uncompressed header
2883 memmove(dst + old_total_size, dst,
2884 uncompressed_hdr_size * sizeof(uint8_t));
2885 // Write the number of tiles in the group into the last uncompressed
2886 // header before the one we've just inserted
2887 aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
2888 n_log2_tiles);
2889 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 2,
2890 n_log2_tiles);
2891 // Update the pointer to the last TG params
2892 tg_params_wb.bit_offset = saved_offset + 8 * old_total_size;
Thomas Daviesb25ba502017-07-18 10:18:24 +01002893 total_size += hdr_size;
2894 tile_count = 1;
2895 curr_tg_data_size = hdr_size + tile_size + 4;
2896 } else {
2897 // We exceeded the packet size in just one tile
2898 // Copy uncompressed header
2899 memmove(dst + total_size, dst,
2900 uncompressed_hdr_size * sizeof(uint8_t));
2901 // Write the number of tiles in the group into the last uncompressed
2902 // header
2903 aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
2904 n_log2_tiles);
2905 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1,
2906 n_log2_tiles);
2907 tg_params_wb.bit_offset = saved_offset + 8 * total_size;
Thomas Daviesb25ba502017-07-18 10:18:24 +01002908 total_size += hdr_size;
2909 tile_count = 0;
2910 curr_tg_data_size = hdr_size;
2911 }
Thomas Daviesaf6df172016-11-09 14:04:18 +00002912 }
Thomas Daviesb25ba502017-07-18 10:18:24 +01002913 tile_count++;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002914 av1_tile_set_col(&tile_info, cm, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002915
Thomas Daviesb25ba502017-07-18 10:18:24 +01002916#if CONFIG_DEPENDENT_HORZTILES
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002917 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
Fangwen Fu73126c02017-02-08 22:37:47 -08002918#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002919 buf->data = dst + total_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002920
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002921 // The last tile does not have a header.
2922 if (!is_last_tile) total_size += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002923
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002924 // Initialise tile context from the frame context
2925 this_tile->tctx = *cm->fc;
2926 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07002927 mode_bc.allow_update_cdf = 1;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002928#if CONFIG_LOOP_RESTORATION
Yaowu Xua8975df2018-01-23 09:32:49 -08002929 av1_reset_loop_restoration(&cpi->td.mb.e_mbd, num_planes);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002930#endif // CONFIG_LOOP_RESTORATION
2931
Alex Converse30f0e152017-03-28 10:13:27 -07002932 aom_start_encode(&mode_bc, dst + total_size);
2933 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
Alex Converse30f0e152017-03-28 10:13:27 -07002934 aom_stop_encode(&mode_bc);
2935 tile_size = mode_bc.pos;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002936 assert(tile_size > 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002937
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002938 curr_tg_data_size += tile_size + 4;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002939 buf->size = tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002940
Thomas Davies4822e142017-10-10 11:30:36 +01002941 if (tile_size > *max_tile_size) {
2942 cm->largest_tile_id = tile_cols * tile_row + tile_col;
2943 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002944 if (!is_last_tile) {
2945 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
2946 // size of this tile
2947 mem_put_le32(buf->data, tile_size);
2948 }
2949
2950 total_size += tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002951 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002952 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002953 // Write the final tile group size
2954 if (n_log2_tiles) {
Dominic Symesf58f1112017-09-25 12:47:40 +02002955 aom_wb_overwrite_literal(
2956 &tg_params_wb, (tile_cols * tile_rows) - tile_count, n_log2_tiles);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002957 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1, n_log2_tiles);
2958 }
2959    // Remux if possible. TODO(Thomas Davies): do this for more than one tile
2960 // group
2961 if (have_tiles && tg_count == 1) {
Yunqing Wange7142e12018-01-17 11:20:12 -08002962 int data_size = total_size - uncompressed_hdr_size;
2963 data_size = remux_tiles(cm, dst + uncompressed_hdr_size, data_size,
2964 *max_tile_size, *max_tile_col_size,
2965 &tile_size_bytes, &tile_col_size_bytes);
2966 total_size = data_size + uncompressed_hdr_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002967 aom_wb_overwrite_literal(&tile_size_bytes_wb, tile_size_bytes - 1, 2);
2968 }
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00002969
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002970#if CONFIG_EXT_TILE
2971 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002972#endif // CONFIG_EXT_TILE
2973 return (uint32_t)total_size;
2974}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002975#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002976
Yaowu Xuf883b422016-08-30 14:01:10 -07002977static void write_render_size(const AV1_COMMON *cm,
2978 struct aom_write_bit_buffer *wb) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002979 const int scaling_active = !av1_resize_unscaled(cm);
Yaowu Xuf883b422016-08-30 14:01:10 -07002980 aom_wb_write_bit(wb, scaling_active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002981 if (scaling_active) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002982 aom_wb_write_literal(wb, cm->render_width - 1, 16);
2983 aom_wb_write_literal(wb, cm->render_height - 1, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002984 }
2985}
2986
Urvang Joshi94ad3702017-12-06 11:38:08 -08002987#if CONFIG_HORZONLY_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07002988static void write_superres_scale(const AV1_COMMON *const cm,
2989 struct aom_write_bit_buffer *wb) {
Fergus Simpsone7508412017-03-14 18:14:09 -07002990  // First bit is whether to scale or not
Urvang Joshide71d142017-10-05 12:12:15 -07002991 if (cm->superres_scale_denominator == SCALE_NUMERATOR) {
Fergus Simpsone7508412017-03-14 18:14:09 -07002992 aom_wb_write_bit(wb, 0); // no scaling
2993 } else {
2994 aom_wb_write_bit(wb, 1); // scaling, write scale factor
Urvang Joshi83010182017-10-27 12:36:02 -07002995 assert(cm->superres_scale_denominator >= SUPERRES_SCALE_DENOMINATOR_MIN);
2996 assert(cm->superres_scale_denominator <
2997 SUPERRES_SCALE_DENOMINATOR_MIN + (1 << SUPERRES_SCALE_BITS));
Fergus Simpsone7508412017-03-14 18:14:09 -07002998 aom_wb_write_literal(
Urvang Joshide71d142017-10-05 12:12:15 -07002999 wb, cm->superres_scale_denominator - SUPERRES_SCALE_DENOMINATOR_MIN,
Fergus Simpsone7508412017-03-14 18:14:09 -07003000 SUPERRES_SCALE_BITS);
3001 }
3002}
Urvang Joshi94ad3702017-12-06 11:38:08 -08003003#endif // CONFIG_HORZONLY_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07003004
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003005#if CONFIG_FRAME_SIZE
3006static void write_frame_size(const AV1_COMMON *cm, int frame_size_override,
David Barker22171312017-11-20 11:26:04 +00003007 struct aom_write_bit_buffer *wb)
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003008#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003009static void write_frame_size(const AV1_COMMON *cm,
David Barker22171312017-11-20 11:26:04 +00003010 struct aom_write_bit_buffer *wb)
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003011#endif
David Barker22171312017-11-20 11:26:04 +00003012{
Urvang Joshi94ad3702017-12-06 11:38:08 -08003013#if CONFIG_HORZONLY_FRAME_SUPERRES
David Barker22171312017-11-20 11:26:04 +00003014 const int coded_width = cm->superres_upscaled_width - 1;
3015 const int coded_height = cm->superres_upscaled_height - 1;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003016#else
David Barker22171312017-11-20 11:26:04 +00003017 const int coded_width = cm->width - 1;
3018 const int coded_height = cm->height - 1;
Urvang Joshi94ad3702017-12-06 11:38:08 -08003019#endif // CONFIG_HORZONLY_FRAME_SUPERRES
David Barker22171312017-11-20 11:26:04 +00003020
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003021#if CONFIG_FRAME_SIZE
3022 if (frame_size_override) {
3023 const SequenceHeader *seq_params = &cm->seq_params;
3024 int num_bits_width = seq_params->num_bits_width;
3025 int num_bits_height = seq_params->num_bits_height;
David Barker22171312017-11-20 11:26:04 +00003026 aom_wb_write_literal(wb, coded_width, num_bits_width);
3027 aom_wb_write_literal(wb, coded_height, num_bits_height);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003028 }
3029#else
David Barker22171312017-11-20 11:26:04 +00003030 aom_wb_write_literal(wb, coded_width, 16);
3031 aom_wb_write_literal(wb, coded_height, 16);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003032#endif
David Barker22171312017-11-20 11:26:04 +00003033
Urvang Joshi94ad3702017-12-06 11:38:08 -08003034#if CONFIG_HORZONLY_FRAME_SUPERRES
David Barker22171312017-11-20 11:26:04 +00003035 write_superres_scale(cm, wb);
Urvang Joshi94ad3702017-12-06 11:38:08 -08003036#endif // CONFIG_HORZONLY_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003037 write_render_size(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003038}
3039
Yaowu Xuf883b422016-08-30 14:01:10 -07003040static void write_frame_size_with_refs(AV1_COMP *cpi,
3041 struct aom_write_bit_buffer *wb) {
3042 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003043 int found = 0;
3044
3045 MV_REFERENCE_FRAME ref_frame;
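  // For each reference frame, one bit signals whether the current frame's
  // (upscaled) dimensions and render size match that reference; the search
  // stops at the first match. If no reference matches, the frame size is
  // coded explicitly.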
3046 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
3047 YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);
3048
3049 if (cfg != NULL) {
Urvang Joshi94ad3702017-12-06 11:38:08 -08003050#if CONFIG_HORZONLY_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003051 found = cm->superres_upscaled_width == cfg->y_crop_width &&
3052 cm->superres_upscaled_height == cfg->y_crop_height;
3053#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003054 found =
3055 cm->width == cfg->y_crop_width && cm->height == cfg->y_crop_height;
Urvang Joshi94ad3702017-12-06 11:38:08 -08003056#endif // CONFIG_HORZONLY_FRAME_SUPERRES
Yaowu Xuc27fc142016-08-22 16:08:15 -07003057 found &= cm->render_width == cfg->render_width &&
3058 cm->render_height == cfg->render_height;
3059 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003060 aom_wb_write_bit(wb, found);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003061 if (found) {
Urvang Joshi94ad3702017-12-06 11:38:08 -08003062#if CONFIG_HORZONLY_FRAME_SUPERRES
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003063 write_superres_scale(cm, wb);
Urvang Joshi94ad3702017-12-06 11:38:08 -08003064#endif // CONFIG_HORZONLY_FRAME_SUPERRES
Yaowu Xuc27fc142016-08-22 16:08:15 -07003065 break;
3066 }
3067 }
3068
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003069#if CONFIG_FRAME_SIZE
3070 if (!found) {
3071    int frame_size_override = 1; // Always equal to 1 in this function
3072 write_frame_size(cm, frame_size_override, wb);
3073 }
3074#else
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003075 if (!found) write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003076#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003077}
3078
Yaowu Xuc27fc142016-08-22 16:08:15 -07003079static void write_profile(BITSTREAM_PROFILE profile,
Yaowu Xuf883b422016-08-30 14:01:10 -07003080 struct aom_write_bit_buffer *wb) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003081 assert(profile >= PROFILE_0 && profile < MAX_PROFILES);
3082 aom_wb_write_literal(wb, profile, 2);
3083}
3084
3085static void write_bitdepth(AV1_COMMON *const cm,
3086 struct aom_write_bit_buffer *wb) {
3087  // Profiles 0/1: [0] 8-bit, [1] 10-bit
3088  // Profile 2:    [0] 8-bit, [10] 10-bit, [11] 12-bit
3089 aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_8 ? 0 : 1);
3090 if (cm->profile == PROFILE_2 && cm->bit_depth != AOM_BITS_8) {
3091 aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003092 }
3093}
3094
3095static void write_bitdepth_colorspace_sampling(
Yaowu Xuf883b422016-08-30 14:01:10 -07003096 AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003097 write_bitdepth(cm, wb);
3098#if CONFIG_MONO_VIDEO
Debargha Mukherjeef340fec2018-01-10 18:12:22 -08003099 const int is_monochrome = cm->seq_params.monochrome;
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003100 // monochrome bit
3101 if (cm->profile != PROFILE_1)
3102 aom_wb_write_bit(wb, is_monochrome);
3103 else
3104 assert(!is_monochrome);
3105#elif !CONFIG_CICP
3106 const int is_monochrome = 0;
3107#endif // CONFIG_MONO_VIDEO
Andrey Norkin9e694632017-12-21 18:50:57 -08003108#if CONFIG_CICP
3109 if (cm->color_primaries == AOM_CICP_CP_UNSPECIFIED &&
3110 cm->transfer_characteristics == AOM_CICP_TC_UNSPECIFIED &&
3111 cm->matrix_coefficients == AOM_CICP_MC_UNSPECIFIED) {
3112 aom_wb_write_bit(wb, 0); // No color description present
3113 } else {
3114 aom_wb_write_bit(wb, 1); // Color description present
3115 aom_wb_write_literal(wb, cm->color_primaries, 8);
3116 aom_wb_write_literal(wb, cm->transfer_characteristics, 8);
3117 aom_wb_write_literal(wb, cm->matrix_coefficients, 8);
3118 }
3119#else
anorkin76fb1262017-03-22 15:12:12 -07003120#if CONFIG_COLORSPACE_HEADERS
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003121 if (!is_monochrome) aom_wb_write_literal(wb, cm->color_space, 5);
anorkin76fb1262017-03-22 15:12:12 -07003122 aom_wb_write_literal(wb, cm->transfer_function, 5);
3123#else
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003124 if (!is_monochrome) aom_wb_write_literal(wb, cm->color_space, 4);
3125#endif // CONFIG_COLORSPACE_HEADERS
3126#endif // CONFIG_CICP
Debargha Mukherjeee5267692018-01-16 09:41:15 -08003127#if CONFIG_MONO_VIDEO
3128 if (is_monochrome) return;
3129#endif // CONFIG_MONO_VIDEO
Andrey Norkin9e694632017-12-21 18:50:57 -08003130#if CONFIG_CICP
3131 if (cm->color_primaries == AOM_CICP_CP_BT_709 &&
3132 cm->transfer_characteristics == AOM_CICP_TC_SRGB &&
3133 cm->matrix_coefficients ==
3134 AOM_CICP_MC_IDENTITY) { // it would be better to remove this
3135 // dependency too
3136#else
Imdad Sardharwalla317002f2017-12-05 16:24:56 +00003137 if (cm->color_space == AOM_CS_SRGB) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003138#endif // CONFIG_CICP
3139 assert(cm->subsampling_x == 0 && cm->subsampling_y == 0);
3140 assert(cm->profile == PROFILE_1 ||
3141 (cm->profile == PROFILE_2 && cm->bit_depth == AOM_BITS_12));
Imdad Sardharwalla317002f2017-12-05 16:24:56 +00003142 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003143 // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
Yaowu Xuf883b422016-08-30 14:01:10 -07003144 aom_wb_write_bit(wb, cm->color_range);
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003145 if (cm->profile == PROFILE_0) {
3146 // 420 only
Yaowu Xuc27fc142016-08-22 16:08:15 -07003147 assert(cm->subsampling_x == 1 && cm->subsampling_y == 1);
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003148 } else if (cm->profile == PROFILE_1) {
3149 // 444 only
3150 assert(cm->subsampling_x == 0 && cm->subsampling_y == 0);
3151 } else if (cm->profile == PROFILE_2) {
3152 if (cm->bit_depth == AOM_BITS_12) {
3153 // 420, 444 or 422
3154 aom_wb_write_bit(wb, cm->subsampling_x);
David Barker0c3545b2018-01-16 17:32:23 +00003155 if (cm->subsampling_x == 0) {
3156 assert(cm->subsampling_y == 0 &&
3157 "4:4:0 subsampling not allowed in AV1");
3158 } else {
3159 aom_wb_write_bit(wb, cm->subsampling_y);
3160 }
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003161 } else {
3162 // 422 only
3163 assert(cm->subsampling_x == 1 && cm->subsampling_y == 0);
3164 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003165 }
anorkin76fb1262017-03-22 15:12:12 -07003166#if CONFIG_COLORSPACE_HEADERS
3167 if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
3168 aom_wb_write_literal(wb, cm->chroma_sample_position, 2);
3169 }
3170#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003171 }
Yaowu Xu6fc47e52017-12-04 15:07:48 -08003172 aom_wb_write_bit(wb, cm->separate_uv_delta_q);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003173}
3174
Andrey Norkin28e9ce22018-01-08 10:11:21 -08003175#if CONFIG_TIMING_INFO_IN_SEQ_HEADERS
3176static void write_timing_info_header(AV1_COMMON *const cm,
3177 struct aom_write_bit_buffer *wb) {
3178 aom_wb_write_bit(wb, cm->timing_info_present); // timing info present flag
3179
3180 if (cm->timing_info_present) {
3181 aom_wb_write_unsigned_literal(wb, cm->num_units_in_tick,
3182 32); // Number of units in tick
3183 aom_wb_write_unsigned_literal(wb, cm->time_scale, 32); // Time scale
3184 aom_wb_write_bit(wb,
3185 cm->equal_picture_interval); // Equal picture interval bit
3186 if (cm->equal_picture_interval) {
3187 aom_wb_write_uvlc(wb,
3188 cm->num_ticks_per_picture - 1); // ticks per picture
3189 }
3190 }
3191}
3192#endif // CONFIG_TIMING_INFO_IN_SEQ_HEADERS
3193
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003194#if CONFIG_FILM_GRAIN
3195static void write_film_grain_params(AV1_COMMON *const cm,
3196 struct aom_write_bit_buffer *wb) {
3197 aom_film_grain_t *pars = &cm->film_grain_params;
3198
3199 aom_wb_write_bit(wb, pars->apply_grain);
3200 if (!pars->apply_grain) return;
3201
3202 aom_wb_write_literal(wb, pars->random_seed, 16);
3203
3204 pars->random_seed += 3245; // For film grain test vector purposes
3205 if (!pars->random_seed) // Random seed should not be zero
3206 pars->random_seed += 1735;
3207
3208 aom_wb_write_bit(wb, pars->update_parameters);
3209 if (!pars->update_parameters) return;
3210
3211 // Scaling functions parameters
3212
3213 aom_wb_write_literal(wb, pars->num_y_points, 4); // max 14
3214 for (int i = 0; i < pars->num_y_points; i++) {
3215 aom_wb_write_literal(wb, pars->scaling_points_y[i][0], 8);
3216 aom_wb_write_literal(wb, pars->scaling_points_y[i][1], 8);
3217 }
3218
3219 aom_wb_write_bit(wb, pars->chroma_scaling_from_luma);
3220
Andrey Norkin0c294fa2018-02-16 18:32:12 -08003221 if (pars->chroma_scaling_from_luma) {
3222 assert(pars->num_cb_points == 0);
3223 assert(pars->num_cr_points == 0);
3224 } else {
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003225 aom_wb_write_literal(wb, pars->num_cb_points, 4); // max 10
3226 for (int i = 0; i < pars->num_cb_points; i++) {
3227 aom_wb_write_literal(wb, pars->scaling_points_cb[i][0], 8);
3228 aom_wb_write_literal(wb, pars->scaling_points_cb[i][1], 8);
3229 }
3230
3231 aom_wb_write_literal(wb, pars->num_cr_points, 4); // max 10
3232 for (int i = 0; i < pars->num_cr_points; i++) {
3233 aom_wb_write_literal(wb, pars->scaling_points_cr[i][0], 8);
3234 aom_wb_write_literal(wb, pars->scaling_points_cr[i][1], 8);
3235 }
3236 }
3237
3238 aom_wb_write_literal(wb, pars->scaling_shift - 8, 2); // 8 + value
3239
3240 // AR coefficients
3241 // Only sent if the corresponding scaling function has
3242 // more than 0 points
3243
3244 aom_wb_write_literal(wb, pars->ar_coeff_lag, 2);
3245
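 // Number of AR coefficients: the causal neighborhood of extent ar_coeff_lag
 // has 2 * lag * (lag + 1) positions for luma; each chroma plane uses one
 // extra coefficient applied to the co-located luma grain sample.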
3246 int num_pos_luma = 2 * pars->ar_coeff_lag * (pars->ar_coeff_lag + 1);
3247 int num_pos_chroma = num_pos_luma + 1;
3248
3249 if (pars->num_y_points)
3250 for (int i = 0; i < num_pos_luma; i++)
3251 aom_wb_write_literal(wb, pars->ar_coeffs_y[i] + 128, 8);
3252
3253 if (pars->num_cb_points || pars->chroma_scaling_from_luma)
3254 for (int i = 0; i < num_pos_chroma; i++)
3255 aom_wb_write_literal(wb, pars->ar_coeffs_cb[i] + 128, 8);
3256
3257 if (pars->num_cr_points || pars->chroma_scaling_from_luma)
3258 for (int i = 0; i < num_pos_chroma; i++)
3259 aom_wb_write_literal(wb, pars->ar_coeffs_cr[i] + 128, 8);
3260
3261 aom_wb_write_literal(wb, pars->ar_coeff_shift - 6, 2); // 6 + value
3262
Andrey Norkina840cde2018-02-16 15:39:50 -08003263 aom_wb_write_literal(wb, pars->grain_scale_shift, 2);
3264
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003265 if (pars->num_cb_points) {
3266 aom_wb_write_literal(wb, pars->cb_mult, 8);
3267 aom_wb_write_literal(wb, pars->cb_luma_mult, 8);
3268 aom_wb_write_literal(wb, pars->cb_offset, 9);
3269 }
3270
3271 if (pars->num_cr_points) {
3272 aom_wb_write_literal(wb, pars->cr_mult, 8);
3273 aom_wb_write_literal(wb, pars->cr_luma_mult, 8);
3274 aom_wb_write_literal(wb, pars->cr_offset, 9);
3275 }
3276
3277 aom_wb_write_bit(wb, pars->overlap_flag);
3278
3279 aom_wb_write_bit(wb, pars->clip_to_restricted_range);
3280}
3281#endif // CONFIG_FILM_GRAIN
3282
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00003283static void write_sb_size(SequenceHeader *seq_params,
3284 struct aom_write_bit_buffer *wb) {
3285 (void)seq_params;
3286 (void)wb;
3287 assert(seq_params->mib_size == mi_size_wide[seq_params->sb_size]);
3288 assert(seq_params->mib_size == 1 << seq_params->mib_size_log2);
3289#if CONFIG_EXT_PARTITION
3290 assert(seq_params->sb_size == BLOCK_128X128 ||
3291 seq_params->sb_size == BLOCK_64X64);
3292 aom_wb_write_bit(wb, seq_params->sb_size == BLOCK_128X128 ? 1 : 0);
3293#else
3294 assert(seq_params->sb_size == BLOCK_64X64);
3295#endif // CONFIG_EXT_PARTITION
3296}
3297
Rupert Swarbrickb394bfe2017-11-07 17:52:13 +00003298#if CONFIG_REFERENCE_BUFFER || CONFIG_OBU
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003299void write_sequence_header(AV1_COMP *cpi, struct aom_write_bit_buffer *wb) {
3300 AV1_COMMON *const cm = &cpi->common;
David Barker5e70a112017-10-03 14:28:17 +01003301 SequenceHeader *seq_params = &cm->seq_params;
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003302
3303#if CONFIG_FRAME_SIZE
3304 int num_bits_width = 16;
3305 int num_bits_height = 16;
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003306 int max_frame_width = cpi->oxcf.width;
3307 int max_frame_height = cpi->oxcf.height;
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003308
3309 seq_params->num_bits_width = num_bits_width;
3310 seq_params->num_bits_height = num_bits_height;
3311 seq_params->max_frame_width = max_frame_width;
3312 seq_params->max_frame_height = max_frame_height;
3313
3314 aom_wb_write_literal(wb, num_bits_width - 1, 4);
3315 aom_wb_write_literal(wb, num_bits_height - 1, 4);
3316 aom_wb_write_literal(wb, max_frame_width - 1, num_bits_width);
3317 aom_wb_write_literal(wb, max_frame_height - 1, num_bits_height);
3318#endif
3319
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003320 /* Placeholder for actually writing to the bitstream */
Yunqing Wangc2502b52017-07-19 17:44:18 -07003321 seq_params->frame_id_numbers_present_flag =
3322#if CONFIG_EXT_TILE
3323 cm->large_scale_tile ? 0 :
3324#endif // CONFIG_EXT_TILE
Yaowu Xu6eb9da22018-01-23 10:19:17 -08003325 cm->error_resilient_mode;
Sebastien Alaiwand418f682017-10-19 15:06:52 +02003326 seq_params->frame_id_length = FRAME_ID_LENGTH;
3327 seq_params->delta_frame_id_length = DELTA_FRAME_ID_LENGTH;
David Barker5e70a112017-10-03 14:28:17 +01003328
3329 aom_wb_write_bit(wb, seq_params->frame_id_numbers_present_flag);
3330 if (seq_params->frame_id_numbers_present_flag) {
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003331 // We must always have delta_frame_id_length < frame_id_length,
3332 // in order for a frame to be referenced with a unique delta.
3333 // Avoid wasting bits by using a coding that enforces this restriction.
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003334 aom_wb_write_literal(wb, seq_params->delta_frame_id_length - 2, 4);
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003335 aom_wb_write_literal(
3336 wb, seq_params->frame_id_length - seq_params->delta_frame_id_length - 1,
3337 3);
David Barker5e70a112017-10-03 14:28:17 +01003338 }
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00003339
3340 write_sb_size(seq_params, wb);
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00003341
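 // force_screen_content_tools == 2 defers the decision to each frame;
 // otherwise a second bit carries the forced value.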
3342 if (seq_params->force_screen_content_tools == 2) {
3343 aom_wb_write_bit(wb, 1);
3344 } else {
3345 aom_wb_write_bit(wb, 0);
3346 aom_wb_write_bit(wb, seq_params->force_screen_content_tools);
3347 }
3348
3349#if CONFIG_AMVR
3350 if (seq_params->force_screen_content_tools > 0) {
3351 if (seq_params->force_integer_mv == 2) {
3352 aom_wb_write_bit(wb, 1);
3353 } else {
3354 aom_wb_write_bit(wb, 0);
3355 aom_wb_write_bit(wb, seq_params->force_integer_mv);
3356 }
3357 } else {
3358 assert(seq_params->force_integer_mv == 2);
3359 }
3360#endif
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003361}
Rupert Swarbrickb394bfe2017-11-07 17:52:13 +00003362#endif // CONFIG_REFERENCE_BUFFER || CONFIG_OBU
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003363
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003364static void write_compound_tools(const AV1_COMMON *cm,
3365 struct aom_write_bit_buffer *wb) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003366 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
3367 aom_wb_write_bit(wb, cm->allow_interintra_compound);
3368 } else {
3369 assert(cm->allow_interintra_compound == 0);
3370 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003371 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
3372 aom_wb_write_bit(wb, cm->allow_masked_compound);
3373 } else {
3374 assert(cm->allow_masked_compound == 0);
3375 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003376}
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003377
David Barkerd7c8bd52017-09-25 14:47:29 +01003378static void write_global_motion_params(const WarpedMotionParams *params,
3379 const WarpedMotionParams *ref_params,
Sarah Parker3e579a62017-08-23 16:53:20 -07003380 struct aom_write_bit_buffer *wb,
3381 int allow_hp) {
Sebastien Alaiwane4984ff2017-10-31 15:27:44 +01003382 const TransformationType type = params->wmtype;
Sarah Parker3e579a62017-08-23 16:53:20 -07003383
3384 aom_wb_write_bit(wb, type != IDENTITY);
3385 if (type != IDENTITY) {
3386#if GLOBAL_TRANS_TYPES > 4
3387 aom_wb_write_literal(wb, type - 1, GLOBAL_TYPE_BITS);
3388#else
3389 aom_wb_write_bit(wb, type == ROTZOOM);
3390 if (type != ROTZOOM) aom_wb_write_bit(wb, type == TRANSLATION);
3391#endif // GLOBAL_TRANS_TYPES > 4
3392 }
3393
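 // Remaining parameters are coded as differences from the reference frame's
 // parameters with a finite subexponential code; the diagonal terms wmmat[2]
 // and wmmat[5] are centered on (1 << GM_ALPHA_PREC_BITS).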
Sebastien Alaiwane4984ff2017-10-31 15:27:44 +01003394 if (type >= ROTZOOM) {
3395 aom_wb_write_signed_primitive_refsubexpfin(
3396 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3397 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
3398 (1 << GM_ALPHA_PREC_BITS),
3399 (params->wmmat[2] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
3400 aom_wb_write_signed_primitive_refsubexpfin(
3401 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3402 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF),
3403 (params->wmmat[3] >> GM_ALPHA_PREC_DIFF));
3404 }
3405
3406 if (type >= AFFINE) {
3407 aom_wb_write_signed_primitive_refsubexpfin(
3408 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3409 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF),
3410 (params->wmmat[4] >> GM_ALPHA_PREC_DIFF));
3411 aom_wb_write_signed_primitive_refsubexpfin(
3412 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3413 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
3414 (1 << GM_ALPHA_PREC_BITS),
3415 (params->wmmat[5] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
3416 }
3417
3418 if (type >= TRANSLATION) {
3419 const int trans_bits = (type == TRANSLATION)
3420 ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
3421 : GM_ABS_TRANS_BITS;
3422 const int trans_prec_diff = (type == TRANSLATION)
3423 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
3424 : GM_TRANS_PREC_DIFF;
3425 aom_wb_write_signed_primitive_refsubexpfin(
3426 wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
3427 (ref_params->wmmat[0] >> trans_prec_diff),
3428 (params->wmmat[0] >> trans_prec_diff));
3429 aom_wb_write_signed_primitive_refsubexpfin(
3430 wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
3431 (ref_params->wmmat[1] >> trans_prec_diff),
3432 (params->wmmat[1] >> trans_prec_diff));
Sarah Parker3e579a62017-08-23 16:53:20 -07003433 }
3434}
3435
3436static void write_global_motion(AV1_COMP *cpi,
3437 struct aom_write_bit_buffer *wb) {
3438 AV1_COMMON *const cm = &cpi->common;
3439 int frame;
3440 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barkerd7c8bd52017-09-25 14:47:29 +01003441 const WarpedMotionParams *ref_params =
3442 cm->error_resilient_mode ? &default_warp_params
3443 : &cm->prev_frame->global_motion[frame];
3444 write_global_motion_params(&cm->global_motion[frame], ref_params, wb,
Sarah Parker3e579a62017-08-23 16:53:20 -07003445 cm->allow_high_precision_mv);
3446 // TODO(sarahparker, debargha): The logic in the commented out code below
3447 // does not work currently and causes mismatches when resize is on.
3448 // Fix it before turning the optimization back on.
3449 /*
3450 YV12_BUFFER_CONFIG *ref_buf = get_ref_frame_buffer(cpi, frame);
3451 if (cpi->source->y_crop_width == ref_buf->y_crop_width &&
3452 cpi->source->y_crop_height == ref_buf->y_crop_height) {
3453 write_global_motion_params(&cm->global_motion[frame],
3454 &cm->prev_frame->global_motion[frame], wb,
3455 cm->allow_high_precision_mv);
3456 } else {
3457 assert(cm->global_motion[frame].wmtype == IDENTITY &&
3458 "Invalid warp type for frames of different resolutions");
3459 }
3460 */
3461 /*
3462 printf("Frame %d/%d: Enc Ref %d: %d %d %d %d\n",
3463 cm->current_video_frame, cm->show_frame, frame,
3464 cm->global_motion[frame].wmmat[0],
3465 cm->global_motion[frame].wmmat[1], cm->global_motion[frame].wmmat[2],
3466 cm->global_motion[frame].wmmat[3]);
3467 */
3468 }
3469}
Sarah Parker3e579a62017-08-23 16:53:20 -07003470
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003471#if !CONFIG_OBU
3472static void write_uncompressed_header_frame(AV1_COMP *cpi,
3473 struct aom_write_bit_buffer *wb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003474 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003475 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
3476
Yaowu Xuf883b422016-08-30 14:01:10 -07003477 aom_wb_write_literal(wb, AOM_FRAME_MARKER, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003478
3479 write_profile(cm->profile, wb);
3480
Yaowu Xuc27fc142016-08-22 16:08:15 -07003481 // NOTE: By default all coded frames to be used as a reference
3482 cm->is_reference_frame = 1;
3483
3484 if (cm->show_existing_frame) {
3485 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
3486 const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];
3487
3488 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003489 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003490 "Buffer %d does not contain a reconstructed frame",
3491 frame_to_show);
3492 }
3493 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
3494
Yaowu Xuf883b422016-08-30 14:01:10 -07003495 aom_wb_write_bit(wb, 1); // show_existing_frame
3496 aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003497
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003498#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003499 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003500 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003501 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
3502 aom_wb_write_literal(wb, display_frame_id, frame_id_len);
3503 /* Add a zero byte to prevent emulation of superframe marker */
3504 /* Same logic as when terminating the entropy coder */
3505 /* Consider having this logic in only one place */
3506 aom_wb_write_literal(wb, 0, 8);
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003507 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003508#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003509
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003510#if CONFIG_FILM_GRAIN
3511 if (cm->film_grain_params_present) write_film_grain_params(cm, wb);
3512#endif
3513
Zoe Liub4991202017-12-21 15:31:06 -08003514#if CONFIG_FWD_KF
3515 if (cm->reset_decoder_state && !frame_bufs[frame_to_show].intra_only) {
3516 aom_internal_error(
3517 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3518 "show_existing_frame to reset state on non-intra_only");
3519 }
3520 aom_wb_write_bit(wb, cm->reset_decoder_state);
3521#endif // CONFIG_FWD_KF
3522
Yaowu Xuc27fc142016-08-22 16:08:15 -07003523 return;
3524 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003525 aom_wb_write_bit(wb, 0); // show_existing_frame
Yaowu Xuc27fc142016-08-22 16:08:15 -07003526 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003527
Yaowu Xuf883b422016-08-30 14:01:10 -07003528 aom_wb_write_bit(wb, cm->frame_type);
3529 aom_wb_write_bit(wb, cm->show_frame);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003530 if (cm->frame_type != KEY_FRAME)
3531 if (!cm->show_frame) aom_wb_write_bit(wb, cm->intra_only);
Yaowu Xuf883b422016-08-30 14:01:10 -07003532 aom_wb_write_bit(wb, cm->error_resilient_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003533
Pavel Frolov3b95c502017-10-01 21:35:24 +03003534 if (frame_is_intra_only(cm)) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003535#if CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003536 write_sequence_header(cpi, wb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003537#endif // CONFIG_REFERENCE_BUFFER
Joe Youngdb5eb4c2018-02-16 17:30:40 -08003538#if CONFIG_INTRA_EDGE2
3539 aom_wb_write_bit(wb, cm->disable_intra_edge_filter);
3540#endif // CONFIG_INTRA_EDGE2
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003541 }
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00003542
3543 if (cm->seq_params.force_screen_content_tools == 2) {
3544 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
3545 } else {
3546 assert(cm->allow_screen_content_tools ==
3547 cm->seq_params.force_screen_content_tools);
3548 }
3549
3550#if CONFIG_AMVR
3551 if (cm->allow_screen_content_tools) {
3552 if (cm->seq_params.force_integer_mv == 2) {
3553 aom_wb_write_bit(wb, cm->cur_frame_force_integer_mv);
3554 } else {
3555 assert(cm->cur_frame_force_integer_mv == cm->seq_params.force_integer_mv);
3556 }
3557 } else {
3558 assert(cm->cur_frame_force_integer_mv == 0);
3559 }
3560#endif // CONFIG_AMVR
3561
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003562#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003563 cm->invalid_delta_frame_id_minus1 = 0;
3564 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003565 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003566 aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003567 }
3568#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003569
3570#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003571 if (cm->width > cm->seq_params.max_frame_width ||
3572 cm->height > cm->seq_params.max_frame_height) {
3573 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3574 "Frame dimensions are larger than the maximum values");
3575 }
Urvang Joshi94ad3702017-12-06 11:38:08 -08003576#if CONFIG_HORZONLY_FRAME_SUPERRES
David Barker22171312017-11-20 11:26:04 +00003577 const int coded_width = cm->superres_upscaled_width;
3578 const int coded_height = cm->superres_upscaled_height;
3579#else
3580 const int coded_width = cm->width;
3581 const int coded_height = cm->height;
Urvang Joshi94ad3702017-12-06 11:38:08 -08003582#endif // CONFIG_HORZONLY_FRAME_SUPERRES
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003583 int frame_size_override_flag =
David Barker22171312017-11-20 11:26:04 +00003584 (coded_width != cm->seq_params.max_frame_width ||
3585 coded_height != cm->seq_params.max_frame_height);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003586 aom_wb_write_bit(wb, frame_size_override_flag);
3587#endif
3588
Yaowu Xuc27fc142016-08-22 16:08:15 -07003589 if (cm->frame_type == KEY_FRAME) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003590 write_bitdepth_colorspace_sampling(cm, wb);
Andrey Norkin28e9ce22018-01-08 10:11:21 -08003591#if CONFIG_TIMING_INFO_IN_SEQ_HEADERS
3592 // timing_info
3593 write_timing_info_header(cm, wb);
3594#endif
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003595#if CONFIG_FILM_GRAIN
3596 aom_wb_write_bit(wb, cm->film_grain_params_present);
3597#endif
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003598#if CONFIG_FRAME_SIZE
3599 write_frame_size(cm, frame_size_override_flag, wb);
3600#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003601 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003602#endif
Hui Su85878782017-11-07 14:56:31 -08003603#if CONFIG_INTRABC
David Barker218556e2018-02-14 14:23:12 +00003604#if CONFIG_HORZONLY_FRAME_SUPERRES
3605 assert(av1_superres_unscaled(cm) ||
3606 !(cm->allow_intrabc && NO_FILTER_FOR_IBC));
3607 if (cm->allow_screen_content_tools &&
3608 (av1_superres_unscaled(cm) || !NO_FILTER_FOR_IBC))
3609#else
3610 if (cm->allow_screen_content_tools)
3611#endif
3612 aom_wb_write_bit(wb, cm->allow_intrabc);
Hui Su85878782017-11-07 14:56:31 -08003613#endif // CONFIG_INTRABC
Hui Su1cb1c002018-02-05 18:21:20 -08003614#if CONFIG_CDF_UPDATE_MODE
3615 aom_wb_write_literal(wb, cm->cdf_update_mode, 2);
3616#endif // CONFIG_CDF_UPDATE_MODE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003617 } else {
Thomas Daedea6a854b2017-06-22 17:49:11 -07003618#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07003619 if (!cm->error_resilient_mode) {
3620 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003621 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003622 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
3623 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003624 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003625 cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
3626 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
Yaowu Xuf883b422016-08-30 14:01:10 -07003627 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003628 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
3629 }
3630 }
Thomas Daedea6a854b2017-06-22 17:49:11 -07003631#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003632 cpi->refresh_frame_mask = get_refresh_mask(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003633
3634 if (cm->intra_only) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003635 write_bitdepth_colorspace_sampling(cm, wb);
Andrey Norkin28e9ce22018-01-08 10:11:21 -08003636#if CONFIG_TIMING_INFO_IN_SEQ_HEADERS
3637 write_timing_info_header(cm, wb);
3638#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003639
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003640#if CONFIG_FILM_GRAIN
3641 aom_wb_write_bit(wb, cm->film_grain_params_present);
3642#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07003643 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003644#if CONFIG_FRAME_SIZE
3645 write_frame_size(cm, frame_size_override_flag, wb);
3646#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003647 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003648#endif
Hui Sudf89ee32017-11-21 11:47:58 -08003649#if CONFIG_INTRABC
David Barker218556e2018-02-14 14:23:12 +00003650#if CONFIG_HORZONLY_FRAME_SUPERRES
3651 assert(av1_superres_unscaled(cm) ||
3652 !(cm->allow_intrabc && NO_FILTER_FOR_IBC));
3653 if (cm->allow_screen_content_tools &&
3654 (av1_superres_unscaled(cm) || !NO_FILTER_FOR_IBC))
3655#else
Hui Sudf89ee32017-11-21 11:47:58 -08003656 if (cm->allow_screen_content_tools)
David Barker218556e2018-02-14 14:23:12 +00003657#endif
Hui Sudf89ee32017-11-21 11:47:58 -08003658 aom_wb_write_bit(wb, cm->allow_intrabc);
3659#endif // CONFIG_INTRABC
Hui Su1cb1c002018-02-05 18:21:20 -08003660#if CONFIG_CDF_UPDATE_MODE
3661 aom_wb_write_literal(wb, cm->cdf_update_mode, 2);
3662#endif // CONFIG_CDF_UPDATE_MODE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003663 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003664 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003665
Yaowu Xuc27fc142016-08-22 16:08:15 -07003666 if (!cpi->refresh_frame_mask) {
3667 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
3668 // will not be used as a reference
3669 cm->is_reference_frame = 0;
3670 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003671
Zoe Liuf40a9572017-10-13 12:37:19 -07003672 for (MV_REFERENCE_FRAME ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME;
3673 ++ref_frame) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003674 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
Yaowu Xuf883b422016-08-30 14:01:10 -07003675 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
Yaowu Xuc27fc142016-08-22 16:08:15 -07003676 REF_FRAMES_LOG2);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003677#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003678 if (cm->seq_params.frame_id_numbers_present_flag) {
3679 int i = get_ref_frame_map_idx(cpi, ref_frame);
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003680 int frame_id_len = cm->seq_params.frame_id_length;
3681 int diff_len = cm->seq_params.delta_frame_id_length;
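 // Code the difference to this reference's frame id, wrapped modulo
 // 2^frame_id_len and reduced by 1, in diff_len bits.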
David Barker5e70a112017-10-03 14:28:17 +01003682 int delta_frame_id_minus1 =
3683 ((cm->current_frame_id - cm->ref_frame_id[i] +
3684 (1 << frame_id_len)) %
3685 (1 << frame_id_len)) -
3686 1;
3687 if (delta_frame_id_minus1 < 0 ||
3688 delta_frame_id_minus1 >= (1 << diff_len))
3689 cm->invalid_delta_frame_id_minus1 = 1;
3690 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003691 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003692#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07003693 }
3694
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003695#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003696 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003697 write_frame_size_with_refs(cpi, wb);
3698 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003699 write_frame_size(cm, frame_size_override_flag, wb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003700 }
3701#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003702 write_frame_size_with_refs(cpi, wb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003703#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003704
RogerZhou3b635242017-09-19 10:06:46 -07003705#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003706 if (cm->cur_frame_force_integer_mv) {
3707 cm->allow_high_precision_mv = 0;
3708 } else {
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003709#if !CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003710 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003711#endif // !CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003712 }
3713#else
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003714#if !CONFIG_EIGHTH_PEL_MV_ONLY
Yaowu Xuf883b422016-08-30 14:01:10 -07003715 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003716#endif // !CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003717#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003718 fix_interp_filter(cm, cpi->td.counts);
Angie Chiang5678ad92016-11-21 09:38:40 -08003719 write_frame_interp_filter(cm->interp_filter, wb);
Jingning Hane17ebe92017-11-03 15:25:42 -07003720 if (frame_might_use_prev_frame_mvs(cm))
3721 aom_wb_write_bit(wb, cm->use_ref_frame_mvs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003722 }
3723 }
3724
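 // For frames that are not shown immediately (e.g. ALTREF/BWDREF sources),
 // signal the source frame offset, capped at MAX_GF_INTERVAL - 1.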
Jingning Hanc723b342017-08-24 11:19:46 -07003725 if (cm->show_frame == 0) {
3726 int arf_offset = AOMMIN(
3727 (MAX_GF_INTERVAL - 1),
3728 cpi->twopass.gf_group.arf_src_offset[cpi->twopass.gf_group.index]);
Jingning Hanc723b342017-08-24 11:19:46 -07003729 int brf_offset =
3730 cpi->twopass.gf_group.brf_src_offset[cpi->twopass.gf_group.index];
3731
3732 arf_offset = AOMMIN((MAX_GF_INTERVAL - 1), arf_offset + brf_offset);
Cheng Chend300f0e2017-12-01 10:46:23 -08003733 aom_wb_write_literal(wb, arf_offset, FRAME_OFFSET_BITS);
Jingning Hanc723b342017-08-24 11:19:46 -07003734 }
Jingning Hanc723b342017-08-24 11:19:46 -07003735
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003736#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003737 if (cm->seq_params.frame_id_numbers_present_flag) {
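 // Key frames refresh every reference slot; other frame types use the
 // refresh mask chosen by the encoder.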
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003738 cm->refresh_mask =
3739 cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
3740 }
3741#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003742
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01003743#if CONFIG_EXT_TILE
3744 const int might_bwd_adapt =
3745 !(cm->error_resilient_mode || cm->large_scale_tile);
3746#else
3747 const int might_bwd_adapt = !cm->error_resilient_mode;
3748#endif // CONFIG_EXT_TILE
3749 if (might_bwd_adapt) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003750 aom_wb_write_bit(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003751 wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
3752 }
Thomas Daededa4d8b92017-06-05 15:44:14 -07003753#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuf883b422016-08-30 14:01:10 -07003754 aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);
Thomas Daededa4d8b92017-06-05 15:44:14 -07003755#endif
Cyril Concolato3b5afc12017-12-15 12:54:15 -08003756
3757#if CONFIG_TILE_INFO_FIRST
3758 write_tile_info(cm, wb);
3759#endif
3760
Yaowu Xuc27fc142016-08-22 16:08:15 -07003761 encode_loopfilter(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003762 encode_quantization(cm, wb);
3763 encode_segmentation(cm, xd, wb);
Arild Fuldseth07441162016-08-15 15:07:52 +02003764 {
Thomas Davies28444be2017-10-13 18:12:25 +01003765 int delta_q_allowed = 1;
3766#if !CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003767 int i;
3768 struct segmentation *const seg = &cm->seg;
3769 int segment_quantizer_active = 0;
3770 for (i = 0; i < MAX_SEGMENTS; i++) {
3771 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
3772 segment_quantizer_active = 1;
3773 }
3774 }
Thomas Davies28444be2017-10-13 18:12:25 +01003775 delta_q_allowed = !segment_quantizer_active;
3776#endif
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01003777
Yaowu Xu288f8162017-10-10 15:03:22 -07003778 if (cm->delta_q_present_flag) assert(cm->base_qindex > 0);
Thomas Davies28444be2017-10-13 18:12:25 +01003779 // Segment quantizer and delta_q both allowed if CONFIG_EXT_DELTA_Q
3780 if (delta_q_allowed == 1 && cm->base_qindex > 0) {
Arild Fuldseth07441162016-08-15 15:07:52 +02003781 aom_wb_write_bit(wb, cm->delta_q_present_flag);
3782 if (cm->delta_q_present_flag) {
Thomas Daviesf6936102016-09-05 16:51:31 +01003783 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
Arild Fuldseth07441162016-08-15 15:07:52 +02003784 xd->prev_qindex = cm->base_qindex;
Fangwen Fu231fe422017-04-24 17:52:29 -07003785#if CONFIG_EXT_DELTA_Q
Fangwen Fu231fe422017-04-24 17:52:29 -07003786 aom_wb_write_bit(wb, cm->delta_lf_present_flag);
3787 if (cm->delta_lf_present_flag) {
3788 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_lf_res) - 1, 2);
Cheng Chen880166a2017-10-02 17:48:48 -07003789 xd->prev_delta_lf_from_base = 0;
Cheng Chena97394f2017-09-27 15:05:14 -07003790#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen880166a2017-10-02 17:48:48 -07003791 aom_wb_write_bit(wb, cm->delta_lf_multi);
Cheng Chena97394f2017-09-27 15:05:14 -07003792 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
3793 xd->prev_delta_lf[lf_id] = 0;
3794#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07003795 }
3796#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003797 }
3798 }
3799 }
Thomas Daedef636d5c2017-06-29 13:48:27 -07003800 if (!cm->all_lossless) {
3801 encode_cdef(cm, wb);
3802 }
Thomas Daedef636d5c2017-06-29 13:48:27 -07003803#if CONFIG_LOOP_RESTORATION
3804 encode_restoration_mode(cm, wb);
3805#endif // CONFIG_LOOP_RESTORATION
3806 write_tx_mode(cm, &cm->tx_mode, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003807
3808 if (cpi->allow_comp_inter_inter) {
3809 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003810
Yaowu Xuf883b422016-08-30 14:01:10 -07003811 aom_wb_write_bit(wb, use_hybrid_pred);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003812 }
Zoe Liu4b847e12017-12-07 12:44:45 -08003813
3814#if CONFIG_EXT_SKIP
3815 if (cm->is_skip_mode_allowed) aom_wb_write_bit(wb, cm->skip_mode_flag);
3816#endif // CONFIG_EXT_SKIP
3817
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003818 write_compound_tools(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003819
Sarah Parkere68a3e42017-02-16 14:03:24 -08003820 aom_wb_write_bit(wb, cm->reduced_tx_set_used);
Sarah Parkere68a3e42017-02-16 14:03:24 -08003821
Sarah Parkerf289f9f2017-09-12 18:50:02 -07003822 if (!frame_is_intra_only(cm)) write_global_motion(cpi, wb);
Sarah Parker3e579a62017-08-23 16:53:20 -07003823
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003824#if CONFIG_FILM_GRAIN
3825 if (cm->film_grain_params_present && cm->show_frame)
3826 write_film_grain_params(cm, wb);
3827#endif
3828
Cyril Concolato3b5afc12017-12-15 12:54:15 -08003829#if !CONFIG_TILE_INFO_FIRST
Yaowu Xuc27fc142016-08-22 16:08:15 -07003830 write_tile_info(cm, wb);
Cyril Concolato3b5afc12017-12-15 12:54:15 -08003831#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003832}
3833
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003834#else
3835// New function based on HLS R18
3836static void write_uncompressed_header_obu(AV1_COMP *cpi,
Jingning Handa11e692017-12-19 08:45:08 -08003837#if CONFIG_EXT_TILE
3838 struct aom_write_bit_buffer *saved_wb,
3839#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003840 struct aom_write_bit_buffer *wb) {
3841 AV1_COMMON *const cm = &cpi->common;
3842 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
3843
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003844 // NOTE: By default all coded frames to be used as a reference
3845 cm->is_reference_frame = 1;
3846
3847 if (cm->show_existing_frame) {
3848 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
3849 const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];
3850
3851 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
3852 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3853 "Buffer %d does not contain a reconstructed frame",
3854 frame_to_show);
3855 }
3856 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
3857
3858 aom_wb_write_bit(wb, 1); // show_existing_frame
3859 aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
3860
3861#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003862 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003863 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003864 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
3865 aom_wb_write_literal(wb, display_frame_id, frame_id_len);
3866 /* Add a zero byte to prevent emulation of superframe marker */
3867 /* Same logic as when terminating the entropy coder */
3868 /* Consider having this logic in only one place */
3869 aom_wb_write_literal(wb, 0, 8);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003870 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003871#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003872
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003873#if CONFIG_FILM_GRAIN
3874 if (cm->film_grain_params_present && cm->show_frame) {
3875 int flip_back_update_parameters_flag = 0;
3876 if (cm->frame_type == KEY_FRAME &&
3877 cm->film_grain_params.update_parameters == 0) {
3878 cm->film_grain_params.update_parameters = 1;
3879 flip_back_update_parameters_flag = 1;
3880 }
3881 write_film_grain_params(cm, wb);
3882
3883 if (flip_back_update_parameters_flag)
3884 cm->film_grain_params.update_parameters = 0;
3885 }
3886#endif
3887
Zoe Liub4991202017-12-21 15:31:06 -08003888#if CONFIG_FWD_KF
3889 if (cm->reset_decoder_state && !frame_bufs[frame_to_show].intra_only) {
3890 aom_internal_error(
3891 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3892 "show_existing_frame to reset state on non-intra_only");
3893 }
3894 aom_wb_write_bit(wb, cm->reset_decoder_state);
3895#endif // CONFIG_FWD_KF
3896
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003897 return;
3898 } else {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003899 aom_wb_write_bit(wb, 0); // show_existing_frame
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003900 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003901
3902 cm->frame_type = cm->intra_only ? INTRA_ONLY_FRAME : cm->frame_type;
3903 aom_wb_write_literal(wb, cm->frame_type, 2);
3904
3905 if (cm->intra_only) cm->frame_type = INTRA_ONLY_FRAME;
3906
3907 aom_wb_write_bit(wb, cm->show_frame);
3908 aom_wb_write_bit(wb, cm->error_resilient_mode);
3909
Joe Youngdb5eb4c2018-02-16 17:30:40 -08003910#if CONFIG_INTRA_EDGE2
3911 if (frame_is_intra_only(cm)) {
3912 aom_wb_write_bit(wb, cm->disable_intra_edge_filter);
3913 }
3914#endif // CONFIG_INTRA_EDGE2
3915
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00003916 if (cm->seq_params.force_screen_content_tools == 2) {
3917 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
3918 } else {
3919 assert(cm->allow_screen_content_tools ==
3920 cm->seq_params.force_screen_content_tools);
3921 }
3922
3923#if CONFIG_AMVR
3924 if (cm->allow_screen_content_tools) {
3925 if (cm->seq_params.force_integer_mv == 2) {
3926 aom_wb_write_bit(wb, cm->cur_frame_force_integer_mv);
3927 } else {
3928 assert(cm->cur_frame_force_integer_mv == cm->seq_params.force_integer_mv);
3929 }
3930 } else {
3931 assert(cm->cur_frame_force_integer_mv == 0);
3932 }
3933#endif // CONFIG_AMVR
3934
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003935#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003936 cm->invalid_delta_frame_id_minus1 = 0;
3937 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003938 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003939 aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003940 }
3941#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003942
3943#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003944 if (cm->width > cm->seq_params.max_frame_width ||
3945 cm->height > cm->seq_params.max_frame_height) {
3946 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3947 "Frame dimensions are larger than the maximum values");
3948 }
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003949 int frame_size_override_flag =
3950 (cm->width != cm->seq_params.max_frame_width ||
3951 cm->height != cm->seq_params.max_frame_height);
3952 aom_wb_write_bit(wb, frame_size_override_flag);
3953#endif
3954
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003955 if (cm->frame_type == KEY_FRAME) {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003956#if CONFIG_FRAME_SIZE
3957 write_frame_size(cm, frame_size_override_flag, wb);
3958#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003959 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003960#endif
Hui Suad7536b2017-12-13 15:48:11 -08003961#if CONFIG_INTRABC
David Barker218556e2018-02-14 14:23:12 +00003962#if CONFIG_HORZONLY_FRAME_SUPERRES
3963 assert(av1_superres_unscaled(cm) ||
3964 !(cm->allow_intrabc && NO_FILTER_FOR_IBC));
3965 if (cm->allow_screen_content_tools &&
3966 (av1_superres_unscaled(cm) || !NO_FILTER_FOR_IBC))
3967#else
3968 if (cm->allow_screen_content_tools)
3969#endif
3970 aom_wb_write_bit(wb, cm->allow_intrabc);
Hui Suad7536b2017-12-13 15:48:11 -08003971#endif // CONFIG_INTRABC
Hui Su1cb1c002018-02-05 18:21:20 -08003972#if CONFIG_CDF_UPDATE_MODE
3973 aom_wb_write_literal(wb, cm->cdf_update_mode, 2);
3974#endif // CONFIG_CDF_UPDATE_MODE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003975 } else if (cm->frame_type == INTRA_ONLY_FRAME) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003976#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
3977 if (!cm->error_resilient_mode) {
3978 if (cm->intra_only) {
3979 aom_wb_write_bit(wb,
3980 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
3981 }
3982 }
3983#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003984 cpi->refresh_frame_mask = get_refresh_mask(cpi);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003985
3986 if (cm->intra_only) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003987 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003988#if CONFIG_FRAME_SIZE
3989 write_frame_size(cm, frame_size_override_flag, wb);
3990#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003991 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003992#endif
Hui Suad7536b2017-12-13 15:48:11 -08003993#if CONFIG_INTRABC
David Barker218556e2018-02-14 14:23:12 +00003994#if CONFIG_HORZONLY_FRAME_SUPERRES
3995 assert(av1_superres_unscaled(cm) ||
3996 !(cm->allow_intrabc && NO_FILTER_FOR_IBC));
3997 if (cm->allow_screen_content_tools &&
3998 (av1_superres_unscaled(cm) || !NO_FILTER_FOR_IBC))
3999#else
Hui Suad7536b2017-12-13 15:48:11 -08004000 if (cm->allow_screen_content_tools)
David Barker218556e2018-02-14 14:23:12 +00004001#endif
Hui Suad7536b2017-12-13 15:48:11 -08004002 aom_wb_write_bit(wb, cm->allow_intrabc);
4003#endif // CONFIG_INTRABC
Hui Su1cb1c002018-02-05 18:21:20 -08004004#if CONFIG_CDF_UPDATE_MODE
4005 aom_wb_write_literal(wb, cm->cdf_update_mode, 2);
4006#endif // CONFIG_CDF_UPDATE_MODE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004007 }
4008 } else if (cm->frame_type == INTER_FRAME) {
4009 MV_REFERENCE_FRAME ref_frame;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004010#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4011 if (!cm->error_resilient_mode) {
4012 aom_wb_write_bit(wb, cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
4013 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
4014 aom_wb_write_bit(wb,
4015 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4016 }
4017#endif
4018
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004019 cpi->refresh_frame_mask = get_refresh_mask(cpi);
4020 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004021
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004022 if (!cpi->refresh_frame_mask) {
4023 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
4024 // will not be used as a reference
4025 cm->is_reference_frame = 0;
4026 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004027
4028 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4029 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
4030 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
4031 REF_FRAMES_LOG2);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004032#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004033 if (cm->seq_params.frame_id_numbers_present_flag) {
4034 int i = get_ref_frame_map_idx(cpi, ref_frame);
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02004035 int frame_id_len = cm->seq_params.frame_id_length;
4036 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01004037 int delta_frame_id_minus1 =
4038 ((cm->current_frame_id - cm->ref_frame_id[i] +
4039 (1 << frame_id_len)) %
4040 (1 << frame_id_len)) -
4041 1;
4042 if (delta_frame_id_minus1 < 0 ||
4043 delta_frame_id_minus1 >= (1 << diff_len))
4044 cm->invalid_delta_frame_id_minus1 = 1;
4045 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004046 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004047#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004048 }
4049
4050#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004051 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004052 write_frame_size_with_refs(cpi, wb);
4053 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004054 write_frame_size(cm, frame_size_override_flag, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004055 }
4056#else
4057 write_frame_size_with_refs(cpi, wb);
4058#endif
4059
4060#if CONFIG_AMVR
RogerZhou0bf36902017-12-19 13:51:10 -08004061 if (cm->cur_frame_force_integer_mv) {
4062 cm->allow_high_precision_mv = 0;
4063 } else {
4064 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
4065 }
4066#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004067 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
RogerZhou0bf36902017-12-19 13:51:10 -08004068#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004069 fix_interp_filter(cm, cpi->td.counts);
4070 write_frame_interp_filter(cm->interp_filter, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004071 if (frame_might_use_prev_frame_mvs(cm)) {
Jingning Han923f8272017-12-14 10:50:12 -08004072 aom_wb_write_bit(wb, cm->use_ref_frame_mvs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004073 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004074 } else if (cm->frame_type == S_FRAME) {
4075 MV_REFERENCE_FRAME ref_frame;
4076
4077#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4078 if (!cm->error_resilient_mode) {
4079 aom_wb_write_bit(wb, cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
4080 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
4081 aom_wb_write_bit(wb,
4082 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4083 }
4084#endif
4085
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004086 if (!cpi->refresh_frame_mask) {
4087 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
4088 // will not be used as a reference
4089 cm->is_reference_frame = 0;
4090 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004091
4092 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4093 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
4094 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
4095 REF_FRAMES_LOG2);
4096 assert(cm->ref_frame_sign_bias[ref_frame] == 0);
4097#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004098 if (cm->seq_params.frame_id_numbers_present_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004099 int i = get_ref_frame_map_idx(cpi, ref_frame);
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02004100 int frame_id_len = cm->seq_params.frame_id_length;
4101 int diff_len = cm->seq_params.delta_frame_id_length;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004102 int delta_frame_id_minus1 =
4103 ((cm->current_frame_id - cm->ref_frame_id[i] +
4104 (1 << frame_id_len)) %
4105 (1 << frame_id_len)) -
4106 1;
4107 if (delta_frame_id_minus1 < 0 ||
4108 delta_frame_id_minus1 >= (1 << diff_len))
4109 cm->invalid_delta_frame_id_minus1 = 1;
4110 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
4111 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004112#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004113 }
4114
4115#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004116 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004117 write_frame_size_with_refs(cpi, wb);
4118 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004119 write_frame_size(cm, frame_size_override_flag, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004120 }
4121#else
4122 write_frame_size_with_refs(cpi, wb);
4123#endif
4124
4125 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
4126
4127 fix_interp_filter(cm, cpi->td.counts);
4128 write_frame_interp_filter(cm->interp_filter, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004129 if (frame_might_use_prev_frame_mvs(cm)) {
Jingning Han923f8272017-12-14 10:50:12 -08004130 aom_wb_write_bit(wb, cm->use_ref_frame_mvs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004131 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004132 }
4133
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004134 if (cm->show_frame == 0) {
4135 int arf_offset = AOMMIN(
4136 (MAX_GF_INTERVAL - 1),
4137 cpi->twopass.gf_group.arf_src_offset[cpi->twopass.gf_group.index]);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004138 int brf_offset =
4139 cpi->twopass.gf_group.brf_src_offset[cpi->twopass.gf_group.index];
4140
4141 arf_offset = AOMMIN((MAX_GF_INTERVAL - 1), arf_offset + brf_offset);
Cheng Chend300f0e2017-12-01 10:46:23 -08004142 aom_wb_write_literal(wb, arf_offset, FRAME_OFFSET_BITS);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004143 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004144
4145#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004146 if (cm->seq_params.frame_id_numbers_present_flag) {
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004147 cm->refresh_mask =
4148 cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
4149 }
4150#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004151
Jingning Handa11e692017-12-19 08:45:08 -08004152#if CONFIG_EXT_TILE
4153 const int might_bwd_adapt =
4154 !(cm->error_resilient_mode || cm->large_scale_tile);
4155#else
4156 const int might_bwd_adapt = !cm->error_resilient_mode;
4157#endif // CONFIG_EXT_TILE
4158
4159 if (might_bwd_adapt) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004160 aom_wb_write_bit(
4161 wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
4162 }
4163#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4164 aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);
4165#endif
Cyril Concolato3b5afc12017-12-15 12:54:15 -08004166#if CONFIG_TILE_INFO_FIRST
4167 write_tile_info(cm, wb);
4168#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004169 encode_loopfilter(cm, wb);
4170 encode_quantization(cm, wb);
4171 encode_segmentation(cm, xd, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004172 {
Thomas Davies28444be2017-10-13 18:12:25 +01004173 int delta_q_allowed = 1;
4174#if !CONFIG_EXT_DELTA_Q
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004175 int i;
4176 struct segmentation *const seg = &cm->seg;
4177 int segment_quantizer_active = 0;
4178 for (i = 0; i < MAX_SEGMENTS; i++) {
4179 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
4180 segment_quantizer_active = 1;
4181 }
4182 }
Thomas Davies28444be2017-10-13 18:12:25 +01004183 delta_q_allowed = !segment_quantizer_active;
4184#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004185
4186 if (cm->delta_q_present_flag)
Thomas Davies28444be2017-10-13 18:12:25 +01004187 assert(delta_q_allowed == 1 && cm->base_qindex > 0);
4188 if (delta_q_allowed == 1 && cm->base_qindex > 0) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004189 aom_wb_write_bit(wb, cm->delta_q_present_flag);
4190 if (cm->delta_q_present_flag) {
4191 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
4192 xd->prev_qindex = cm->base_qindex;
4193#if CONFIG_EXT_DELTA_Q
Hui Su22a51d92018-01-16 13:02:18 -08004194#if CONFIG_INTRABC
4195 if (cm->allow_intrabc && NO_FILTER_FOR_IBC)
4196 assert(cm->delta_lf_present_flag == 0);
4197 else
4198#endif // CONFIG_INTRABC
4199 aom_wb_write_bit(wb, cm->delta_lf_present_flag);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004200 if (cm->delta_lf_present_flag) {
4201 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_lf_res) - 1, 2);
Yaowu Xub02d0b12017-12-15 01:32:34 +00004202 xd->prev_delta_lf_from_base = 0;
Cheng Chena97394f2017-09-27 15:05:14 -07004203#if CONFIG_LOOPFILTER_LEVEL
Yaowu Xub02d0b12017-12-15 01:32:34 +00004204 aom_wb_write_bit(wb, cm->delta_lf_multi);
Cheng Chena97394f2017-09-27 15:05:14 -07004205 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
4206 xd->prev_delta_lf[lf_id] = 0;
4207#endif // CONFIG_LOOPFILTER_LEVEL
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004208 }
4209#endif // CONFIG_EXT_DELTA_Q
4210 }
4211 }
4212 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004213 if (!cm->all_lossless) {
4214 encode_cdef(cm, wb);
4215 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004216#if CONFIG_LOOP_RESTORATION
4217 encode_restoration_mode(cm, wb);
4218#endif // CONFIG_LOOP_RESTORATION
4219 write_tx_mode(cm, &cm->tx_mode, wb);
4220
4221 if (cpi->allow_comp_inter_inter) {
4222 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004223
4224 aom_wb_write_bit(wb, use_hybrid_pred);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004225 }
Zoe Liu4b847e12017-12-07 12:44:45 -08004226
4227#if CONFIG_EXT_SKIP
4228#if 0
4229 printf("\n[ENCODER] Frame=%d, is_skip_mode_allowed=%d, skip_mode_flag=%d\n\n",
4230 (int)cm->frame_offset, cm->is_skip_mode_allowed, cm->skip_mode_flag);
4231#endif // 0
4232 if (cm->is_skip_mode_allowed) aom_wb_write_bit(wb, cm->skip_mode_flag);
4233#endif // CONFIG_EXT_SKIP
4234
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004235 write_compound_tools(cm, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004236
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004237 aom_wb_write_bit(wb, cm->reduced_tx_set_used);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004238
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004239 if (!frame_is_intra_only(cm)) write_global_motion(cpi, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004240
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08004241#if CONFIG_FILM_GRAIN
4242 if (cm->film_grain_params_present && cm->show_frame) {
4243 int flip_back_update_parameters_flag = 0;
4244 if (cm->frame_type == KEY_FRAME &&
4245 cm->film_grain_params.update_parameters == 0) {
4246 cm->film_grain_params.update_parameters = 1;
4247 flip_back_update_parameters_flag = 1;
4248 }
4249 write_film_grain_params(cm, wb);
4250
4251 if (flip_back_update_parameters_flag)
4252 cm->film_grain_params.update_parameters = 0;
4253 }
4254#endif
4255
Cyril Concolato3b5afc12017-12-15 12:54:15 -08004256#if !CONFIG_TILE_INFO_FIRST
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004257 write_tile_info(cm, wb);
Jingning Handa11e692017-12-19 08:45:08 -08004258
4259#if CONFIG_EXT_TILE
4260 *saved_wb = *wb;
4261 // Write tile size magnitudes
Jingning Han2e7d66d2018-01-02 11:27:48 -08004262 if (cm->tile_rows * cm->tile_cols > 1 && cm->large_scale_tile) {
Jingning Handa11e692017-12-19 08:45:08 -08004263 // Note that the last item in the uncompressed header is the data
4264 // describing tile configuration.
4265 // Number of bytes in tile column size - 1
4266 aom_wb_write_literal(wb, 0, 2);
4267
4268 // Number of bytes in tile size - 1
4269 aom_wb_write_literal(wb, 0, 2);
4270 }
Cyril Concolato3b5afc12017-12-15 12:54:15 -08004271#endif
Jingning Handa11e692017-12-19 08:45:08 -08004272#endif // !CONFIG_TILE_INFO_FIRST
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004273}
4274#endif // CONFIG_OBU
4275
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004276#if !CONFIG_OBU || CONFIG_EXT_TILE
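// A rough worked example of choose_size_bytes(): a size of 0x1234 with no
// spare MSBs needs 2 bytes, while 0x123456 with one spare MSB still needs
// 3 bytes after the value is shifted up, so 3 is returned; -1 signals that
// the size cannot be represented in 32 bits once the spare bits are reserved.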
Yaowu Xuc27fc142016-08-22 16:08:15 -07004277static int choose_size_bytes(uint32_t size, int spare_msbs) {
4278 // Choose the number of bytes required to represent size, without
4279 // using the 'spare_msbs' number of most significant bits.
4280
4281 // Make sure we will fit in 4 bytes to start with..
 4282  // Make sure we will fit in 4 bytes to start with.
4283
4284 // Normalise to 32 bits
4285 size <<= spare_msbs;
4286
4287 if (size >> 24 != 0)
4288 return 4;
4289 else if (size >> 16 != 0)
4290 return 3;
4291 else if (size >> 8 != 0)
4292 return 2;
4293 else
4294 return 1;
4295}
4296
4297static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) {
4298 switch (sz) {
4299 case 1: dst[0] = (uint8_t)(val & 0xff); break;
4300 case 2: mem_put_le16(dst, val); break;
4301 case 3: mem_put_le24(dst, val); break;
4302 case 4: mem_put_le32(dst, val); break;
James Zern06c372d2017-04-20 16:08:29 -07004303 default: assert(0 && "Invalid size"); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004304 }
4305}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004306
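// remux_tiles() compacts the tile data in place: the provisional 4-byte size
// fields written during encoding are rewritten with the narrower byte counts
// picked by choose_size_bytes(), and each tile payload is memmove()d down to
// close the gap. For example, four tiles of at most 200 bytes give a 1-byte
// tile size field, so the three non-final tile headers shrink by 3 bytes each
// and the returned size is 9 bytes smaller than the input.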
Yaowu Xuf883b422016-08-30 14:01:10 -07004307static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004308 const uint32_t data_size, const uint32_t max_tile_size,
4309 const uint32_t max_tile_col_size,
4310 int *const tile_size_bytes,
4311 int *const tile_col_size_bytes) {
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004312 // Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
4313 int tsb;
4314 int tcsb;
4315
Yaowu Xuc27fc142016-08-22 16:08:15 -07004316#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004317 if (cm->large_scale_tile) {
4318 // The top bit in the tile size field indicates tile copy mode, so we
4319 // have 1 less bit to code the tile size
4320 tsb = choose_size_bytes(max_tile_size, 1);
4321 tcsb = choose_size_bytes(max_tile_col_size, 0);
4322 } else {
4323#endif // CONFIG_EXT_TILE
4324 tsb = choose_size_bytes(max_tile_size, 0);
4325 tcsb = 4; // This is ignored
4326 (void)max_tile_col_size;
4327#if CONFIG_EXT_TILE
4328 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004329#endif // CONFIG_EXT_TILE
4330
4331 assert(tsb > 0);
4332 assert(tcsb > 0);
4333
4334 *tile_size_bytes = tsb;
4335 *tile_col_size_bytes = tcsb;
4336
4337 if (tsb == 4 && tcsb == 4) {
4338 return data_size;
4339 } else {
4340 uint32_t wpos = 0;
4341 uint32_t rpos = 0;
4342
4343#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004344 if (cm->large_scale_tile) {
4345 int tile_row;
4346 int tile_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004347
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004348 for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
4349 // All but the last column has a column header
4350 if (tile_col < cm->tile_cols - 1) {
4351 uint32_t tile_col_size = mem_get_le32(dst + rpos);
4352 rpos += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004353
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004354 // Adjust the tile column size by the number of bytes removed
4355 // from the tile size fields.
4356 tile_col_size -= (4 - tsb) * cm->tile_rows;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004357
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004358 mem_put_varsize(dst + wpos, tcsb, tile_col_size);
4359 wpos += tcsb;
4360 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004361
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004362 for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
4363 // All, including the last row has a header
4364 uint32_t tile_header = mem_get_le32(dst + rpos);
4365 rpos += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004366
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004367 // If this is a copy tile, we need to shift the MSB to the
4368 // top bit of the new width, and there is no data to copy.
4369 if (tile_header >> 31 != 0) {
4370 if (tsb < 4) tile_header >>= 32 - 8 * tsb;
4371 mem_put_varsize(dst + wpos, tsb, tile_header);
4372 wpos += tsb;
4373 } else {
4374 mem_put_varsize(dst + wpos, tsb, tile_header);
4375 wpos += tsb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004376
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004377 memmove(dst + wpos, dst + rpos, tile_header);
4378 rpos += tile_header;
4379 wpos += tile_header;
4380 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004381 }
4382 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004383 } else {
4384#endif // CONFIG_EXT_TILE
4385 const int n_tiles = cm->tile_cols * cm->tile_rows;
4386 int n;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004387
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004388 for (n = 0; n < n_tiles; n++) {
4389 int tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004390
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004391 if (n == n_tiles - 1) {
4392 tile_size = data_size - rpos;
4393 } else {
4394 tile_size = mem_get_le32(dst + rpos);
4395 rpos += 4;
4396 mem_put_varsize(dst + wpos, tsb, tile_size);
4397 wpos += tsb;
4398 }
4399
4400 memmove(dst + wpos, dst + rpos, tile_size);
4401
4402 rpos += tile_size;
4403 wpos += tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004404 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004405#if CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004406 }
4407#endif // CONFIG_EXT_TILE
4408
4409 assert(rpos > wpos);
4410 assert(rpos == data_size);
4411
4412 return wpos;
4413 }
4414}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004415#endif
4416
4417#if CONFIG_OBU
Soo-Chul Han38427e82017-09-27 15:06:13 -04004418
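// Writes the draft OBU header: a zero forbidden bit, a 4-bit obu_type, two
// reserved bits, a 1-bit extension flag and, when present, one extension
// byte (used below to carry the temporal/enhancement layer ids when
// CONFIG_SCALABILITY is enabled).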
4419uint32_t write_obu_header(OBU_TYPE obu_type, int obu_extension,
4420 uint8_t *const dst) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004421 struct aom_write_bit_buffer wb = { dst, 0 };
4422 uint32_t size = 0;
4423
Soo-Chul Han38427e82017-09-27 15:06:13 -04004424 // first bit is obu_forbidden_bit according to R19
4425 aom_wb_write_literal(&wb, 0, 1);
4426 aom_wb_write_literal(&wb, (int)obu_type, 4);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004427 aom_wb_write_literal(&wb, 0, 2);
4428 aom_wb_write_literal(&wb, obu_extension ? 1 : 0, 1);
4429 if (obu_extension) {
4430 aom_wb_write_literal(&wb, obu_extension & 0xFF, 8);
4431 }
4432
4433 size = aom_wb_bytes_written(&wb);
4434 return size;
4435}
4436
Tom Finegan41150ad2018-01-23 11:42:55 -08004437#if CONFIG_OBU_SIZING
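// Illustrative LEB128 sizing: an OBU of 300 bytes is coded as the two bytes
// 0xAC 0x02 (low seven bits 0x2C with the continuation bit set, then 0x02),
// so aom_uleb_size_in_bytes(300) == 2, while any size below 128 needs a
// single byte.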
4438int write_uleb_obu_size(uint32_t obu_size, uint8_t *dest) {
4439 size_t coded_obu_size = 0;
4440
Tom Fineganf4129062018-02-08 08:32:42 -08004441 if (aom_uleb_encode(obu_size, sizeof(obu_size), dest, &coded_obu_size) != 0)
Tom Finegan41150ad2018-01-23 11:42:55 -08004442 return AOM_CODEC_ERROR;
Tom Finegan41150ad2018-01-23 11:42:55 -08004443
4444 return AOM_CODEC_OK;
4445}
4446#endif // CONFIG_OBU_SIZING
4447
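// The sequence header OBU payload is written in this order: profile, four
// reserved bits, the scalability fields when CONFIG_SCALABILITY is enabled,
// the sequence header fields, the color config, optional timing info and the
// film grain present flag.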
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004448static uint32_t write_sequence_header_obu(AV1_COMP *cpi, uint8_t *const dst
4449#if CONFIG_SCALABILITY
4450 ,
4451 uint8_t enhancement_layers_cnt) {
4452#else
Johannb0ef6ff2018-02-08 14:32:21 -08004453) {
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004454#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004455 AV1_COMMON *const cm = &cpi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004456 struct aom_write_bit_buffer wb = { dst, 0 };
4457 uint32_t size = 0;
4458
4459 write_profile(cm->profile, &wb);
4460
4461 aom_wb_write_literal(&wb, 0, 4);
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004462#if CONFIG_SCALABILITY
4463 aom_wb_write_literal(&wb, enhancement_layers_cnt, 2);
4464 int i;
4465 for (i = 1; i <= enhancement_layers_cnt; i++) {
4466 aom_wb_write_literal(&wb, 0, 4);
4467 }
4468#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004469
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01004470 write_sequence_header(cpi, &wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004471
4472 // color_config
4473 write_bitdepth_colorspace_sampling(cm, &wb);
4474
Andrey Norkin28e9ce22018-01-08 10:11:21 -08004475#if CONFIG_TIMING_INFO_IN_SEQ_HEADERS
4476 // timing_info
4477 write_timing_info_header(cm, &wb);
4478#endif
4479
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08004480#if CONFIG_FILM_GRAIN
4481 aom_wb_write_bit(&wb, cm->film_grain_params_present);
4482#endif
4483
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004484 size = aom_wb_bytes_written(&wb);
4485 return size;
4486}
4487
Jingning Handa11e692017-12-19 08:45:08 -08004488static uint32_t write_frame_header_obu(AV1_COMP *cpi,
4489#if CONFIG_EXT_TILE
4490 struct aom_write_bit_buffer *saved_wb,
4491#endif
4492 uint8_t *const dst) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004493 AV1_COMMON *const cm = &cpi->common;
4494 struct aom_write_bit_buffer wb = { dst, 0 };
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004495 uint32_t total_size = 0;
Yunqing Wange7142e12018-01-17 11:20:12 -08004496 uint32_t uncompressed_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004497
Jingning Handa11e692017-12-19 08:45:08 -08004498 write_uncompressed_header_obu(cpi,
4499#if CONFIG_EXT_TILE
4500 saved_wb,
4501#endif
4502 &wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004503
4504 if (cm->show_existing_frame) {
4505 total_size = aom_wb_bytes_written(&wb);
4506 return total_size;
4507 }
4508
Cyril Concolato3b5afc12017-12-15 12:54:15 -08004509#if !CONFIG_TILE_INFO_FIRST
Jingning Handa11e692017-12-19 08:45:08 -08004510// write the tile length code (Always 4 bytes for now)
4511#if CONFIG_EXT_TILE
4512 if (!cm->large_scale_tile)
4513#endif
4514 aom_wb_write_literal(&wb, 3, 2);
Cyril Concolato3b5afc12017-12-15 12:54:15 -08004515#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004516
Yunqing Wange7142e12018-01-17 11:20:12 -08004517 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
4518 total_size = uncompressed_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004519 return total_size;
4520}
4521
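// The first and last tile indices of the group are each coded with
// tiles_log2 bits, so this header is empty for single-tile frames; the
// caller only emits it when n_log2_tiles is non-zero.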
4522static uint32_t write_tile_group_header(uint8_t *const dst, int startTile,
4523 int endTile, int tiles_log2) {
4524 struct aom_write_bit_buffer wb = { dst, 0 };
4525 uint32_t size = 0;
4526
4527 aom_wb_write_literal(&wb, startTile, tiles_log2);
4528 aom_wb_write_literal(&wb, endTile, tiles_log2);
4529
4530 size = aom_wb_bytes_written(&wb);
4531 return size;
4532}
4533
4534static uint32_t write_tiles_in_tg_obus(AV1_COMP *const cpi, uint8_t *const dst,
4535 unsigned int *max_tile_size,
4536 unsigned int *max_tile_col_size,
Jingning Handa11e692017-12-19 08:45:08 -08004537#if CONFIG_EXT_TILE
4538 struct aom_write_bit_buffer *saved_wb,
4539#endif
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004540 uint8_t obu_extension_header) {
Thomas Davies4822e142017-10-10 11:30:36 +01004541 AV1_COMMON *const cm = &cpi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004542 aom_writer mode_bc;
4543 int tile_row, tile_col;
4544 TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
4545 TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
4546 uint32_t total_size = 0;
4547 const int tile_cols = cm->tile_cols;
4548 const int tile_rows = cm->tile_rows;
4549 unsigned int tile_size = 0;
4550 const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
4551 // Fixed size tile groups for the moment
4552 const int num_tg_hdrs = cm->num_tg;
4553 const int tg_size =
4554#if CONFIG_EXT_TILE
4555 (cm->large_scale_tile)
4556 ? 1
4557 :
4558#endif // CONFIG_EXT_TILE
4559 (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
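  // tg_size is a ceiling division, so every tile group except possibly the
  // last one carries tg_size tiles.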
4560 int tile_count = 0;
4561 int curr_tg_data_size = 0;
4562 uint8_t *data = dst;
4563 int new_tg = 1;
4564#if CONFIG_EXT_TILE
4565 const int have_tiles = tile_cols * tile_rows > 1;
4566#endif
4567
Thomas Davies4822e142017-10-10 11:30:36 +01004568 cm->largest_tile_id = 0;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004569 *max_tile_size = 0;
4570 *max_tile_col_size = 0;
4571
4572#if CONFIG_EXT_TILE
4573 if (cm->large_scale_tile) {
Jingning Handa11e692017-12-19 08:45:08 -08004574 uint32_t tg_hdr_size =
4575 write_obu_header(OBU_TILE_GROUP, 0, data + PRE_OBU_SIZE_BYTES);
Jingning Han5737b4c2018-01-02 10:27:58 -08004576 tg_hdr_size += PRE_OBU_SIZE_BYTES;
Jingning Handa11e692017-12-19 08:45:08 -08004577 data += tg_hdr_size;
4578
4579 int tile_size_bytes;
4580 int tile_col_size_bytes;
4581
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004582 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
4583 TileInfo tile_info;
4584 const int is_last_col = (tile_col == tile_cols - 1);
4585 const uint32_t col_offset = total_size;
4586
4587 av1_tile_set_col(&tile_info, cm, tile_col);
4588
4589 // The last column does not have a column header
4590 if (!is_last_col) total_size += 4;
4591
4592 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
4593 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
4594 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
4595 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
4596 const int data_offset = have_tiles ? 4 : 0;
4597 const int tile_idx = tile_row * tile_cols + tile_col;
4598 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
4599 av1_tile_set_row(&tile_info, cm, tile_row);
4600
Jingning Handa11e692017-12-19 08:45:08 -08004601 buf->data = dst + total_size + tg_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004602
 4603          // If CONFIG_EXT_TILE = 1, every tile in the row has a header,
4604 // even for the last one, unless no tiling is used at all.
4605 total_size += data_offset;
4606 // Initialise tile context from the frame context
4607 this_tile->tctx = *cm->fc;
4608 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07004609 mode_bc.allow_update_cdf = !cm->large_scale_tile;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004610 aom_start_encode(&mode_bc, buf->data + data_offset);
4611 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
4612 assert(tok == tok_end);
4613 aom_stop_encode(&mode_bc);
4614 tile_size = mode_bc.pos;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004615 buf->size = tile_size;
4616
4617 // Record the maximum tile size we see, so we can compact headers later.
Thomas Davies4822e142017-10-10 11:30:36 +01004618 if (tile_size > *max_tile_size) {
4619 *max_tile_size = tile_size;
Thomas Davies4822e142017-10-10 11:30:36 +01004620 cm->largest_tile_id = tile_cols * tile_row + tile_col;
Thomas Davies4822e142017-10-10 11:30:36 +01004621 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004622
4623 if (have_tiles) {
4624 // tile header: size of this tile, or copy offset
4625 uint32_t tile_header = tile_size;
4626 const int tile_copy_mode =
4627 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256)
4628 ? 1
4629 : 0;
4630
4631 // If tile_copy_mode = 1, check if this tile is a copy tile.
4632 // Very low chances to have copy tiles on the key frames, so don't
4633 // search on key frames to reduce unnecessary search.
4634 if (cm->frame_type != KEY_FRAME && tile_copy_mode) {
 4635            const int identical_tile_offset =
4636 find_identical_tile(tile_row, tile_col, tile_buffers);
4637
 4638            if (identical_tile_offset > 0) {
 4639              tile_size = 0;
 4640              tile_header = identical_tile_offset | 0x80;
4641 tile_header <<= 24;
4642 }
4643 }
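          // For a detected copy, the header stores the backward offset to the
          // identical tile with bit 7 set, shifted into the most significant
          // byte so that remux_tiles() can later recognise it via the top bit.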
4644
4645 mem_put_le32(buf->data, tile_header);
4646 }
4647
4648 total_size += tile_size;
4649 }
4650
4651 if (!is_last_col) {
4652 uint32_t col_size = total_size - col_offset - 4;
Jingning Handa11e692017-12-19 08:45:08 -08004653 mem_put_le32(dst + col_offset + tg_hdr_size, col_size);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004654
 4655        // If this is not the final packing, record the maximum tile column size
 4656        // we see; otherwise, check whether the tile column size is out of range.
4657 *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
4658 }
4659 }
Jingning Handa11e692017-12-19 08:45:08 -08004660
4661 if (have_tiles) {
4662 total_size =
4663 remux_tiles(cm, data, total_size, *max_tile_size, *max_tile_col_size,
4664 &tile_size_bytes, &tile_col_size_bytes);
4665 }
4666
4667 // Now fill in the gaps in the uncompressed header.
4668 if (have_tiles) {
4669 assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
4670 aom_wb_write_literal(saved_wb, tile_col_size_bytes - 1, 2);
4671
4672 assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
4673 aom_wb_write_literal(saved_wb, tile_size_bytes - 1, 2);
4674 }
Jingning Handa11e692017-12-19 08:45:08 -08004675 total_size += tg_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004676 } else {
4677#endif // CONFIG_EXT_TILE
4678
4679 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
4680 TileInfo tile_info;
4681 const int is_last_row = (tile_row == tile_rows - 1);
4682 av1_tile_set_row(&tile_info, cm, tile_row);
4683
4684 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
4685 const int tile_idx = tile_row * tile_cols + tile_col;
4686 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
4687 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
4688 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
4689 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
4690 const int is_last_col = (tile_col == tile_cols - 1);
4691 const int is_last_tile = is_last_col && is_last_row;
4692 int is_last_tile_in_tg = 0;
4693
4694 if (new_tg) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004695 data = dst + total_size;
4696 // A new tile group begins at this tile. Write the obu header and
4697 // tile group header
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004698 curr_tg_data_size = write_obu_header(
4699 OBU_TILE_GROUP, obu_extension_header, data + PRE_OBU_SIZE_BYTES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004700 if (n_log2_tiles)
4701 curr_tg_data_size += write_tile_group_header(
Soo-Chul Han38427e82017-09-27 15:06:13 -04004702 data + curr_tg_data_size + PRE_OBU_SIZE_BYTES, tile_idx,
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004703 AOMMIN(tile_idx + tg_size - 1, tile_cols * tile_rows - 1),
4704 n_log2_tiles);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004705 total_size += curr_tg_data_size + PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004706 new_tg = 0;
4707 tile_count = 0;
4708 }
4709 tile_count++;
4710 av1_tile_set_col(&tile_info, cm, tile_col);
4711
4712 if (tile_count == tg_size || tile_idx == (tile_cols * tile_rows - 1)) {
4713 is_last_tile_in_tg = 1;
4714 new_tg = 1;
4715 } else {
4716 is_last_tile_in_tg = 0;
4717 }
4718
4719#if CONFIG_DEPENDENT_HORZTILES
4720 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
4721#endif
4722 buf->data = dst + total_size;
4723
Tom Fineganff863952017-12-22 11:41:14 -08004724 // The last tile of the tile group does not have a header.
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004725 if (!is_last_tile_in_tg) total_size += 4;
4726
4727 // Initialise tile context from the frame context
4728 this_tile->tctx = *cm->fc;
4729 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07004730 mode_bc.allow_update_cdf = 1;
Soo-Chul Han13f0d9c2017-10-22 21:55:52 -04004731#if CONFIG_LOOP_RESTORATION
Debargha Mukherjeea78c8f52018-01-31 11:14:38 -08004732 const int num_planes = av1_num_planes(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004733 av1_reset_loop_restoration(&cpi->td.mb.e_mbd, num_planes);
Soo-Chul Han13f0d9c2017-10-22 21:55:52 -04004734#endif // CONFIG_LOOP_RESTORATION
4735
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004736 aom_start_encode(&mode_bc, dst + total_size);
4737 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004738 aom_stop_encode(&mode_bc);
4739 tile_size = mode_bc.pos;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004740 assert(tile_size > 0);
4741
4742 curr_tg_data_size += (tile_size + (is_last_tile_in_tg ? 0 : 4));
4743 buf->size = tile_size;
Thomas Davies4822e142017-10-10 11:30:36 +01004744 if (tile_size > *max_tile_size) {
4745 cm->largest_tile_id = tile_cols * tile_row + tile_col;
4746 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004747 if (!is_last_tile) {
4748 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
4749 }
Thomas Davies4822e142017-10-10 11:30:36 +01004750
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004751 if (!is_last_tile_in_tg) {
4752 // size of this tile
4753 mem_put_le32(buf->data, tile_size);
4754 } else {
Tom Finegan41150ad2018-01-23 11:42:55 -08004755// write current tile group size
4756#if CONFIG_OBU_SIZING
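          // The tile group OBU was written straight into the output buffer;
          // shift it forward by the ULEB128 length field size and write the
          // coded size into the gap (this assumes PRE_OBU_SIZE_BYTES reserves
          // no extra space when CONFIG_OBU_SIZING is enabled).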
Tom Fineganf2d40f62018-02-01 11:52:49 -08004757 const size_t length_field_size =
4758 aom_uleb_size_in_bytes(curr_tg_data_size);
4759 memmove(data + length_field_size, data, curr_tg_data_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004760 if (write_uleb_obu_size(curr_tg_data_size, data) != AOM_CODEC_OK)
4761 assert(0);
Tom Fineganf2d40f62018-02-01 11:52:49 -08004762 curr_tg_data_size += length_field_size;
4763 total_size += length_field_size;
Tom Finegan41150ad2018-01-23 11:42:55 -08004764#else
4765 mem_put_le32(data, curr_tg_data_size);
4766#endif // CONFIG_OBU_SIZING
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004767 }
4768
4769 total_size += tile_size;
4770 }
4771 }
4772#if CONFIG_EXT_TILE
4773 }
4774#endif // CONFIG_EXT_TILE
4775 return (uint32_t)total_size;
4776}
4777
Tom Finegan41150ad2018-01-23 11:42:55 -08004778#endif // CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07004779
Tom Finegane4099e32018-01-23 12:01:51 -08004780int av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004781 uint8_t *data = dst;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004782 uint32_t data_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004783 unsigned int max_tile_size;
4784 unsigned int max_tile_col_size;
Tom Finegan8695e982018-01-23 21:59:24 +00004785#if CONFIG_OBU
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004786 AV1_COMMON *const cm = &cpi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004787 uint32_t obu_size;
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004788#if CONFIG_SCALABILITY
4789 const uint8_t enhancement_layers_cnt = cm->enhancement_layers_cnt;
4790 const uint8_t obu_extension_header =
4791 cm->temporal_layer_id << 5 | cm->enhancement_layer_id << 3 | 0;
4792#else
4793 uint8_t obu_extension_header = 0;
4794#endif // CONFIG_SCALABILITY
Tom Finegan41150ad2018-01-23 11:42:55 -08004795#endif // CONFIG_OBU
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004796
Angie Chiangb11aedf2017-03-10 17:31:46 -08004797#if CONFIG_BITSTREAM_DEBUG
4798 bitstream_queue_reset_write();
4799#endif
4800
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004801#if CONFIG_OBU
Soo-Chul Han38427e82017-09-27 15:06:13 -04004802 // The TD is now written outside the frame encode loop
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004803
 4804  // write sequence header obu if KEY_FRAME, preceded by a length field (a ULEB128-coded size under CONFIG_OBU_SIZING, otherwise 4 bytes)
4805 if (cm->frame_type == KEY_FRAME) {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004806 obu_size =
4807 write_obu_header(OBU_SEQUENCE_HEADER, 0, data + PRE_OBU_SIZE_BYTES);
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004808
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004809#if CONFIG_SCALABILITY
Tom Fineganf2d40f62018-02-01 11:52:49 -08004810 obu_size += write_sequence_header_obu(
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004811 cpi, data + PRE_OBU_SIZE_BYTES + obu_size, enhancement_layers_cnt);
4812#else
Tom Fineganf2d40f62018-02-01 11:52:49 -08004813 obu_size +=
4814 write_sequence_header_obu(cpi, data + PRE_OBU_SIZE_BYTES + obu_size);
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004815#endif // CONFIG_SCALABILITY
Tom Finegan41150ad2018-01-23 11:42:55 -08004816
4817#if CONFIG_OBU_SIZING
Tom Fineganf2d40f62018-02-01 11:52:49 -08004818 const size_t length_field_size = aom_uleb_size_in_bytes(obu_size);
4819 memmove(data + length_field_size, data, obu_size);
4820
Tom Finegan41150ad2018-01-23 11:42:55 -08004821 if (write_uleb_obu_size(obu_size, data) != AOM_CODEC_OK)
4822 return AOM_CODEC_ERROR;
4823#else
Tom Fineganf2d40f62018-02-01 11:52:49 -08004824 const size_t length_field_size = PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004825 mem_put_le32(data, obu_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004826#endif // CONFIG_OBU_SIZING
4827
Tom Fineganf2d40f62018-02-01 11:52:49 -08004828 data += obu_size + length_field_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004829 }
4830
Jingning Handa11e692017-12-19 08:45:08 -08004831#if CONFIG_EXT_TILE
4832 struct aom_write_bit_buffer saved_wb;
4833#endif
4834
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004835  // write frame header obu, preceded by a length field (a ULEB128-coded size under CONFIG_OBU_SIZING, otherwise 4 bytes)
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004836 obu_size = write_obu_header(OBU_FRAME_HEADER, obu_extension_header,
Tom Fineganf2d40f62018-02-01 11:52:49 -08004837 data + PRE_OBU_SIZE_BYTES);
4838 obu_size += write_frame_header_obu(cpi,
Jingning Handa11e692017-12-19 08:45:08 -08004839#if CONFIG_EXT_TILE
Tom Fineganf2d40f62018-02-01 11:52:49 -08004840 &saved_wb,
Jingning Handa11e692017-12-19 08:45:08 -08004841#endif
Tom Fineganf2d40f62018-02-01 11:52:49 -08004842 data + PRE_OBU_SIZE_BYTES + obu_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004843
4844#if CONFIG_OBU_SIZING
Tom Fineganf2d40f62018-02-01 11:52:49 -08004845 const size_t length_field_size = aom_uleb_size_in_bytes(obu_size);
4846 memmove(data + length_field_size, data, obu_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004847 if (write_uleb_obu_size(obu_size, data) != AOM_CODEC_OK)
4848 return AOM_CODEC_ERROR;
4849#else
Tom Fineganf2d40f62018-02-01 11:52:49 -08004850 const size_t length_field_size = PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004851 mem_put_le32(data, obu_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004852#endif // CONFIG_OBU_SIZING
4853
Tom Fineganf2d40f62018-02-01 11:52:49 -08004854 data += obu_size + length_field_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004855
4856 if (cm->show_existing_frame) {
4857 data_size = 0;
4858 } else {
 4859    // Each tile group obu will be preceded by a length field giving the size
 4860    // of the tile group obu (ULEB128 under CONFIG_OBU_SIZING, otherwise 4 bytes)
Tom Fineganf2d40f62018-02-01 11:52:49 -08004861 data_size =
4862 write_tiles_in_tg_obus(cpi, data, &max_tile_size, &max_tile_col_size,
Jingning Handa11e692017-12-19 08:45:08 -08004863#if CONFIG_EXT_TILE
Tom Fineganf2d40f62018-02-01 11:52:49 -08004864 &saved_wb,
Jingning Handa11e692017-12-19 08:45:08 -08004865#endif
Tom Fineganf2d40f62018-02-01 11:52:49 -08004866 obu_extension_header);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004867 }
4868
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004869#endif // CONFIG_OBU
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004870
Jingning Handa11e692017-12-19 08:45:08 -08004871#if CONFIG_EXT_TILE && !CONFIG_OBU
Jingning Handa11e692017-12-19 08:45:08 -08004872 uint32_t uncompressed_hdr_size;
4873 struct aom_write_bit_buffer saved_wb;
4874 struct aom_write_bit_buffer wb = { data, 0 };
4875 const int have_tiles = cm->tile_cols * cm->tile_rows > 1;
4876 int tile_size_bytes;
4877 int tile_col_size_bytes;
4878
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004879 if (cm->large_scale_tile) {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004880#if !CONFIG_OBU
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004881 write_uncompressed_header_frame(cpi, &wb);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004882#else
4883 write_uncompressed_header_obu(cpi, &wb);
4884#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004885
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004886 if (cm->show_existing_frame) {
4887 *size = aom_wb_bytes_written(&wb);
Tom Fineganf2d40f62018-02-01 11:52:49 -08004888 return AOM_CODEC_OK;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004889 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004890
 4891    // We do not know these in advance. Output placeholder bits.
4892 saved_wb = wb;
4893 // Write tile size magnitudes
4894 if (have_tiles) {
4895 // Note that the last item in the uncompressed header is the data
4896 // describing tile configuration.
4897 // Number of bytes in tile column size - 1
4898 aom_wb_write_literal(&wb, 0, 2);
4899
4900 // Number of bytes in tile size - 1
4901 aom_wb_write_literal(&wb, 0, 2);
4902 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004903
Yunqing Wange7142e12018-01-17 11:20:12 -08004904 uncompressed_hdr_size = (uint32_t)aom_wb_bytes_written(&wb);
4905 aom_clear_system_state();
4906 data += uncompressed_hdr_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004907
Yunqing Wangb041d8a2017-11-15 12:31:18 -08004908#define EXT_TILE_DEBUG 0
4909#if EXT_TILE_DEBUG
4910 {
4911 char fn[20] = "./fh";
4912 fn[4] = cm->current_video_frame / 100 + '0';
4913 fn[5] = (cm->current_video_frame % 100) / 10 + '0';
4914 fn[6] = (cm->current_video_frame % 10) + '0';
4915 fn[7] = '\0';
Yunqing Wange7142e12018-01-17 11:20:12 -08004916 av1_print_uncompressed_frame_header(data - uncompressed_hdr_size,
4917 uncompressed_hdr_size, fn);
Yunqing Wangb041d8a2017-11-15 12:31:18 -08004918 }
4919#endif // EXT_TILE_DEBUG
4920#undef EXT_TILE_DEBUG
4921
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004922 // Write the encoded tile data
4923 data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
4924 } else {
4925#endif // CONFIG_EXT_TILE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004926#if !CONFIG_OBU
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004927 data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004928#endif
Jingning Handa11e692017-12-19 08:45:08 -08004929#if CONFIG_EXT_TILE && !CONFIG_OBU
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004930 }
4931#endif // CONFIG_EXT_TILE
Jingning Handa11e692017-12-19 08:45:08 -08004932#if CONFIG_EXT_TILE && !CONFIG_OBU
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004933 if (cm->large_scale_tile) {
4934 if (have_tiles) {
4935 data_size =
4936 remux_tiles(cm, data, data_size, max_tile_size, max_tile_col_size,
4937 &tile_size_bytes, &tile_col_size_bytes);
4938 }
4939
4940 data += data_size;
4941
4942 // Now fill in the gaps in the uncompressed header.
4943 if (have_tiles) {
4944 assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
4945 aom_wb_write_literal(&saved_wb, tile_col_size_bytes - 1, 2);
4946
4947 assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
4948 aom_wb_write_literal(&saved_wb, tile_size_bytes - 1, 2);
4949 }
Tom Finegane4099e32018-01-23 12:01:51 -08004950
4951 if (compressed_hdr_size > 0xffff) return AOM_CODEC_ERROR;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004952 } else {
4953#endif // CONFIG_EXT_TILE
4954 data += data_size;
Jingning Handa11e692017-12-19 08:45:08 -08004955#if CONFIG_EXT_TILE && !CONFIG_OBU
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004956 }
4957#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004958 *size = data - dst;
Tom Finegane4099e32018-01-23 12:01:51 -08004959 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004960}