blob: d914fc3a04f2f922bf06ab0869728cf1229d1f43 [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <assert.h>
13#include <limits.h>
14#include <stdio.h>
15
Yaowu Xuf883b422016-08-30 14:01:10 -070016#include "aom/aom_encoder.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070017#include "aom_dsp/aom_dsp_common.h"
Debargha Mukherjee47748b52017-03-24 12:20:49 -070018#include "aom_dsp/binary_codes_writer.h"
Cheng Chenc7855b12017-09-05 10:49:08 -070019#include "aom_dsp/bitwriter_buffer.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070020#include "aom_mem/aom_mem.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070021#include "aom_ports/mem_ops.h"
22#include "aom_ports/system_state.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070023#if CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070024#include "aom_util/debug_util.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070025#endif // CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070026
Steinar Midtskogena9d41e82017-03-17 12:48:15 +010027#include "av1/common/cdef.h"
Luc Trudeaud183b642017-11-28 11:42:37 -050028#if CONFIG_CFL
29#include "av1/common/cfl.h"
30#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -070031#include "av1/common/entropy.h"
32#include "av1/common/entropymode.h"
33#include "av1/common/entropymv.h"
34#include "av1/common/mvref_common.h"
Thomas Daviesf6936102016-09-05 16:51:31 +010035#include "av1/common/odintrin.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070036#include "av1/common/pred_common.h"
37#include "av1/common/reconinter.h"
hui su45dc5972016-12-08 17:42:50 -080038#if CONFIG_EXT_INTRA
39#include "av1/common/reconintra.h"
40#endif // CONFIG_EXT_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -070041#include "av1/common/seg_common.h"
42#include "av1/common/tile_common.h"
43
Angie Chiangc8af6112017-03-16 16:11:22 -070044#if CONFIG_LV_MAP
45#include "av1/encoder/encodetxb.h"
46#endif // CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -070047#include "av1/encoder/bitstream.h"
48#include "av1/encoder/cost.h"
49#include "av1/encoder/encodemv.h"
50#include "av1/encoder/mcomp.h"
Urvang Joshic6300aa2017-06-01 14:46:23 -070051#if CONFIG_PALETTE_DELTA_ENCODING
hui sud13c24a2017-04-07 16:13:07 -070052#include "av1/encoder/palette.h"
Urvang Joshic6300aa2017-06-01 14:46:23 -070053#endif // CONFIG_PALETTE_DELTA_ENCODING
Yaowu Xuc27fc142016-08-22 16:08:15 -070054#include "av1/encoder/segmentation.h"
55#include "av1/encoder/subexp.h"
56#include "av1/encoder/tokenize.h"
57
Di Chen56586622017-06-09 13:49:44 -070058#define ENC_MISMATCH_DEBUG 0
Zoe Liu85b66462017-04-20 14:28:19 -070059
Yaowu Xuf883b422016-08-30 14:01:10 -070060static INLINE void write_uniform(aom_writer *w, int n, int v) {
hui su37499292017-04-26 09:49:53 -070061 const int l = get_unsigned_bits(n);
62 const int m = (1 << l) - n;
Yaowu Xuc27fc142016-08-22 16:08:15 -070063 if (l == 0) return;
64 if (v < m) {
Yaowu Xuf883b422016-08-30 14:01:10 -070065 aom_write_literal(w, v, l - 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -070066 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -070067 aom_write_literal(w, m + ((v - m) >> 1), l - 1);
68 aom_write_literal(w, (v - m) & 1, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -070069 }
70}
71
Yaowu Xuf883b422016-08-30 14:01:10 -070072static struct av1_token interintra_mode_encodings[INTERINTRA_MODES];
Cheng Chen2ef24ea2017-11-29 12:22:24 -080073#if CONFIG_JNT_COMP
74static struct av1_token compound_type_encodings[COMPOUND_TYPES - 1];
75#else
Sarah Parker6fddd182016-11-10 20:57:20 -080076static struct av1_token compound_type_encodings[COMPOUND_TYPES];
Cheng Chen2ef24ea2017-11-29 12:22:24 -080077#endif // CONFIG_JNT_COMP
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -070078#if CONFIG_LOOP_RESTORATION
Rupert Swarbrick6c545212017-09-01 17:17:25 +010079static void loop_restoration_write_sb_coeffs(const AV1_COMMON *const cm,
80 MACROBLOCKD *xd,
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +010081 const RestorationUnitInfo *rui,
82 aom_writer *const w, int plane);
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -070083#endif // CONFIG_LOOP_RESTORATION
Soo-Chul Han65c00ae2017-09-07 13:12:35 -040084#if CONFIG_OBU
85static void write_uncompressed_header_obu(AV1_COMP *cpi,
86 struct aom_write_bit_buffer *wb);
87#else
88static void write_uncompressed_header_frame(AV1_COMP *cpi,
89 struct aom_write_bit_buffer *wb);
90#endif
91
Thomas Davies80188d12016-10-26 16:08:35 -070092static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data);
Debargha Mukherjee2eada612017-09-22 15:37:39 -070093
Soo-Chul Han65c00ae2017-09-07 13:12:35 -040094#if !CONFIG_OBU || CONFIG_EXT_TILE
Thomas Daviesdbfc4f92017-01-18 16:46:09 +000095static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
96 const uint32_t data_size, const uint32_t max_tile_size,
97 const uint32_t max_tile_col_size,
98 int *const tile_size_bytes,
99 int *const tile_col_size_bytes);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -0400100#endif
Yaowu Xuf883b422016-08-30 14:01:10 -0700101void av1_encode_token_init(void) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700102 av1_tokens_from_tree(interintra_mode_encodings, av1_interintra_mode_tree);
Sarah Parker6fddd182016-11-10 20:57:20 -0800103 av1_tokens_from_tree(compound_type_encodings, av1_compound_type_tree);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700104}
105
Jingning Hanf04254f2017-03-08 10:51:35 -0800106static void write_intra_mode_kf(const AV1_COMMON *cm, FRAME_CONTEXT *frame_ctx,
107 const MODE_INFO *mi, const MODE_INFO *above_mi,
108 const MODE_INFO *left_mi, int block,
109 PREDICTION_MODE mode, aom_writer *w) {
Alex Converse28744302017-04-13 14:46:22 -0700110#if CONFIG_INTRABC
111 assert(!is_intrabc_block(&mi->mbmi));
112#endif // CONFIG_INTRABC
Hui Su814f41e2017-10-02 12:21:24 -0700113 aom_write_symbol(w, mode,
Jingning Hanf04254f2017-03-08 10:51:35 -0800114 get_y_mode_cdf(frame_ctx, mi, above_mi, left_mi, block),
115 INTRA_MODES);
116 (void)cm;
Jingning Hanf04254f2017-03-08 10:51:35 -0800117}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700118
Thomas Davies1de6c882017-01-11 17:47:49 +0000119static void write_inter_mode(aom_writer *w, PREDICTION_MODE mode,
Zoe Liu7f24e1b2017-03-17 17:42:05 -0700120 FRAME_CONTEXT *ec_ctx, const int16_t mode_ctx) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700121 const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700122
Thomas Davies149eda52017-06-12 18:11:55 +0100123 aom_write_symbol(w, mode != NEWMV, ec_ctx->newmv_cdf[newmv_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700124
Jingning Hanf2b87bd2017-05-18 16:27:30 -0700125 if (mode != NEWMV) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700126 if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) {
Sarah Parker2b9ec2e2017-10-30 17:34:08 -0700127 assert(mode == GLOBALMV);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700128 return;
129 }
130
Sarah Parker2b9ec2e2017-10-30 17:34:08 -0700131 const int16_t zeromv_ctx =
132 (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
Sarah Parker2b9ec2e2017-10-30 17:34:08 -0700133 aom_write_symbol(w, mode != GLOBALMV, ec_ctx->zeromv_cdf[zeromv_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700134
Sarah Parker2b9ec2e2017-10-30 17:34:08 -0700135 if (mode != GLOBALMV) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700136 int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700137
138 if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
139 if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
140 if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;
Thomas Davies149eda52017-06-12 18:11:55 +0100141 aom_write_symbol(w, mode != NEARESTMV, ec_ctx->refmv_cdf[refmv_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700142 }
143 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700144}
145
Thomas Davies149eda52017-06-12 18:11:55 +0100146static void write_drl_idx(FRAME_CONTEXT *ec_ctx, const MB_MODE_INFO *mbmi,
Yaowu Xuf883b422016-08-30 14:01:10 -0700147 const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
148 uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700149
150 assert(mbmi->ref_mv_idx < 3);
151
Sebastien Alaiwan34d55662017-11-15 09:36:03 +0100152 const int new_mv = mbmi->mode == NEWMV || mbmi->mode == NEW_NEWMV;
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000153 if (new_mv) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700154 int idx;
155 for (idx = 0; idx < 2; ++idx) {
Jingning Hanb56b71a2017-12-04 09:14:13 -0800156 if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1 ||
157 CONFIG_OPT_REF_MV) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700158 uint8_t drl_ctx =
Yaowu Xuf883b422016-08-30 14:01:10 -0700159 av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700160
Thomas Davies149eda52017-06-12 18:11:55 +0100161 aom_write_symbol(w, mbmi->ref_mv_idx != idx, ec_ctx->drl_cdf[drl_ctx],
162 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700163 if (mbmi->ref_mv_idx == idx) return;
164 }
165 }
166 return;
167 }
168
David Barker3dfba992017-04-03 16:10:09 +0100169 if (have_nearmv_in_inter_mode(mbmi->mode)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700170 int idx;
171 // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
172 for (idx = 1; idx < 3; ++idx) {
Jingning Hanb56b71a2017-12-04 09:14:13 -0800173 if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1 ||
174 CONFIG_OPT_REF_MV) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700175 uint8_t drl_ctx =
Yaowu Xuf883b422016-08-30 14:01:10 -0700176 av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
Thomas Davies149eda52017-06-12 18:11:55 +0100177 aom_write_symbol(w, mbmi->ref_mv_idx != (idx - 1),
178 ec_ctx->drl_cdf[drl_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700179 if (mbmi->ref_mv_idx == (idx - 1)) return;
180 }
181 }
182 return;
183 }
184}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700185
Thomas Davies8c08a332017-06-26 17:30:34 +0100186static void write_inter_compound_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
187 aom_writer *w, PREDICTION_MODE mode,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700188 const int16_t mode_ctx) {
Thomas Davies8c08a332017-06-26 17:30:34 +0100189 assert(is_inter_compound_mode(mode));
Thomas Davies8c08a332017-06-26 17:30:34 +0100190 (void)cm;
191 aom_write_symbol(w, INTER_COMPOUND_OFFSET(mode),
192 xd->tile_ctx->inter_compound_mode_cdf[mode_ctx],
193 INTER_COMPOUND_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700194}
Zoe Liu85b66462017-04-20 14:28:19 -0700195
Yaowu Xuf883b422016-08-30 14:01:10 -0700196static void encode_unsigned_max(struct aom_write_bit_buffer *wb, int data,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700197 int max) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700198 aom_wb_write_literal(wb, data, get_unsigned_bits(max));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700199}
200
Thomas Davies985bfc32017-06-27 16:51:26 +0100201static void write_tx_size_vartx(const AV1_COMMON *cm, MACROBLOCKD *xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700202 const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
Jingning Han94d5bfc2016-10-21 10:14:36 -0700203 int depth, int blk_row, int blk_col,
204 aom_writer *w) {
Thomas Davies985bfc32017-06-27 16:51:26 +0100205 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
206 (void)cm;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700207 const int tx_row = blk_row >> 1;
208 const int tx_col = blk_col >> 1;
Jingning Hanf65b8702016-10-31 12:13:20 -0700209 const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
210 const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);
211
Jingning Han331662e2017-05-30 17:03:32 -0700212 int ctx = txfm_partition_context(xd->above_txfm_context + blk_col,
213 xd->left_txfm_context + blk_row,
Jingning Hanc8b89362016-11-01 10:28:53 -0700214 mbmi->sb_type, tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700215
Yaowu Xuc27fc142016-08-22 16:08:15 -0700216 if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;
217
Jingning Han571189c2016-10-24 10:38:43 -0700218 if (depth == MAX_VARTX_DEPTH) {
Jingning Han331662e2017-05-30 17:03:32 -0700219 txfm_partition_update(xd->above_txfm_context + blk_col,
220 xd->left_txfm_context + blk_row, tx_size, tx_size);
Jingning Han94d5bfc2016-10-21 10:14:36 -0700221 return;
222 }
223
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000224 const int write_txfm_partition =
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000225 tx_size == mbmi->inter_tx_size[tx_row][tx_col];
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000226 if (write_txfm_partition) {
Thomas Davies985bfc32017-06-27 16:51:26 +0100227 aom_write_symbol(w, 0, ec_ctx->txfm_partition_cdf[ctx], 2);
Thomas Davies985bfc32017-06-27 16:51:26 +0100228
Jingning Han331662e2017-05-30 17:03:32 -0700229 txfm_partition_update(xd->above_txfm_context + blk_col,
230 xd->left_txfm_context + blk_row, tx_size, tx_size);
Yue Chend6bdd462017-07-19 16:05:43 -0700231 // TODO(yuec): set correct txfm partition update for qttx
Yaowu Xuc27fc142016-08-22 16:08:15 -0700232 } else {
Jingning Hanf64062f2016-11-02 16:22:18 -0700233 const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
Yue Chen0797a202017-10-27 17:24:56 -0700234 const int bsw = tx_size_wide_unit[sub_txs];
235 const int bsh = tx_size_high_unit[sub_txs];
Jingning Hanf64062f2016-11-02 16:22:18 -0700236
Thomas Davies985bfc32017-06-27 16:51:26 +0100237 aom_write_symbol(w, 1, ec_ctx->txfm_partition_cdf[ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700238
David Barker16c64e32017-08-23 16:54:59 +0100239 if (sub_txs == TX_4X4) {
Jingning Han331662e2017-05-30 17:03:32 -0700240 txfm_partition_update(xd->above_txfm_context + blk_col,
241 xd->left_txfm_context + blk_row, sub_txs, tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700242 return;
243 }
244
Yue Chen0797a202017-10-27 17:24:56 -0700245 assert(bsw > 0 && bsh > 0);
246 for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh)
247 for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
248 int offsetr = blk_row + row;
249 int offsetc = blk_col + col;
250 write_tx_size_vartx(cm, xd, mbmi, sub_txs, depth + 1, offsetr, offsetc,
251 w);
252 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700253 }
254}
255
Yaowu Xuf883b422016-08-30 14:01:10 -0700256static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
257 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700258 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
259 const BLOCK_SIZE bsize = mbmi->sb_type;
Thomas Davies15580c52017-03-09 13:53:42 +0000260 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
261 (void)cm;
Rupert Swarbrickfcff0b22017-10-05 09:26:04 +0100262 if (block_signals_txsize(bsize)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700263 const TX_SIZE tx_size = mbmi->tx_size;
Urvang Joshiab8840e2017-10-06 16:38:24 -0700264 const int tx_size_ctx = get_tx_size_context(xd);
Debargha Mukherjee6147b1b2017-11-08 08:31:09 -0800265 const int32_t tx_size_cat = intra_tx_size_cat_lookup[bsize];
Debargha Mukherjee0fa057f2017-12-06 17:06:29 -0800266 const int depth = tx_size_to_depth(tx_size, bsize, 0);
267 const int max_depths = bsize_to_max_depth(bsize, 0);
Debargha Mukherjee6147b1b2017-11-08 08:31:09 -0800268
Debargha Mukherjee6147b1b2017-11-08 08:31:09 -0800269 assert(depth >= 0 && depth <= max_depths);
Debargha Mukherjee6147b1b2017-11-08 08:31:09 -0800270 assert(!is_inter_block(mbmi));
Yue Chen49587a72016-09-28 17:09:47 -0700271 assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700272
Thomas Davies15580c52017-03-09 13:53:42 +0000273 aom_write_symbol(w, depth, ec_ctx->tx_size_cdf[tx_size_cat][tx_size_ctx],
Debargha Mukherjee6147b1b2017-11-08 08:31:09 -0800274 max_depths + 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700275 }
276}
277
Yaowu Xuf883b422016-08-30 14:01:10 -0700278static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
279 int segment_id, const MODE_INFO *mi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700280 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
281 return 1;
282 } else {
283 const int skip = mi->mbmi.skip;
Zoe Liue646daa2017-10-17 15:28:46 -0700284 const int ctx = av1_get_skip_context(xd);
Thomas Davies61e3e372017-04-04 16:10:23 +0100285 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Davies61e3e372017-04-04 16:10:23 +0100286 aom_write_symbol(w, skip, ec_ctx->skip_cdfs[ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700287 return skip;
288 }
289}
290
Zoe Liuf40a9572017-10-13 12:37:19 -0700291#if CONFIG_EXT_SKIP
292static int write_skip_mode(const AV1_COMMON *cm, const MACROBLOCKD *xd,
293 int segment_id, const MODE_INFO *mi, aom_writer *w) {
294 if (!cm->skip_mode_flag) return 0;
295 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
296 return 0;
297 }
298 const int skip_mode = mi->mbmi.skip_mode;
299 if (!is_comp_ref_allowed(mi->mbmi.sb_type)) {
300 assert(!skip_mode);
301 return 0;
302 }
303 const int ctx = av1_get_skip_mode_context(xd);
304 aom_write_symbol(w, skip_mode, xd->tile_ctx->skip_mode_cdfs[ctx], 2);
305 return skip_mode;
306}
307#endif // CONFIG_EXT_SKIP
308
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100309static void write_is_inter(const AV1_COMMON *cm, const MACROBLOCKD *xd,
310 int segment_id, aom_writer *w, const int is_inter) {
311 if (!segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
Yue Chen170678a2017-10-17 13:43:10 -0700312 const int ctx = av1_get_intra_inter_context(xd);
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100313 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100314 aom_write_symbol(w, is_inter, ec_ctx->intra_inter_cdf[ctx], 2);
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100315 }
316}
317
Thomas Daviesd9b57262017-06-27 17:43:25 +0100318static void write_motion_mode(const AV1_COMMON *cm, MACROBLOCKD *xd,
319 const MODE_INFO *mi, aom_writer *w) {
Sarah Parker19234cc2017-03-10 16:43:25 -0800320 const MB_MODE_INFO *mbmi = &mi->mbmi;
Thomas Daviesd9b57262017-06-27 17:43:25 +0100321
Sebastien Alaiwan48795802017-10-30 12:07:13 +0100322 MOTION_MODE last_motion_mode_allowed =
Sebastien Alaiwan1f56b8e2017-10-31 17:37:16 +0100323 motion_mode_allowed(0, cm->global_motion, xd, mi);
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000324 switch (last_motion_mode_allowed) {
325 case SIMPLE_TRANSLATION: break;
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000326 case OBMC_CAUSAL:
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000327 aom_write_symbol(w, mbmi->motion_mode == OBMC_CAUSAL,
328 xd->tile_ctx->obmc_cdf[mbmi->sb_type], 2);
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000329 break;
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000330 default:
Yunqing Wang3afbf3f2017-11-21 20:16:18 -0800331#if CONFIG_EXT_WARPED_MOTION
332 {
333 int wm_ctx = 0;
334 if (mbmi->wm_ctx != -1) {
335 wm_ctx = 1;
336 if (mbmi->mode == NEARESTMV) wm_ctx = 2;
337 }
338 aom_write_symbol(w, mbmi->motion_mode,
339 xd->tile_ctx->motion_mode_cdf[wm_ctx][mbmi->sb_type],
340 MOTION_MODES);
341 }
342#else
Rupert Swarbrickcf772762017-11-03 16:41:07 +0000343 aom_write_symbol(w, mbmi->motion_mode,
344 xd->tile_ctx->motion_mode_cdf[mbmi->sb_type],
345 MOTION_MODES);
Yunqing Wang3afbf3f2017-11-21 20:16:18 -0800346#endif // CONFIG_EXT_WARPED_MOTION
Yue Chen69f18e12016-09-08 14:48:15 -0700347 }
Yue Chen69f18e12016-09-08 14:48:15 -0700348}
Wei-Ting Lin85a8f702017-06-22 13:55:15 -0700349
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000350static void write_delta_qindex(const AV1_COMMON *cm, const MACROBLOCKD *xd,
351 int delta_qindex, aom_writer *w) {
Arild Fuldseth07441162016-08-15 15:07:52 +0200352 int sign = delta_qindex < 0;
353 int abs = sign ? -delta_qindex : delta_qindex;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000354 int rem_bits, thr;
Thomas Daviesf6936102016-09-05 16:51:31 +0100355 int smallval = abs < DELTA_Q_SMALL ? 1 : 0;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000356 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
357 (void)cm;
Thomas Daviesf6936102016-09-05 16:51:31 +0100358
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000359 aom_write_symbol(w, AOMMIN(abs, DELTA_Q_SMALL), ec_ctx->delta_q_cdf,
360 DELTA_Q_PROBS + 1);
Thomas Daviesf6936102016-09-05 16:51:31 +0100361
362 if (!smallval) {
363 rem_bits = OD_ILOG_NZ(abs - 1) - 1;
364 thr = (1 << rem_bits) + 1;
Thomas Davies3b93e8e2017-09-20 09:59:07 +0100365 aom_write_literal(w, rem_bits - 1, 3);
Thomas Daviesf6936102016-09-05 16:51:31 +0100366 aom_write_literal(w, abs - thr, rem_bits);
Arild Fuldseth07441162016-08-15 15:07:52 +0200367 }
368 if (abs > 0) {
369 aom_write_bit(w, sign);
370 }
371}
Thomas Daviesf6936102016-09-05 16:51:31 +0100372
Fangwen Fu231fe422017-04-24 17:52:29 -0700373#if CONFIG_EXT_DELTA_Q
374static void write_delta_lflevel(const AV1_COMMON *cm, const MACROBLOCKD *xd,
Cheng Chena97394f2017-09-27 15:05:14 -0700375#if CONFIG_LOOPFILTER_LEVEL
376 int lf_id,
377#endif
Fangwen Fu231fe422017-04-24 17:52:29 -0700378 int delta_lflevel, aom_writer *w) {
379 int sign = delta_lflevel < 0;
380 int abs = sign ? -delta_lflevel : delta_lflevel;
381 int rem_bits, thr;
382 int smallval = abs < DELTA_LF_SMALL ? 1 : 0;
Fangwen Fu231fe422017-04-24 17:52:29 -0700383 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
384 (void)cm;
Fangwen Fu231fe422017-04-24 17:52:29 -0700385
Cheng Chena97394f2017-09-27 15:05:14 -0700386#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen880166a2017-10-02 17:48:48 -0700387 if (cm->delta_lf_multi) {
388 assert(lf_id >= 0 && lf_id < FRAME_LF_COUNT);
389 aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL),
390 ec_ctx->delta_lf_multi_cdf[lf_id], DELTA_LF_PROBS + 1);
391 } else {
392 aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
393 DELTA_LF_PROBS + 1);
394 }
Cheng Chena97394f2017-09-27 15:05:14 -0700395#else
Fangwen Fu231fe422017-04-24 17:52:29 -0700396 aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
397 DELTA_LF_PROBS + 1);
Cheng Chena97394f2017-09-27 15:05:14 -0700398#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -0700399
400 if (!smallval) {
401 rem_bits = OD_ILOG_NZ(abs - 1) - 1;
402 thr = (1 << rem_bits) + 1;
Thomas Davies3b93e8e2017-09-20 09:59:07 +0100403 aom_write_literal(w, rem_bits - 1, 3);
Fangwen Fu231fe422017-04-24 17:52:29 -0700404 aom_write_literal(w, abs - thr, rem_bits);
405 }
406 if (abs > 0) {
407 aom_write_bit(w, sign);
408 }
409}
Fangwen Fu231fe422017-04-24 17:52:29 -0700410#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +0200411
Sarah Parker99e7daa2017-08-29 10:30:13 -0700412static void pack_map_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
413 int num) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700414 const TOKENEXTRA *p = *tp;
hui su40b9e7f2017-07-13 18:15:56 -0700415 write_uniform(w, n, p->token); // The first color index.
416 ++p;
417 --num;
418 for (int i = 0; i < num; ++i) {
Sarah Parker0cf4d9f2017-08-18 13:09:14 -0700419 aom_write_symbol(w, p->token, p->color_map_cdf, n);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700420 ++p;
421 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700422 *tp = p;
423}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700424
James Zern35545dd2017-08-25 18:48:02 -0700425#if !CONFIG_LV_MAP
Thomas Davies9b8393f2017-03-21 11:04:08 +0000426static INLINE void write_coeff_extra(const aom_cdf_prob *const *cdf, int val,
427 int n, aom_writer *w) {
428 // Code the extra bits from LSB to MSB in groups of 4
429 int i = 0;
430 int count = 0;
431 while (count < n) {
432 const int size = AOMMIN(n - count, 4);
433 const int mask = (1 << size) - 1;
434 aom_write_cdf(w, val & mask, cdf[i++], 1 << size);
435 val >>= size;
436 count += size;
437 }
438}
Thomas Davies9b8393f2017-03-21 11:04:08 +0000439
Yaowu Xuf883b422016-08-30 14:01:10 -0700440static void pack_mb_tokens(aom_writer *w, const TOKENEXTRA **tp,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700441 const TOKENEXTRA *const stop,
Angie Chiangd4022822016-11-02 18:30:25 -0700442 aom_bit_depth_t bit_depth, const TX_SIZE tx_size,
443 TOKEN_STATS *token_stats) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700444 const TOKENEXTRA *p = *tp;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700445 int count = 0;
Urvang Joshi80893152017-10-27 11:51:14 -0700446 const int seg_eob = av1_get_max_eob(tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700447
448 while (p < stop && p->token != EOSB_TOKEN) {
Urvang Joshi454280d2016-10-14 16:51:44 -0700449 const int token = p->token;
Yaowu Xuabe52152017-10-20 14:37:54 -0700450 const int8_t eob_val = p->eob_val;
Thomas Daviesfc1598a2017-01-13 17:07:25 +0000451 if (token == BLOCK_Z_TOKEN) {
Thomas Davies1c05c632017-03-15 09:58:34 +0000452 aom_write_symbol(w, 0, *p->head_cdf, HEAD_TOKENS + 1);
Thomas Daviesfc1598a2017-01-13 17:07:25 +0000453 p++;
Jingning Han0481e8b2017-05-27 08:10:17 -0700454 break;
Thomas Daviesfc1598a2017-01-13 17:07:25 +0000455 continue;
456 }
Yaowu Xuc8ab0bc2017-04-11 21:53:22 -0700457
458 const av1_extra_bit *const extra_bits = &av1_extra_bits[token];
Jingning Han24b15c92017-05-17 15:56:48 -0700459 if (eob_val == LAST_EOB) {
Thomas Davies04bdd522017-03-13 22:34:14 +0000460 // Just code a flag indicating whether the value is >1 or 1.
461 aom_write_bit(w, token != ONE_TOKEN);
462 } else {
Jingning Han24b15c92017-05-17 15:56:48 -0700463 int comb_symb = 2 * AOMMIN(token, TWO_TOKEN) - eob_val + p->first_val;
Thomas Davies1c05c632017-03-15 09:58:34 +0000464 aom_write_symbol(w, comb_symb, *p->head_cdf, HEAD_TOKENS + p->first_val);
Thomas Davies04bdd522017-03-13 22:34:14 +0000465 }
Thomas Daviesfc1598a2017-01-13 17:07:25 +0000466 if (token > ONE_TOKEN) {
Thomas Davies1c05c632017-03-15 09:58:34 +0000467 aom_write_symbol(w, token - TWO_TOKEN, *p->tail_cdf, TAIL_TOKENS);
Alex Conversedc62b092016-10-11 16:50:56 -0700468 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700469
Alex Conversed8fdfaa2016-07-26 16:27:51 -0700470 if (extra_bits->base_val) {
471 const int bit_string = p->extra;
472 const int bit_string_length = extra_bits->len; // Length of extra bits to
Thomas Davies9b8393f2017-03-21 11:04:08 +0000473 const int is_cat6 = (extra_bits->base_val == CAT6_MIN_VAL);
Thomas Daviesfc1598a2017-01-13 17:07:25 +0000474 // be written excluding
475 // the sign bit.
Thomas Davies9b8393f2017-03-21 11:04:08 +0000476 int skip_bits = is_cat6
Alex Converseda3d94f2017-03-15 14:54:29 -0700477 ? (int)sizeof(av1_cat6_prob) -
478 av1_get_cat6_extrabits_size(tx_size, bit_depth)
479 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700480
Thomas Davies9b8393f2017-03-21 11:04:08 +0000481 assert(!(bit_string >> (bit_string_length - skip_bits + 1)));
482 if (bit_string_length > 0)
Thomas Davies9b8393f2017-03-21 11:04:08 +0000483 write_coeff_extra(extra_bits->cdf, bit_string >> 1,
484 bit_string_length - skip_bits, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700485
Angie Chiangd4022822016-11-02 18:30:25 -0700486 aom_write_bit_record(w, bit_string & 1, token_stats);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700487 }
488 ++p;
489
Yaowu Xuc27fc142016-08-22 16:08:15 -0700490 ++count;
Jingning Han24b15c92017-05-17 15:56:48 -0700491 if (eob_val == EARLY_EOB || count == seg_eob) break;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700492 }
493
494 *tp = p;
495}
Angie Chiangc8af6112017-03-16 16:11:22 -0700496#endif // !CONFIG_LV_MAP
Yushin Cho258a0242017-03-06 13:53:01 -0800497
Jingning Han4fe5f672017-05-19 15:46:07 -0700498#if CONFIG_LV_MAP
Jingning Hana2285692017-10-25 15:14:31 -0700499static void pack_txb_tokens(aom_writer *w, AV1_COMMON *cm, MACROBLOCK *const x,
Jingning Han4fe5f672017-05-19 15:46:07 -0700500 const TOKENEXTRA **tp,
Jingning Hana2285692017-10-25 15:14:31 -0700501 const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
502 MB_MODE_INFO *mbmi, int plane,
Jingning Han4fe5f672017-05-19 15:46:07 -0700503 BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
504 int block, int blk_row, int blk_col,
505 TX_SIZE tx_size, TOKEN_STATS *token_stats) {
506 const struct macroblockd_plane *const pd = &xd->plane[plane];
507 const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
508 const int tx_row = blk_row >> (1 - pd->subsampling_y);
509 const int tx_col = blk_col >> (1 - pd->subsampling_x);
510 TX_SIZE plane_tx_size;
511 const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
512 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
513
514 if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;
515
516 plane_tx_size =
517 plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
518 : mbmi->inter_tx_size[tx_row][tx_col];
519
Debargha Mukherjee891a8772017-11-22 10:09:37 -0800520 if (tx_size == plane_tx_size
521#if DISABLE_VARTX_FOR_CHROMA
522 || pd->subsampling_x || pd->subsampling_y
523#endif // DISABLE_VARTX_FOR_CHROMA
524 ) {
Jingning Han4fe5f672017-05-19 15:46:07 -0700525 TOKEN_STATS tmp_token_stats;
526 init_token_stats(&tmp_token_stats);
527
Jingning Han4fe5f672017-05-19 15:46:07 -0700528 tran_low_t *tcoeff = BLOCK_OFFSET(x->mbmi_ext->tcoeff[plane], block);
529 uint16_t eob = x->mbmi_ext->eobs[plane][block];
530 TXB_CTX txb_ctx = { x->mbmi_ext->txb_skip_ctx[plane][block],
531 x->mbmi_ext->dc_sign_ctx[plane][block] };
Jingning Han7eab9ff2017-07-06 10:12:54 -0700532 av1_write_coeffs_txb(cm, xd, w, blk_row, blk_col, block, plane, tx_size,
533 tcoeff, eob, &txb_ctx);
Jingning Han4fe5f672017-05-19 15:46:07 -0700534#if CONFIG_RD_DEBUG
535 token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
536 token_stats->cost += tmp_token_stats.cost;
537#endif
538 } else {
539 const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
Yue Chen0797a202017-10-27 17:24:56 -0700540 const int bsw = tx_size_wide_unit[sub_txs];
541 const int bsh = tx_size_high_unit[sub_txs];
Jingning Han4fe5f672017-05-19 15:46:07 -0700542
Yue Chen0797a202017-10-27 17:24:56 -0700543 assert(bsw > 0 && bsh > 0);
Jingning Han4fe5f672017-05-19 15:46:07 -0700544
Yue Chen0797a202017-10-27 17:24:56 -0700545 for (int r = 0; r < tx_size_high_unit[tx_size]; r += bsh) {
546 for (int c = 0; c < tx_size_wide_unit[tx_size]; c += bsw) {
547 const int offsetr = blk_row + r;
548 const int offsetc = blk_col + c;
549 const int step = bsh * bsw;
Jingning Han4fe5f672017-05-19 15:46:07 -0700550
Yue Chen0797a202017-10-27 17:24:56 -0700551 if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
Jingning Han4fe5f672017-05-19 15:46:07 -0700552
Yue Chen0797a202017-10-27 17:24:56 -0700553 pack_txb_tokens(w, cm, x, tp, tok_end, xd, mbmi, plane, plane_bsize,
554 bit_depth, block, offsetr, offsetc, sub_txs,
555 token_stats);
556 block += step;
557 }
Jingning Han4fe5f672017-05-19 15:46:07 -0700558 }
559 }
560}
561#else // CONFIG_LV_MAP
Yaowu Xuf883b422016-08-30 14:01:10 -0700562static void pack_txb_tokens(aom_writer *w, const TOKENEXTRA **tp,
Yushin Chod0b77ac2017-10-20 17:33:16 -0700563 const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
564 MB_MODE_INFO *mbmi, int plane,
Yaowu Xuf883b422016-08-30 14:01:10 -0700565 BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700566 int block, int blk_row, int blk_col,
Angie Chiangd4022822016-11-02 18:30:25 -0700567 TX_SIZE tx_size, TOKEN_STATS *token_stats) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700568 const struct macroblockd_plane *const pd = &xd->plane[plane];
569 const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
570 const int tx_row = blk_row >> (1 - pd->subsampling_y);
571 const int tx_col = blk_col >> (1 - pd->subsampling_x);
572 TX_SIZE plane_tx_size;
Jingning Hanf65b8702016-10-31 12:13:20 -0700573 const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
574 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700575
576 if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;
577
Debargha Mukherjee2f123402016-08-30 17:43:38 -0700578 plane_tx_size =
579 plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
580 : mbmi->inter_tx_size[tx_row][tx_col];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700581
Debargha Mukherjee891a8772017-11-22 10:09:37 -0800582 if (tx_size == plane_tx_size
583#if DISABLE_VARTX_FOR_CHROMA
584 || pd->subsampling_x || pd->subsampling_y
585#endif // DISABLE_VARTX_FOR_CHROMA
586 ) {
Angie Chiangd02001d2016-11-06 15:31:49 -0800587 TOKEN_STATS tmp_token_stats;
588 init_token_stats(&tmp_token_stats);
Sebastien Alaiwan9f001f32017-11-28 16:32:33 +0100589 pack_mb_tokens(w, tp, tok_end, bit_depth, tx_size, &tmp_token_stats);
Angie Chiangd02001d2016-11-06 15:31:49 -0800590#if CONFIG_RD_DEBUG
591 token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
592 token_stats->cost += tmp_token_stats.cost;
593#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700594 } else {
Jingning Han1807fdc2016-11-08 15:17:58 -0800595 const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
Yue Chen0797a202017-10-27 17:24:56 -0700596 const int bsw = tx_size_wide_unit[sub_txs];
597 const int bsh = tx_size_high_unit[sub_txs];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700598
Yue Chen0797a202017-10-27 17:24:56 -0700599 assert(bsw > 0 && bsh > 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700600
Yue Chen0797a202017-10-27 17:24:56 -0700601 for (int r = 0; r < tx_size_high_unit[tx_size]; r += bsh) {
602 for (int c = 0; c < tx_size_wide_unit[tx_size]; c += bsw) {
603 const int offsetr = blk_row + r;
604 const int offsetc = blk_col + c;
605 const int step = bsh * bsw;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700606
Yue Chen0797a202017-10-27 17:24:56 -0700607 if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700608
Yue Chen0797a202017-10-27 17:24:56 -0700609 pack_txb_tokens(w, tp, tok_end, xd, mbmi, plane, plane_bsize, bit_depth,
610 block, offsetr, offsetc, sub_txs, token_stats);
611 block += step;
612 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700613 }
614 }
615}
Jingning Han4fe5f672017-05-19 15:46:07 -0700616#endif // CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -0700617
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +0100618#if CONFIG_Q_SEGMENTATION
619static int neg_interleave(int x, int ref, int max) {
620 const int diff = x - ref;
621 if (!ref) return x;
622 if (ref >= (max - 1)) return -diff;
623 if (2 * ref < max) {
624 if (abs(diff) <= ref) {
625 if (diff > 0)
626 return (diff << 1) - 1;
627 else
628 return ((-diff) << 1);
629 }
630 return x;
631 } else {
632 if (abs(diff) < (max - ref)) {
633 if (diff > 0)
634 return (diff << 1) - 1;
635 else
636 return ((-diff) << 1);
637 }
638 return (max - x) - 1;
639 }
640}
641
642static void write_q_segment_id(const AV1_COMMON *cm, int skip,
643 const MB_MODE_INFO *const mbmi, aom_writer *w,
644 const struct segmentation *seg,
645 struct segmentation_probs *segp,
646 BLOCK_SIZE bsize, int mi_row, int mi_col) {
647 int prev_ul = 0; /* Top left segment_id */
648 int prev_l = 0; /* Current left segment_id */
649 int prev_u = 0; /* Current top segment_id */
650
651 if (!seg->q_lvls) return;
652
653 MODE_INFO *const mi = cm->mi + mi_row * cm->mi_stride + mi_col;
654 int tinfo = mi->mbmi.boundary_info;
655 int above = (!(tinfo & TILE_ABOVE_BOUNDARY)) && ((mi_row - 1) >= 0);
656 int left = (!(tinfo & TILE_LEFT_BOUNDARY)) && ((mi_col - 1) >= 0);
657
658 if (above && left)
659 prev_ul =
660 get_segment_id(cm, cm->q_seg_map, BLOCK_4X4, mi_row - 1, mi_col - 1);
661
662 if (above)
663 prev_u = get_segment_id(cm, cm->q_seg_map, BLOCK_4X4, mi_row - 1, mi_col);
664
665 if (left)
666 prev_l = get_segment_id(cm, cm->q_seg_map, BLOCK_4X4, mi_row, mi_col - 1);
667
668 int cdf_num = pick_q_seg_cdf(prev_ul, prev_u, prev_l);
669 int pred = pick_q_seg_pred(prev_ul, prev_u, prev_l);
670
671 if (skip) {
672 set_q_segment_id(cm, cm->q_seg_map, mbmi->sb_type, mi_row, mi_col, pred);
673 return;
674 }
675
676 int coded_id = neg_interleave(mbmi->q_segment_id, pred, seg->q_lvls);
677
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +0100678 aom_cdf_prob *pred_cdf = segp->q_seg_cdf[cdf_num];
679 aom_write_symbol(w, coded_id, pred_cdf, 8);
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +0100680
681 set_q_segment_id(cm, cm->q_seg_map, bsize, mi_row, mi_col,
682 mbmi->q_segment_id);
683}
684#endif
685
Yaowu Xuf883b422016-08-30 14:01:10 -0700686static void write_segment_id(aom_writer *w, const struct segmentation *seg,
Thomas9ac55082016-09-23 18:04:17 +0100687 struct segmentation_probs *segp, int segment_id) {
Nathan E. Eggef627e582016-08-19 20:06:51 -0400688 if (seg->enabled && seg->update_map) {
Nathan E. Eggef627e582016-08-19 20:06:51 -0400689 aom_write_symbol(w, segment_id, segp->tree_cdf, MAX_SEGMENTS);
Nathan E. Eggef627e582016-08-19 20:06:51 -0400690 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700691}
692
Thomas Davies315f5782017-06-14 15:14:55 +0100693#define WRITE_REF_BIT(bname, pname) \
Thomas Davies894cc812017-06-22 17:51:33 +0100694 aom_write_symbol(w, bname, av1_get_pred_cdf_##pname(cm, xd), 2)
Thomas Davies0fbd2b72017-09-12 10:49:45 +0100695#define WRITE_REF_BIT2(bname, pname) \
696 aom_write_symbol(w, bname, av1_get_pred_cdf_##pname(xd), 2)
Thomas Davies315f5782017-06-14 15:14:55 +0100697
Yaowu Xuc27fc142016-08-22 16:08:15 -0700698// This function encodes the reference frame
Yaowu Xuf883b422016-08-30 14:01:10 -0700699static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd,
700 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700701 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
702 const int is_compound = has_second_ref(mbmi);
703 const int segment_id = mbmi->segment_id;
704
705 // If segment level coding of this signal is disabled...
706 // or the segment allows multiple reference frame options
707 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
708 assert(!is_compound);
709 assert(mbmi->ref_frame[0] ==
710 get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
David Barkerd92f3562017-10-09 17:46:23 +0100711 }
Sarah Parker2b9ec2e2017-10-30 17:34:08 -0700712#if CONFIG_SEGMENT_GLOBALMV
David Barkerd92f3562017-10-09 17:46:23 +0100713 else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP) ||
Sarah Parker2b9ec2e2017-10-30 17:34:08 -0700714 segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV))
David Barkerd92f3562017-10-09 17:46:23 +0100715#else
716 else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP))
717#endif
718 {
719 assert(!is_compound);
720 assert(mbmi->ref_frame[0] == LAST_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700721 } else {
722 // does the feature use compound prediction or not
723 // (if not specified at the frame/segment level)
724 if (cm->reference_mode == REFERENCE_MODE_SELECT) {
Debargha Mukherjee0f248c42017-09-07 12:40:18 -0700725 if (is_comp_ref_allowed(mbmi->sb_type))
Thomas Davies860def62017-06-14 10:00:03 +0100726 aom_write_symbol(w, is_compound, av1_get_reference_mode_cdf(cm, xd), 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700727 } else {
728 assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE));
729 }
730
731 if (is_compound) {
Zoe Liuc082bbc2017-05-17 13:31:37 -0700732#if CONFIG_EXT_COMP_REFS
733 const COMP_REFERENCE_TYPE comp_ref_type = has_uni_comp_refs(mbmi)
734 ? UNIDIR_COMP_REFERENCE
735 : BIDIR_COMP_REFERENCE;
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100736 aom_write_symbol(w, comp_ref_type, av1_get_comp_reference_type_cdf(xd),
737 2);
Zoe Liuc082bbc2017-05-17 13:31:37 -0700738
739 if (comp_ref_type == UNIDIR_COMP_REFERENCE) {
740 const int bit = mbmi->ref_frame[0] == BWDREF_FRAME;
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100741 WRITE_REF_BIT2(bit, uni_comp_ref_p);
Zoe Liufcf5fa22017-06-26 16:00:38 -0700742
Zoe Liuc082bbc2017-05-17 13:31:37 -0700743 if (!bit) {
Zoe Liufcf5fa22017-06-26 16:00:38 -0700744 assert(mbmi->ref_frame[0] == LAST_FRAME);
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100745 const int bit1 = mbmi->ref_frame[1] == LAST3_FRAME ||
746 mbmi->ref_frame[1] == GOLDEN_FRAME;
747 WRITE_REF_BIT2(bit1, uni_comp_ref_p1);
748 if (bit1) {
749 const int bit2 = mbmi->ref_frame[1] == GOLDEN_FRAME;
750 WRITE_REF_BIT2(bit2, uni_comp_ref_p2);
Zoe Liufcf5fa22017-06-26 16:00:38 -0700751 }
Zoe Liufcf5fa22017-06-26 16:00:38 -0700752 } else {
753 assert(mbmi->ref_frame[1] == ALTREF_FRAME);
Zoe Liuc082bbc2017-05-17 13:31:37 -0700754 }
755
756 return;
757 }
Zoe Liufcf5fa22017-06-26 16:00:38 -0700758
759 assert(comp_ref_type == BIDIR_COMP_REFERENCE);
Zoe Liuc082bbc2017-05-17 13:31:37 -0700760#endif // CONFIG_EXT_COMP_REFS
761
Yaowu Xuc27fc142016-08-22 16:08:15 -0700762 const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
763 mbmi->ref_frame[0] == LAST3_FRAME);
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100764 WRITE_REF_BIT(bit, comp_ref_p);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700765
Yaowu Xuc27fc142016-08-22 16:08:15 -0700766 if (!bit) {
Zoe Liu87818282017-11-26 17:09:59 -0800767 const int bit1 = mbmi->ref_frame[0] == LAST2_FRAME;
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100768 WRITE_REF_BIT(bit1, comp_ref_p1);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700769 } else {
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100770 const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
771 WRITE_REF_BIT(bit2, comp_ref_p2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700772 }
Zoe Liu7b1ec7a2017-05-24 22:28:24 -0700773
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100774 const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
775 WRITE_REF_BIT(bit_bwd, comp_bwdref_p);
Zoe Liue9b15e22017-07-19 15:53:01 -0700776
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100777 if (!bit_bwd) {
778 WRITE_REF_BIT(mbmi->ref_frame[1] == ALTREF2_FRAME, comp_bwdref_p1);
Zoe Liue9b15e22017-07-19 15:53:01 -0700779 }
Zoe Liue9b15e22017-07-19 15:53:01 -0700780
Yaowu Xuc27fc142016-08-22 16:08:15 -0700781 } else {
Zoe Liue9b15e22017-07-19 15:53:01 -0700782 const int bit0 = (mbmi->ref_frame[0] <= ALTREF_FRAME &&
783 mbmi->ref_frame[0] >= BWDREF_FRAME);
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100784 WRITE_REF_BIT(bit0, single_ref_p1);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700785
786 if (bit0) {
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100787 const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
788 WRITE_REF_BIT(bit1, single_ref_p2);
Zoe Liue9b15e22017-07-19 15:53:01 -0700789
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100790 if (!bit1) {
791 WRITE_REF_BIT(mbmi->ref_frame[0] == ALTREF2_FRAME, single_ref_p6);
Zoe Liue9b15e22017-07-19 15:53:01 -0700792 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700793 } else {
794 const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
795 mbmi->ref_frame[0] == GOLDEN_FRAME);
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100796 WRITE_REF_BIT(bit2, single_ref_p3);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700797
798 if (!bit2) {
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100799 const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
800 WRITE_REF_BIT(bit3, single_ref_p4);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700801 } else {
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100802 const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
803 WRITE_REF_BIT(bit4, single_ref_p5);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700804 }
805 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700806 }
807 }
808}
809
hui su5db97432016-10-14 16:10:14 -0700810#if CONFIG_FILTER_INTRA
Yue Chen4eba69b2017-11-09 22:37:35 -0800811static void write_filter_intra_mode_info(const MACROBLOCKD *xd,
hui su5db97432016-10-14 16:10:14 -0700812 const MB_MODE_INFO *const mbmi,
813 aom_writer *w) {
Yue Chen95e13e22017-11-01 23:56:35 -0700814 if (mbmi->mode == DC_PRED && mbmi->palette_mode_info.palette_size[0] == 0 &&
815 av1_filter_intra_allowed_txsize(mbmi->tx_size)) {
Yue Chen4eba69b2017-11-09 22:37:35 -0800816 aom_write_symbol(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[0],
817 xd->tile_ctx->filter_intra_cdfs[mbmi->tx_size], 2);
hui su5db97432016-10-14 16:10:14 -0700818 if (mbmi->filter_intra_mode_info.use_filter_intra_mode[0]) {
819 const FILTER_INTRA_MODE mode =
820 mbmi->filter_intra_mode_info.filter_intra_mode[0];
Yue Chen63ce36f2017-10-10 23:37:31 -0700821 aom_write_symbol(w, mode, xd->tile_ctx->filter_intra_mode_cdf[0],
822 FILTER_INTRA_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700823 }
824 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700825}
hui su5db97432016-10-14 16:10:14 -0700826#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -0700827
hui su5db97432016-10-14 16:10:14 -0700828#if CONFIG_EXT_INTRA
Joe Young3ca43bf2017-10-06 15:12:46 -0700829static void write_intra_angle_info(const MACROBLOCKD *xd,
830 FRAME_CONTEXT *const ec_ctx, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700831 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
832 const BLOCK_SIZE bsize = mbmi->sb_type;
Joe Young830d4ce2017-05-30 17:48:13 -0700833 if (!av1_use_angle_delta(bsize)) return;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700834
hui su45dc5972016-12-08 17:42:50 -0800835 if (av1_is_directional_mode(mbmi->mode, bsize)) {
Joe Young3ca43bf2017-10-06 15:12:46 -0700836#if CONFIG_EXT_INTRA_MOD
837 aom_write_symbol(w, mbmi->angle_delta[0] + MAX_ANGLE_DELTA,
838 ec_ctx->angle_delta_cdf[mbmi->mode - V_PRED],
839 2 * MAX_ANGLE_DELTA + 1);
840#else
841 (void)ec_ctx;
hui su0a6731f2017-04-26 15:23:47 -0700842 write_uniform(w, 2 * MAX_ANGLE_DELTA + 1,
843 MAX_ANGLE_DELTA + mbmi->angle_delta[0]);
Joe Young3ca43bf2017-10-06 15:12:46 -0700844#endif // CONFIG_EXT_INTRA_MOD
Yaowu Xuc27fc142016-08-22 16:08:15 -0700845 }
846
Luc Trudeaud6d9eee2017-07-12 12:36:50 -0400847 if (av1_is_directional_mode(get_uv_mode(mbmi->uv_mode), bsize)) {
Joe Young3ca43bf2017-10-06 15:12:46 -0700848#if CONFIG_EXT_INTRA_MOD
849 aom_write_symbol(w, mbmi->angle_delta[1] + MAX_ANGLE_DELTA,
850 ec_ctx->angle_delta_cdf[mbmi->uv_mode - V_PRED],
851 2 * MAX_ANGLE_DELTA + 1);
852#else
hui su0a6731f2017-04-26 15:23:47 -0700853 write_uniform(w, 2 * MAX_ANGLE_DELTA + 1,
854 MAX_ANGLE_DELTA + mbmi->angle_delta[1]);
Joe Young3ca43bf2017-10-06 15:12:46 -0700855#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700856 }
857}
858#endif // CONFIG_EXT_INTRA
859
Angie Chiang5678ad92016-11-21 09:38:40 -0800860static void write_mb_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd,
861 aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700862 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700863 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
Thomas Davies77c7c402017-01-11 17:58:54 +0000864 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Jingning Han203b1d32017-01-12 16:00:13 -0800865
Debargha Mukherjee0df711f2017-05-02 16:00:20 -0700866 if (!av1_is_interp_needed(xd)) {
Rupert Swarbrick27e90292017-09-28 17:46:50 +0100867 assert(mbmi->interp_filters ==
868 av1_broadcast_interp_filter(
869 av1_unswitchable_filter(cm->interp_filter)));
Debargha Mukherjee0df711f2017-05-02 16:00:20 -0700870 return;
871 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700872 if (cm->interp_filter == SWITCHABLE) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700873#if CONFIG_DUAL_FILTER
Jingning Han203b1d32017-01-12 16:00:13 -0800874 int dir;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700875 for (dir = 0; dir < 2; ++dir) {
876 if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
877 (mbmi->ref_frame[1] > INTRA_FRAME &&
878 has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700879 const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
Rupert Swarbrick27e90292017-09-28 17:46:50 +0100880 InterpFilter filter =
881 av1_extract_interp_filter(mbmi->interp_filters, dir);
882 aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
Angie Chiangb9b42a02017-01-20 12:47:36 -0800883 SWITCHABLE_FILTERS);
Rupert Swarbrick27e90292017-09-28 17:46:50 +0100884 ++cpi->interp_filter_selected[0][filter];
Angie Chiang38edf682017-02-21 15:13:09 -0800885 } else {
Rupert Swarbrick27e90292017-09-28 17:46:50 +0100886 assert(av1_extract_interp_filter(mbmi->interp_filters, dir) ==
887 EIGHTTAP_REGULAR);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700888 }
889 }
890#else
891 {
Yaowu Xuf883b422016-08-30 14:01:10 -0700892 const int ctx = av1_get_pred_context_switchable_interp(xd);
Rupert Swarbrick27e90292017-09-28 17:46:50 +0100893 InterpFilter filter = av1_extract_interp_filter(mbmi->interp_filters, 0);
894 aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
895 SWITCHABLE_FILTERS);
896 ++cpi->interp_filter_selected[0][filter];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700897 }
Jingning Han203b1d32017-01-12 16:00:13 -0800898#endif // CONFIG_DUAL_FILTER
Yaowu Xuc27fc142016-08-22 16:08:15 -0700899 }
900}
901
hui sud13c24a2017-04-07 16:13:07 -0700902#if CONFIG_PALETTE_DELTA_ENCODING
hui su33567b22017-04-30 16:40:19 -0700903// Transmit color values with delta encoding. Write the first value as
904// literal, and the deltas between each value and the previous one. "min_val" is
905// the smallest possible value of the deltas.
906static void delta_encode_palette_colors(const int *colors, int num,
907 int bit_depth, int min_val,
908 aom_writer *w) {
909 if (num <= 0) return;
hui sufa4ff852017-05-15 12:20:50 -0700910 assert(colors[0] < (1 << bit_depth));
hui su33567b22017-04-30 16:40:19 -0700911 aom_write_literal(w, colors[0], bit_depth);
912 if (num == 1) return;
913 int max_delta = 0;
914 int deltas[PALETTE_MAX_SIZE];
915 memset(deltas, 0, sizeof(deltas));
916 for (int i = 1; i < num; ++i) {
hui sufa4ff852017-05-15 12:20:50 -0700917 assert(colors[i] < (1 << bit_depth));
hui su33567b22017-04-30 16:40:19 -0700918 const int delta = colors[i] - colors[i - 1];
919 deltas[i - 1] = delta;
920 assert(delta >= min_val);
921 if (delta > max_delta) max_delta = delta;
922 }
923 const int min_bits = bit_depth - 3;
924 int bits = AOMMAX(av1_ceil_log2(max_delta + 1 - min_val), min_bits);
hui sufa4ff852017-05-15 12:20:50 -0700925 assert(bits <= bit_depth);
hui su33567b22017-04-30 16:40:19 -0700926 int range = (1 << bit_depth) - colors[0] - min_val;
hui sud13c24a2017-04-07 16:13:07 -0700927 aom_write_literal(w, bits - min_bits, 2);
hui su33567b22017-04-30 16:40:19 -0700928 for (int i = 0; i < num - 1; ++i) {
929 aom_write_literal(w, deltas[i] - min_val, bits);
930 range -= deltas[i];
931 bits = AOMMIN(bits, av1_ceil_log2(range));
hui sud13c24a2017-04-07 16:13:07 -0700932 }
933}
934
hui su33567b22017-04-30 16:40:19 -0700935// Transmit luma palette color values. First signal if each color in the color
936// cache is used. Those colors that are not in the cache are transmitted with
937// delta encoding.
938static void write_palette_colors_y(const MACROBLOCKD *const xd,
939 const PALETTE_MODE_INFO *const pmi,
940 int bit_depth, aom_writer *w) {
941 const int n = pmi->palette_size[0];
hui su33567b22017-04-30 16:40:19 -0700942 uint16_t color_cache[2 * PALETTE_MAX_SIZE];
Hui Su3748bc22017-08-23 11:30:41 -0700943 const int n_cache = av1_get_palette_cache(xd, 0, color_cache);
hui su33567b22017-04-30 16:40:19 -0700944 int out_cache_colors[PALETTE_MAX_SIZE];
945 uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
946 const int n_out_cache =
947 av1_index_color_cache(color_cache, n_cache, pmi->palette_colors, n,
948 cache_color_found, out_cache_colors);
949 int n_in_cache = 0;
950 for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
951 const int found = cache_color_found[i];
952 aom_write_bit(w, found);
953 n_in_cache += found;
954 }
955 assert(n_in_cache + n_out_cache == n);
956 delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 1, w);
957}
958
959// Write chroma palette color values. U channel is handled similarly to the luma
960// channel. For v channel, either use delta encoding or transmit raw values
961// directly, whichever costs less.
962static void write_palette_colors_uv(const MACROBLOCKD *const xd,
963 const PALETTE_MODE_INFO *const pmi,
hui sud13c24a2017-04-07 16:13:07 -0700964 int bit_depth, aom_writer *w) {
hui sud13c24a2017-04-07 16:13:07 -0700965 const int n = pmi->palette_size[1];
hui sud13c24a2017-04-07 16:13:07 -0700966 const uint16_t *colors_u = pmi->palette_colors + PALETTE_MAX_SIZE;
967 const uint16_t *colors_v = pmi->palette_colors + 2 * PALETTE_MAX_SIZE;
hui sud13c24a2017-04-07 16:13:07 -0700968 // U channel colors.
hui su33567b22017-04-30 16:40:19 -0700969 uint16_t color_cache[2 * PALETTE_MAX_SIZE];
Hui Su3748bc22017-08-23 11:30:41 -0700970 const int n_cache = av1_get_palette_cache(xd, 1, color_cache);
hui su33567b22017-04-30 16:40:19 -0700971 int out_cache_colors[PALETTE_MAX_SIZE];
972 uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
973 const int n_out_cache = av1_index_color_cache(
974 color_cache, n_cache, colors_u, n, cache_color_found, out_cache_colors);
975 int n_in_cache = 0;
976 for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
977 const int found = cache_color_found[i];
978 aom_write_bit(w, found);
979 n_in_cache += found;
hui sud13c24a2017-04-07 16:13:07 -0700980 }
hui su33567b22017-04-30 16:40:19 -0700981 delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 0, w);
982
983 // V channel colors. Don't use color cache as the colors are not sorted.
hui sud13c24a2017-04-07 16:13:07 -0700984 const int max_val = 1 << bit_depth;
985 int zero_count = 0, min_bits_v = 0;
986 int bits_v =
987 av1_get_palette_delta_bits_v(pmi, bit_depth, &zero_count, &min_bits_v);
988 const int rate_using_delta =
989 2 + bit_depth + (bits_v + 1) * (n - 1) - zero_count;
990 const int rate_using_raw = bit_depth * n;
991 if (rate_using_delta < rate_using_raw) { // delta encoding
hui sufa4ff852017-05-15 12:20:50 -0700992 assert(colors_v[0] < (1 << bit_depth));
hui sud13c24a2017-04-07 16:13:07 -0700993 aom_write_bit(w, 1);
994 aom_write_literal(w, bits_v - min_bits_v, 2);
995 aom_write_literal(w, colors_v[0], bit_depth);
hui su33567b22017-04-30 16:40:19 -0700996 for (int i = 1; i < n; ++i) {
hui sufa4ff852017-05-15 12:20:50 -0700997 assert(colors_v[i] < (1 << bit_depth));
hui sud13c24a2017-04-07 16:13:07 -0700998 if (colors_v[i] == colors_v[i - 1]) { // No need to signal sign bit.
999 aom_write_literal(w, 0, bits_v);
1000 continue;
1001 }
1002 const int delta = abs((int)colors_v[i] - colors_v[i - 1]);
1003 const int sign_bit = colors_v[i] < colors_v[i - 1];
1004 if (delta <= max_val - delta) {
1005 aom_write_literal(w, delta, bits_v);
1006 aom_write_bit(w, sign_bit);
1007 } else {
1008 aom_write_literal(w, max_val - delta, bits_v);
1009 aom_write_bit(w, !sign_bit);
1010 }
1011 }
1012 } else { // Transmit raw values.
1013 aom_write_bit(w, 0);
hui sufa4ff852017-05-15 12:20:50 -07001014 for (int i = 0; i < n; ++i) {
1015 assert(colors_v[i] < (1 << bit_depth));
1016 aom_write_literal(w, colors_v[i], bit_depth);
1017 }
hui sud13c24a2017-04-07 16:13:07 -07001018 }
1019}
1020#endif // CONFIG_PALETTE_DELTA_ENCODING
1021
Yaowu Xuf883b422016-08-30 14:01:10 -07001022static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
1023 const MODE_INFO *const mi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001024 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1025 const MODE_INFO *const above_mi = xd->above_mi;
1026 const MODE_INFO *const left_mi = xd->left_mi;
1027 const BLOCK_SIZE bsize = mbmi->sb_type;
Hui Su473cf892017-11-08 18:14:31 -08001028 assert(av1_allow_palette(cm->allow_screen_content_tools, bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001029 const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
Rupert Swarbrick6f9cd942017-08-02 15:57:18 +01001030 const int block_palette_idx = bsize - BLOCK_8X8;
1031
Yaowu Xuc27fc142016-08-22 16:08:15 -07001032 if (mbmi->mode == DC_PRED) {
Urvang Joshi23a61112017-01-30 14:59:27 -08001033 const int n = pmi->palette_size[0];
1034 int palette_y_mode_ctx = 0;
hui su40b9e7f2017-07-13 18:15:56 -07001035 if (above_mi) {
Urvang Joshi23a61112017-01-30 14:59:27 -08001036 palette_y_mode_ctx +=
1037 (above_mi->mbmi.palette_mode_info.palette_size[0] > 0);
hui su40b9e7f2017-07-13 18:15:56 -07001038 }
1039 if (left_mi) {
Urvang Joshi23a61112017-01-30 14:59:27 -08001040 palette_y_mode_ctx +=
1041 (left_mi->mbmi.palette_mode_info.palette_size[0] > 0);
hui su40b9e7f2017-07-13 18:15:56 -07001042 }
Thomas Davies59f92312017-08-23 00:33:12 +01001043 aom_write_symbol(
1044 w, n > 0,
1045 xd->tile_ctx->palette_y_mode_cdf[block_palette_idx][palette_y_mode_ctx],
1046 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001047 if (n > 0) {
Thomas Daviesce7272d2017-07-04 16:11:08 +01001048 aom_write_symbol(w, n - PALETTE_MIN_SIZE,
Rupert Swarbrick6f9cd942017-08-02 15:57:18 +01001049 xd->tile_ctx->palette_y_size_cdf[block_palette_idx],
Thomas Daviesce7272d2017-07-04 16:11:08 +01001050 PALETTE_SIZES);
hui sud13c24a2017-04-07 16:13:07 -07001051#if CONFIG_PALETTE_DELTA_ENCODING
hui su33567b22017-04-30 16:40:19 -07001052 write_palette_colors_y(xd, pmi, cm->bit_depth, w);
hui sud13c24a2017-04-07 16:13:07 -07001053#else
hui sufa4ff852017-05-15 12:20:50 -07001054 for (int i = 0; i < n; ++i) {
1055 assert(pmi->palette_colors[i] < (1 << cm->bit_depth));
Yaowu Xuf883b422016-08-30 14:01:10 -07001056 aom_write_literal(w, pmi->palette_colors[i], cm->bit_depth);
hui sufa4ff852017-05-15 12:20:50 -07001057 }
hui sud13c24a2017-04-07 16:13:07 -07001058#endif // CONFIG_PALETTE_DELTA_ENCODING
Yaowu Xuc27fc142016-08-22 16:08:15 -07001059 }
1060 }
1061
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00001062 const int uv_dc_pred =
1063#if CONFIG_MONO_VIDEO
1064 !cm->seq_params.monochrome &&
1065#endif
1066 mbmi->uv_mode == UV_DC_PRED;
1067 if (uv_dc_pred) {
Urvang Joshi23a61112017-01-30 14:59:27 -08001068 const int n = pmi->palette_size[1];
1069 const int palette_uv_mode_ctx = (pmi->palette_size[0] > 0);
Thomas Davies59f92312017-08-23 00:33:12 +01001070 aom_write_symbol(w, n > 0,
1071 xd->tile_ctx->palette_uv_mode_cdf[palette_uv_mode_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001072 if (n > 0) {
Thomas Daviesce7272d2017-07-04 16:11:08 +01001073 aom_write_symbol(w, n - PALETTE_MIN_SIZE,
Rupert Swarbrick6f9cd942017-08-02 15:57:18 +01001074 xd->tile_ctx->palette_uv_size_cdf[block_palette_idx],
Thomas Daviesce7272d2017-07-04 16:11:08 +01001075 PALETTE_SIZES);
hui sud13c24a2017-04-07 16:13:07 -07001076#if CONFIG_PALETTE_DELTA_ENCODING
hui su33567b22017-04-30 16:40:19 -07001077 write_palette_colors_uv(xd, pmi, cm->bit_depth, w);
hui sud13c24a2017-04-07 16:13:07 -07001078#else
hui sufa4ff852017-05-15 12:20:50 -07001079 for (int i = 0; i < n; ++i) {
1080 assert(pmi->palette_colors[PALETTE_MAX_SIZE + i] <
1081 (1 << cm->bit_depth));
1082 assert(pmi->palette_colors[2 * PALETTE_MAX_SIZE + i] <
1083 (1 << cm->bit_depth));
Yaowu Xuf883b422016-08-30 14:01:10 -07001084 aom_write_literal(w, pmi->palette_colors[PALETTE_MAX_SIZE + i],
1085 cm->bit_depth);
1086 aom_write_literal(w, pmi->palette_colors[2 * PALETTE_MAX_SIZE + i],
1087 cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001088 }
hui sud13c24a2017-04-07 16:13:07 -07001089#endif // CONFIG_PALETTE_DELTA_ENCODING
Yaowu Xuc27fc142016-08-22 16:08:15 -07001090 }
1091 }
1092}
1093
Angie Chiangc31ea682017-04-13 16:20:54 -07001094void av1_write_tx_type(const AV1_COMMON *const cm, const MACROBLOCKD *xd,
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001095#if CONFIG_TXK_SEL
Jingning Han19b5c8f2017-07-06 15:10:12 -07001096 int blk_row, int blk_col, int block, int plane,
1097 TX_SIZE tx_size,
Angie Chiangc31ea682017-04-13 16:20:54 -07001098#endif
1099 aom_writer *w) {
1100 MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
Jingning Han2a4da942016-11-03 18:31:30 -07001101 const int is_inter = is_inter_block(mbmi);
Jingning Han243b66b2017-06-23 12:11:47 -07001102#if !CONFIG_TXK_SEL
Debargha Mukherjee5577bd12017-11-20 16:04:26 -08001103 const TX_SIZE mtx_size =
1104 get_max_rect_tx_size(xd->mi[0]->mbmi.sb_type, is_inter);
Sarah Parker90024e42017-10-06 16:50:47 -07001105 const TX_SIZE tx_size =
Debargha Mukherjee5577bd12017-11-20 16:04:26 -08001106 is_inter ? AOMMAX(sub_tx_size_map[mtx_size], mbmi->min_tx_size)
Sarah Parker90024e42017-10-06 16:50:47 -07001107 : mbmi->tx_size;
Jingning Han243b66b2017-06-23 12:11:47 -07001108#endif // !CONFIG_TXK_SEL
Thomas Daviescef09622017-01-11 17:27:12 +00001109 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Daviescef09622017-01-11 17:27:12 +00001110
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001111#if !CONFIG_TXK_SEL
Angie Chiangc31ea682017-04-13 16:20:54 -07001112 TX_TYPE tx_type = mbmi->tx_type;
1113#else
1114 // Only y plane's tx_type is transmitted
Angie Chiang39b06eb2017-04-14 09:52:29 -07001115 if (plane > 0) return;
1116 PLANE_TYPE plane_type = get_plane_type(plane);
Jingning Han19b5c8f2017-07-06 15:10:12 -07001117 TX_TYPE tx_type =
1118 av1_get_tx_type(plane_type, xd, blk_row, blk_col, block, tx_size);
Angie Chiangc31ea682017-04-13 16:20:54 -07001119#endif
1120
Jingning Han2a4da942016-11-03 18:31:30 -07001121 if (!FIXED_TX_TYPE) {
Urvang Joshifeb925f2016-12-05 10:37:29 -08001122 const TX_SIZE square_tx_size = txsize_sqr_map[tx_size];
Jingning Han2a4da942016-11-03 18:31:30 -07001123 const BLOCK_SIZE bsize = mbmi->sb_type;
Sarah Parkere68a3e42017-02-16 14:03:24 -08001124 if (get_ext_tx_types(tx_size, bsize, is_inter, cm->reduced_tx_set_used) >
1125 1 &&
Yue Cheneeacc4c2017-01-17 17:29:17 -08001126 ((!cm->seg.enabled && cm->base_qindex > 0) ||
1127 (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
Jingning Han641b1ad2016-11-04 09:58:36 -07001128 !mbmi->skip &&
Jingning Han2a4da942016-11-03 18:31:30 -07001129 !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
Hui Suddbcde22017-09-18 17:22:02 -07001130 const TxSetType tx_set_type = get_ext_tx_set_type(
1131 tx_size, bsize, is_inter, cm->reduced_tx_set_used);
Sarah Parkere68a3e42017-02-16 14:03:24 -08001132 const int eset =
1133 get_ext_tx_set(tx_size, bsize, is_inter, cm->reduced_tx_set_used);
Sarah Parker784596d2017-06-23 08:41:26 -07001134 // eset == 0 should correspond to a set with only DCT_DCT and there
1135 // is no need to send the tx_type
1136 assert(eset > 0);
Hui Suddbcde22017-09-18 17:22:02 -07001137 assert(av1_ext_tx_used[tx_set_type][tx_type]);
Jingning Han2a4da942016-11-03 18:31:30 -07001138 if (is_inter) {
Hui Suddbcde22017-09-18 17:22:02 -07001139 aom_write_symbol(w, av1_ext_tx_ind[tx_set_type][tx_type],
Sarah Parker784596d2017-06-23 08:41:26 -07001140 ec_ctx->inter_ext_tx_cdf[eset][square_tx_size],
Hui Suddbcde22017-09-18 17:22:02 -07001141 av1_num_ext_tx_set[tx_set_type]);
Jingning Han2a4da942016-11-03 18:31:30 -07001142 } else if (ALLOW_INTRA_EXT_TX) {
Yue Chen57b8ff62017-10-10 23:37:31 -07001143#if CONFIG_FILTER_INTRA
1144 PREDICTION_MODE intra_dir;
1145 if (mbmi->filter_intra_mode_info.use_filter_intra_mode[0])
1146 intra_dir = fimode_to_intradir[mbmi->filter_intra_mode_info
1147 .filter_intra_mode[0]];
1148 else
1149 intra_dir = mbmi->mode;
1150 aom_write_symbol(
1151 w, av1_ext_tx_ind[tx_set_type][tx_type],
1152 ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][intra_dir],
1153 av1_num_ext_tx_set[tx_set_type]);
1154#else
Sarah Parker784596d2017-06-23 08:41:26 -07001155 aom_write_symbol(
Hui Suddbcde22017-09-18 17:22:02 -07001156 w, av1_ext_tx_ind[tx_set_type][tx_type],
Sarah Parker784596d2017-06-23 08:41:26 -07001157 ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][mbmi->mode],
Hui Suddbcde22017-09-18 17:22:02 -07001158 av1_num_ext_tx_set[tx_set_type]);
Yue Chen57b8ff62017-10-10 23:37:31 -07001159#endif
Jingning Han2a4da942016-11-03 18:31:30 -07001160 }
Lester Lu432012f2017-08-17 14:39:29 -07001161 }
Jingning Han2a4da942016-11-03 18:31:30 -07001162 }
1163}
1164
Jingning Hanf04254f2017-03-08 10:51:35 -08001165static void write_intra_mode(FRAME_CONTEXT *frame_ctx, BLOCK_SIZE bsize,
1166 PREDICTION_MODE mode, aom_writer *w) {
Hui Su814f41e2017-10-02 12:21:24 -07001167 aom_write_symbol(w, mode, frame_ctx->y_mode_cdf[size_group_lookup[bsize]],
Jingning Hanf04254f2017-03-08 10:51:35 -08001168 INTRA_MODES);
Jingning Hanf04254f2017-03-08 10:51:35 -08001169}
1170
1171static void write_intra_uv_mode(FRAME_CONTEXT *frame_ctx,
Luc Trudeaud6d9eee2017-07-12 12:36:50 -04001172 UV_PREDICTION_MODE uv_mode,
1173 PREDICTION_MODE y_mode, aom_writer *w) {
Luc Trudeau6e1cd782017-06-21 13:52:36 -04001174#if !CONFIG_CFL
Hui Su814f41e2017-10-02 12:21:24 -07001175 uv_mode = get_uv_mode(uv_mode);
Luc Trudeau6e1cd782017-06-21 13:52:36 -04001176#endif
1177 aom_write_symbol(w, uv_mode, frame_ctx->uv_mode_cdf[y_mode], UV_INTRA_MODES);
Jingning Hanf04254f2017-03-08 10:51:35 -08001178}
1179
Luc Trudeauf5334002017-04-25 12:21:26 -04001180#if CONFIG_CFL
David Michael Barrf6eaa152017-07-19 19:42:28 +09001181static void write_cfl_alphas(FRAME_CONTEXT *const ec_ctx, int idx,
1182 int joint_sign, aom_writer *w) {
1183 aom_write_symbol(w, joint_sign, ec_ctx->cfl_sign_cdf, CFL_JOINT_SIGNS);
1184 // Magnitudes are only signaled for nonzero codes.
1185 if (CFL_SIGN_U(joint_sign) != CFL_SIGN_ZERO) {
1186 aom_cdf_prob *cdf_u = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_U(joint_sign)];
1187 aom_write_symbol(w, CFL_IDX_U(idx), cdf_u, CFL_ALPHABET_SIZE);
1188 }
1189 if (CFL_SIGN_V(joint_sign) != CFL_SIGN_ZERO) {
1190 aom_cdf_prob *cdf_v = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_V(joint_sign)];
1191 aom_write_symbol(w, CFL_IDX_V(idx), cdf_v, CFL_ALPHABET_SIZE);
1192 }
Luc Trudeauf5334002017-04-25 12:21:26 -04001193}
1194#endif
1195
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001196static void write_cdef(AV1_COMMON *cm, aom_writer *w, int skip, int mi_col,
1197 int mi_row) {
1198 if (cm->all_lossless) return;
1199
1200 const int m = ~((1 << (6 - MI_SIZE_LOG2)) - 1);
1201 const MB_MODE_INFO *mbmi =
1202 &cm->mi_grid_visible[(mi_row & m) * cm->mi_stride + (mi_col & m)]->mbmi;
1203 // Initialise when at top left part of the superblock
1204 if (!(mi_row & (cm->mib_size - 1)) &&
1205 !(mi_col & (cm->mib_size - 1))) { // Top left?
1206#if CONFIG_EXT_PARTITION
1207 cm->cdef_preset[0] = cm->cdef_preset[1] = cm->cdef_preset[2] =
1208 cm->cdef_preset[3] = -1;
1209#else
1210 cm->cdef_preset = -1;
1211#endif
1212 }
1213
1214// Emit CDEF param at first non-skip coding block
1215#if CONFIG_EXT_PARTITION
1216 const int mask = 1 << (6 - MI_SIZE_LOG2);
1217 const int index = cm->sb_size == BLOCK_128X128
1218 ? !!(mi_col & mask) + 2 * !!(mi_row & mask)
1219 : 0;
1220 if (cm->cdef_preset[index] == -1 && !skip) {
1221 aom_write_literal(w, mbmi->cdef_strength, cm->cdef_bits);
1222 cm->cdef_preset[index] = mbmi->cdef_strength;
1223 }
1224#else
1225 if (cm->cdef_preset == -1 && !skip) {
1226 aom_write_literal(w, mbmi->cdef_strength, cm->cdef_bits);
1227 cm->cdef_preset = mbmi->cdef_strength;
1228 }
1229#endif
1230}
1231
Angie Chiangc31ea682017-04-13 16:20:54 -07001232static void pack_inter_mode_mvs(AV1_COMP *cpi, const int mi_row,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001233 const int mi_col, aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001234 AV1_COMMON *const cm = &cpi->common;
Arild Fuldseth07441162016-08-15 15:07:52 +02001235 MACROBLOCK *const x = &cpi->td.mb;
1236 MACROBLOCKD *const xd = &x->e_mbd;
Thomas Davies24523292017-01-11 16:56:47 +00001237 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Angie Chiangc31ea682017-04-13 16:20:54 -07001238 const MODE_INFO *mi = xd->mi[0];
Thomas Davies24523292017-01-11 16:56:47 +00001239
Yaowu Xuc27fc142016-08-22 16:08:15 -07001240 const struct segmentation *const seg = &cm->seg;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001241 struct segmentation_probs *const segp = &ec_ctx->seg;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001242 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1243 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
1244 const PREDICTION_MODE mode = mbmi->mode;
1245 const int segment_id = mbmi->segment_id;
1246 const BLOCK_SIZE bsize = mbmi->sb_type;
1247 const int allow_hp = cm->allow_high_precision_mv;
1248 const int is_inter = is_inter_block(mbmi);
1249 const int is_compound = has_second_ref(mbmi);
1250 int skip, ref;
David Barker45390c12017-02-20 14:44:40 +00001251 (void)mi_row;
1252 (void)mi_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001253
1254 if (seg->update_map) {
1255 if (seg->temporal_update) {
1256 const int pred_flag = mbmi->seg_id_predicted;
Thomas Davies00021352017-07-11 16:07:55 +01001257 aom_cdf_prob *pred_cdf = av1_get_pred_cdf_seg_id(segp, xd);
1258 aom_write_symbol(w, pred_flag, pred_cdf, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001259 if (!pred_flag) write_segment_id(w, seg, segp, segment_id);
1260 } else {
1261 write_segment_id(w, seg, segp, segment_id);
1262 }
1263 }
1264
Zoe Liuf40a9572017-10-13 12:37:19 -07001265#if CONFIG_EXT_SKIP
1266 write_skip_mode(cm, xd, segment_id, mi, w);
1267
1268 if (mbmi->skip_mode) {
1269 skip = mbmi->skip;
1270 assert(skip);
1271 } else {
1272#endif // CONFIG_EXT_SKIP
1273 skip = write_skip(cm, xd, segment_id, mi, w);
1274#if CONFIG_EXT_SKIP
1275 }
1276#endif // CONFIG_EXT_SKIP
1277
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +01001278#if CONFIG_Q_SEGMENTATION
1279 write_q_segment_id(cm, skip, mbmi, w, seg, segp, bsize, mi_row, mi_col);
Zoe Liuf40a9572017-10-13 12:37:19 -07001280#endif // CONFIG_Q_SEGMENTATION
1281
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001282 write_cdef(cm, w, skip, mi_col, mi_row);
1283
Arild Fuldseth07441162016-08-15 15:07:52 +02001284 if (cm->delta_q_present_flag) {
Pavel Frolov1dbe92d2017-11-02 01:49:19 +03001285 int super_block_upper_left = ((mi_row & (cm->mib_size - 1)) == 0) &&
1286 ((mi_col & (cm->mib_size - 1)) == 0);
Pavel Frolovbfa2b8c2017-11-01 20:08:44 +03001287 if ((bsize != cm->sb_size || skip == 0) && super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01001288 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01001289 int reduced_delta_qindex =
1290 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00001291 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001292 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07001293#if CONFIG_EXT_DELTA_Q
Cheng Chena97394f2017-09-27 15:05:14 -07001294#if CONFIG_LOOPFILTER_LEVEL
1295 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07001296 if (cm->delta_lf_multi) {
1297 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id) {
1298 int reduced_delta_lflevel =
1299 (mbmi->curr_delta_lf[lf_id] - xd->prev_delta_lf[lf_id]) /
1300 cm->delta_lf_res;
1301 write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
1302 xd->prev_delta_lf[lf_id] = mbmi->curr_delta_lf[lf_id];
1303 }
1304 } else {
Cheng Chena97394f2017-09-27 15:05:14 -07001305 int reduced_delta_lflevel =
Cheng Chen880166a2017-10-02 17:48:48 -07001306 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
Cheng Chena97394f2017-09-27 15:05:14 -07001307 cm->delta_lf_res;
Cheng Chen880166a2017-10-02 17:48:48 -07001308 write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
1309 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
Cheng Chena97394f2017-09-27 15:05:14 -07001310 }
1311 }
1312#else
Fangwen Fu231fe422017-04-24 17:52:29 -07001313 if (cm->delta_lf_present_flag) {
1314 int reduced_delta_lflevel =
1315 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
1316 cm->delta_lf_res;
1317 write_delta_lflevel(cm, xd, reduced_delta_lflevel, w);
1318 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
1319 }
Cheng Chena97394f2017-09-27 15:05:14 -07001320#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07001321#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02001322 }
1323 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001324
Zoe Liuf40a9572017-10-13 12:37:19 -07001325#if CONFIG_EXT_SKIP
1326 if (!mbmi->skip_mode)
1327#endif // CONFIG_EXT_SKIP
1328 write_is_inter(cm, xd, mbmi->segment_id, w, is_inter);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001329
Debargha Mukherjee4def76a2017-10-19 13:38:35 -07001330 if (cm->tx_mode == TX_MODE_SELECT && block_signals_txsize(bsize) &&
Yaowu Xuc27fc142016-08-22 16:08:15 -07001331 !(is_inter && skip) && !xd->lossless[segment_id]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001332 if (is_inter) { // This implies skip flag is 0.
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001333 const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, bsize, 0);
Jingning Hanf64062f2016-11-02 16:22:18 -07001334 const int bh = tx_size_high_unit[max_tx_size];
1335 const int bw = tx_size_wide_unit[max_tx_size];
Jingning Han9ca05b72017-01-03 14:41:36 -08001336 const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
1337 const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001338 int idx, idy;
Jingning Hanfe45b212016-11-22 10:30:23 -08001339 for (idy = 0; idy < height; idy += bh)
1340 for (idx = 0; idx < width; idx += bw)
Debargha Mukherjeeedc73462017-10-31 15:13:32 -07001341 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, 0, idy, idx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001342 } else {
Jingning Han1b1dc932016-11-09 10:55:30 -08001343 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001344 write_selected_tx_size(cm, xd, w);
1345 }
1346 } else {
Jingning Han1b1dc932016-11-09 10:55:30 -08001347 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001348 }
1349
Zoe Liuf40a9572017-10-13 12:37:19 -07001350#if CONFIG_EXT_SKIP
1351 if (mbmi->skip_mode) return;
1352#endif // CONFIG_EXT_SKIP
1353
Yaowu Xuc27fc142016-08-22 16:08:15 -07001354 if (!is_inter) {
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001355 write_intra_mode(ec_ctx, bsize, mode, w);
Jingning Hand3a64432017-04-06 17:04:17 -07001356 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001357 xd->plane[1].subsampling_y)) {
Jingning Hanf04254f2017-03-08 10:51:35 -08001358 write_intra_uv_mode(ec_ctx, mbmi->uv_mode, mode, w);
Jingning Han0b7cbe62017-03-08 10:22:47 -08001359
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001360#if CONFIG_CFL
Luc Trudeau6e1cd782017-06-21 13:52:36 -04001361 if (mbmi->uv_mode == UV_CFL_PRED) {
Luc Trudeau4d6ea542017-11-22 21:24:42 -05001362 if (!is_cfl_allowed(mbmi)) {
1363 aom_internal_error(
1364 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
1365 "Chroma from Luma (CfL) cannot be signaled for a %dx%d block.",
1366 block_size_wide[bsize], block_size_high[bsize]);
1367 }
David Michael Barr23198662017-06-19 23:19:48 +09001368 write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001369 }
1370#endif
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001371 }
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001372
Yaowu Xuc27fc142016-08-22 16:08:15 -07001373#if CONFIG_EXT_INTRA
Joe Young3ca43bf2017-10-06 15:12:46 -07001374 write_intra_angle_info(xd, ec_ctx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001375#endif // CONFIG_EXT_INTRA
Hui Sue87fb232017-10-05 15:00:15 -07001376 if (av1_allow_palette(cm->allow_screen_content_tools, bsize))
Yaowu Xuc27fc142016-08-22 16:08:15 -07001377 write_palette_mode_info(cm, xd, mi, w);
hui su5db97432016-10-14 16:10:14 -07001378#if CONFIG_FILTER_INTRA
Yue Chen4eba69b2017-11-09 22:37:35 -08001379 write_filter_intra_mode_info(xd, mbmi, w);
hui su5db97432016-10-14 16:10:14 -07001380#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001381 } else {
Yaowu Xub0d0d002016-11-22 09:26:43 -08001382 int16_t mode_ctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001383 write_ref_frames(cm, xd, w);
1384
Yaowu Xuc27fc142016-08-22 16:08:15 -07001385 if (is_compound)
1386 mode_ctx = mbmi_ext->compound_mode_context[mbmi->ref_frame[0]];
1387 else
Yaowu Xuf883b422016-08-30 14:01:10 -07001388 mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
1389 mbmi->ref_frame, bsize, -1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001390
1391 // If segment skip is not enabled code the mode.
1392 if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001393 if (is_inter_compound_mode(mode))
1394 write_inter_compound_mode(cm, xd, w, mode, mode_ctx);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001395 else if (is_inter_singleref_mode(mode))
1396 write_inter_mode(w, mode, ec_ctx, mode_ctx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001397
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001398 if (mode == NEWMV || mode == NEW_NEWMV || have_nearmv_in_inter_mode(mode))
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001399 write_drl_idx(ec_ctx, mbmi, mbmi_ext, w);
1400 else
1401 assert(mbmi->ref_mv_idx == 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001402 }
1403
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001404 if (mode == NEWMV || mode == NEW_NEWMV) {
1405 int_mv ref_mv;
1406 for (ref = 0; ref < 1 + is_compound; ++ref) {
1407 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1408 int nmv_ctx =
1409 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1410 mbmi_ext->ref_mv_stack[rf_type], ref, mbmi->ref_mv_idx);
1411 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1412 ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0];
1413 av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv, nmvc,
1414 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001415 }
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001416 } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
1417 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1418 int nmv_ctx =
1419 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1420 mbmi_ext->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
1421 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1422 av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv,
1423 &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv, nmvc,
1424 allow_hp);
1425 } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
1426 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1427 int nmv_ctx =
1428 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1429 mbmi_ext->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
1430 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1431 av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv,
1432 &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv, nmvc,
1433 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001434 }
1435
Yaowu Xuc27fc142016-08-22 16:08:15 -07001436 if (cpi->common.reference_mode != COMPOUND_REFERENCE &&
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001437 cpi->common.allow_interintra_compound && is_interintra_allowed(mbmi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001438 const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
1439 const int bsize_group = size_group_lookup[bsize];
Thomas Daviescff91712017-07-07 11:49:55 +01001440 aom_write_symbol(w, interintra, ec_ctx->interintra_cdf[bsize_group], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001441 if (interintra) {
Thomas Davies299ff042017-06-27 13:41:59 +01001442 aom_write_symbol(w, mbmi->interintra_mode,
1443 ec_ctx->interintra_mode_cdf[bsize_group],
1444 INTERINTRA_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001445 if (is_interintra_wedge_used(bsize)) {
Thomas Daviescff91712017-07-07 11:49:55 +01001446 aom_write_symbol(w, mbmi->use_wedge_interintra,
1447 ec_ctx->wedge_interintra_cdf[bsize], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001448 if (mbmi->use_wedge_interintra) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001449 aom_write_literal(w, mbmi->interintra_wedge_index,
1450 get_wedge_bits_lookup(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001451 assert(mbmi->interintra_wedge_sign == 0);
1452 }
1453 }
1454 }
1455 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001456
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001457 if (mbmi->ref_frame[1] != INTRA_FRAME) write_motion_mode(cm, xd, mi, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001458
Cheng Chen33a13d92017-11-28 16:49:59 -08001459#if CONFIG_JNT_COMP
1460 // First write idx to indicate current compound inter prediction mode group
1461 // Group A (0): jnt_comp, compound_average
1462 // Group B (1): interintra, compound_segment, wedge
1463 if (has_second_ref(mbmi)) {
Zoe Liu5f11e912017-12-05 23:23:56 -08001464 const int masked_compound_used =
1465 is_any_masked_compound_used(bsize) && cm->allow_masked_compound;
Cheng Chen5a881722017-11-30 17:05:10 -08001466
Zoe Liu5f11e912017-12-05 23:23:56 -08001467 if (masked_compound_used) {
Cheng Chen5a881722017-11-30 17:05:10 -08001468 const int ctx_comp_group_idx = get_comp_group_idx_context(xd);
1469 aom_write_symbol(w, mbmi->comp_group_idx,
1470 ec_ctx->comp_group_idx_cdf[ctx_comp_group_idx], 2);
Zoe Liu5f11e912017-12-05 23:23:56 -08001471 } else {
1472 assert(mbmi->comp_group_idx == 0);
Cheng Chen5a881722017-11-30 17:05:10 -08001473 }
Cheng Chen33a13d92017-11-28 16:49:59 -08001474
1475 if (mbmi->comp_group_idx == 0) {
1476 if (mbmi->compound_idx)
1477 assert(mbmi->interinter_compound_type == COMPOUND_AVERAGE);
1478
1479 const int comp_index_ctx = get_comp_index_context(cm, xd);
1480 aom_write_symbol(w, mbmi->compound_idx,
1481 ec_ctx->compound_index_cdf[comp_index_ctx], 2);
1482 } else {
Zoe Liu5f11e912017-12-05 23:23:56 -08001483 assert(cpi->common.reference_mode != SINGLE_REFERENCE &&
1484 is_inter_compound_mode(mbmi->mode) &&
1485 mbmi->motion_mode == SIMPLE_TRANSLATION);
1486 assert(masked_compound_used);
1487 // compound_segment, wedge
Cheng Chen33a13d92017-11-28 16:49:59 -08001488 assert(mbmi->interinter_compound_type == COMPOUND_WEDGE ||
1489 mbmi->interinter_compound_type == COMPOUND_SEG);
Cheng Chen33a13d92017-11-28 16:49:59 -08001490
Zoe Liu5f11e912017-12-05 23:23:56 -08001491 if (is_interinter_compound_used(COMPOUND_WEDGE, bsize))
1492 aom_write_symbol(w, mbmi->interinter_compound_type - 1,
1493 ec_ctx->compound_type_cdf[bsize],
1494 COMPOUND_TYPES - 1);
1495
1496 if (mbmi->interinter_compound_type == COMPOUND_WEDGE) {
1497 assert(is_interinter_compound_used(COMPOUND_WEDGE, bsize));
1498 aom_write_literal(w, mbmi->wedge_index, get_wedge_bits_lookup(bsize));
1499 aom_write_bit(w, mbmi->wedge_sign);
1500 } else {
1501 assert(mbmi->interinter_compound_type == COMPOUND_SEG);
1502 aom_write_literal(w, mbmi->mask_type, MAX_SEG_MASK_BITS);
Cheng Chen33a13d92017-11-28 16:49:59 -08001503 }
1504 }
1505 }
1506#else // CONFIG_JNT_COMP
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001507 if (cpi->common.reference_mode != SINGLE_REFERENCE &&
Zoe Liu85b66462017-04-20 14:28:19 -07001508 is_inter_compound_mode(mbmi->mode) &&
Zoe Liu85b66462017-04-20 14:28:19 -07001509 mbmi->motion_mode == SIMPLE_TRANSLATION &&
Zoe Liu85b66462017-04-20 14:28:19 -07001510 is_any_masked_compound_used(bsize)) {
Cheng Chen33a13d92017-11-28 16:49:59 -08001511 if (cm->allow_masked_compound) {
Sarah Parker680b9b12017-08-16 18:55:34 -07001512 if (!is_interinter_compound_used(COMPOUND_WEDGE, bsize))
1513 aom_write_bit(w, mbmi->interinter_compound_type == COMPOUND_AVERAGE);
1514 else
Cheng Chenbdd6ca82017-10-23 22:34:25 -07001515 aom_write_symbol(w, mbmi->interinter_compound_type,
1516 ec_ctx->compound_type_cdf[bsize], COMPOUND_TYPES);
Sarah Parker680b9b12017-08-16 18:55:34 -07001517 if (is_interinter_compound_used(COMPOUND_WEDGE, bsize) &&
1518 mbmi->interinter_compound_type == COMPOUND_WEDGE) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001519 aom_write_literal(w, mbmi->wedge_index, get_wedge_bits_lookup(bsize));
1520 aom_write_bit(w, mbmi->wedge_sign);
1521 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001522 if (mbmi->interinter_compound_type == COMPOUND_SEG) {
Cheng Chenbdd6ca82017-10-23 22:34:25 -07001523 aom_write_literal(w, mbmi->mask_type, MAX_SEG_MASK_BITS);
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001524 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001525 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001526 }
Cheng Chen33a13d92017-11-28 16:49:59 -08001527#endif // CONFIG_JNT_COMP
Yaowu Xuc27fc142016-08-22 16:08:15 -07001528
Debargha Mukherjee0df711f2017-05-02 16:00:20 -07001529 write_mb_interp_filter(cpi, xd, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001530 }
1531
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001532#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001533 av1_write_tx_type(cm, xd, w);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001534#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07001535}
1536
Hui Suc2232cf2017-10-11 17:32:56 -07001537#if CONFIG_INTRABC
1538static void write_intrabc_info(AV1_COMMON *cm, MACROBLOCKD *xd,
1539 const MB_MODE_INFO_EXT *mbmi_ext,
1540 int enable_tx_size, aom_writer *w) {
1541 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
1542 int use_intrabc = is_intrabc_block(mbmi);
1543 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1544 aom_write_symbol(w, use_intrabc, ec_ctx->intrabc_cdf, 2);
1545 if (use_intrabc) {
1546 assert(mbmi->mode == DC_PRED);
1547 assert(mbmi->uv_mode == UV_DC_PRED);
Hui Su12546aa2017-10-13 16:10:01 -07001548 if ((enable_tx_size && !mbmi->skip)) {
Hui Su12546aa2017-10-13 16:10:01 -07001549 const BLOCK_SIZE bsize = mbmi->sb_type;
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001550 const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, bsize, 0);
Hui Su12546aa2017-10-13 16:10:01 -07001551 const int bh = tx_size_high_unit[max_tx_size];
1552 const int bw = tx_size_wide_unit[max_tx_size];
1553 const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
1554 const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
Hui Su12546aa2017-10-13 16:10:01 -07001555 int idx, idy;
1556 for (idy = 0; idy < height; idy += bh) {
1557 for (idx = 0; idx < width; idx += bw) {
Debargha Mukherjeeedc73462017-10-31 15:13:32 -07001558 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, 0, idy, idx, w);
Hui Su12546aa2017-10-13 16:10:01 -07001559 }
1560 }
Hui Su12546aa2017-10-13 16:10:01 -07001561 } else {
Hui Su12546aa2017-10-13 16:10:01 -07001562 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, mbmi->skip, xd);
Hui Su12546aa2017-10-13 16:10:01 -07001563 }
Hui Suc2232cf2017-10-11 17:32:56 -07001564 int_mv dv_ref = mbmi_ext->ref_mvs[INTRA_FRAME][0];
1565 av1_encode_dv(w, &mbmi->mv[0].as_mv, &dv_ref.as_mv, &ec_ctx->ndvc);
Sebastien Alaiwan3bac9922017-11-02 12:34:41 +01001566#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001567 av1_write_tx_type(cm, xd, w);
Sebastien Alaiwan3bac9922017-11-02 12:34:41 +01001568#endif // !CONFIG_TXK_SEL
Hui Suc2232cf2017-10-11 17:32:56 -07001569 }
1570}
1571#endif // CONFIG_INTRABC
1572
Thomas Davies3ab20b42017-09-19 10:30:53 +01001573static void write_mb_modes_kf(AV1_COMMON *cm, MACROBLOCKD *xd,
Alex Converse44c2bad2017-05-11 09:36:10 -07001574#if CONFIG_INTRABC
1575 const MB_MODE_INFO_EXT *mbmi_ext,
1576#endif // CONFIG_INTRABC
Jingning Han36fe3202017-02-20 22:31:49 -08001577 const int mi_row, const int mi_col,
Angie Chiangc31ea682017-04-13 16:20:54 -07001578 aom_writer *w) {
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001579 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001580 const struct segmentation *const seg = &cm->seg;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001581 struct segmentation_probs *const segp = &ec_ctx->seg;
Angie Chiangc31ea682017-04-13 16:20:54 -07001582 const MODE_INFO *const mi = xd->mi[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001583 const MODE_INFO *const above_mi = xd->above_mi;
1584 const MODE_INFO *const left_mi = xd->left_mi;
1585 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1586 const BLOCK_SIZE bsize = mbmi->sb_type;
David Barker45390c12017-02-20 14:44:40 +00001587 (void)mi_row;
1588 (void)mi_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001589
1590 if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id);
1591
Alex Converse619576b2017-05-10 15:14:18 -07001592 const int skip = write_skip(cm, xd, mbmi->segment_id, mi, w);
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001593
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +01001594#if CONFIG_Q_SEGMENTATION
1595 write_q_segment_id(cm, skip, mbmi, w, seg, segp, bsize, mi_row, mi_col);
1596#endif
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001597
1598 write_cdef(cm, w, skip, mi_col, mi_row);
1599
Arild Fuldseth07441162016-08-15 15:07:52 +02001600 if (cm->delta_q_present_flag) {
Pavel Frolov1dbe92d2017-11-02 01:49:19 +03001601 int super_block_upper_left = ((mi_row & (cm->mib_size - 1)) == 0) &&
1602 ((mi_col & (cm->mib_size - 1)) == 0);
Pavel Frolovbfa2b8c2017-11-01 20:08:44 +03001603 if ((bsize != cm->sb_size || skip == 0) && super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01001604 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01001605 int reduced_delta_qindex =
1606 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00001607 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001608 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07001609#if CONFIG_EXT_DELTA_Q
Cheng Chena97394f2017-09-27 15:05:14 -07001610#if CONFIG_LOOPFILTER_LEVEL
1611 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07001612 if (cm->delta_lf_multi) {
1613 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id) {
1614 int reduced_delta_lflevel =
1615 (mbmi->curr_delta_lf[lf_id] - xd->prev_delta_lf[lf_id]) /
1616 cm->delta_lf_res;
1617 write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
1618 xd->prev_delta_lf[lf_id] = mbmi->curr_delta_lf[lf_id];
1619 }
1620 } else {
Cheng Chena97394f2017-09-27 15:05:14 -07001621 int reduced_delta_lflevel =
Cheng Chen880166a2017-10-02 17:48:48 -07001622 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
Cheng Chena97394f2017-09-27 15:05:14 -07001623 cm->delta_lf_res;
Cheng Chen880166a2017-10-02 17:48:48 -07001624 write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
1625 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
Cheng Chena97394f2017-09-27 15:05:14 -07001626 }
1627 }
1628#else
Fangwen Fu231fe422017-04-24 17:52:29 -07001629 if (cm->delta_lf_present_flag) {
1630 int reduced_delta_lflevel =
1631 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
1632 cm->delta_lf_res;
1633 write_delta_lflevel(cm, xd, reduced_delta_lflevel, w);
1634 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
1635 }
Cheng Chena97394f2017-09-27 15:05:14 -07001636#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07001637#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02001638 }
1639 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001640
Alex Conversef71808c2017-06-06 12:21:17 -07001641 int enable_tx_size = cm->tx_mode == TX_MODE_SELECT &&
Rupert Swarbrickfcff0b22017-10-05 09:26:04 +01001642 block_signals_txsize(bsize) &&
Alex Conversef71808c2017-06-06 12:21:17 -07001643 !xd->lossless[mbmi->segment_id];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001644
Alex Converse28744302017-04-13 14:46:22 -07001645#if CONFIG_INTRABC
RogerZhouca865462017-10-05 15:06:27 -07001646 if (av1_allow_intrabc(bsize, cm)) {
Hui Suc2232cf2017-10-11 17:32:56 -07001647 write_intrabc_info(cm, xd, mbmi_ext, enable_tx_size, w);
1648 if (is_intrabc_block(mbmi)) return;
Alex Converse28744302017-04-13 14:46:22 -07001649 }
1650#endif // CONFIG_INTRABC
Hui Suc2232cf2017-10-11 17:32:56 -07001651
Alex Conversef71808c2017-06-06 12:21:17 -07001652 if (enable_tx_size) write_selected_tx_size(cm, xd, w);
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001653#if CONFIG_INTRABC
Hui Su12546aa2017-10-13 16:10:01 -07001654 if (cm->allow_screen_content_tools)
1655 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, mbmi->skip, xd);
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001656#endif // CONFIG_INTRABC
Alex Converse28744302017-04-13 14:46:22 -07001657
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001658 write_intra_mode_kf(cm, ec_ctx, mi, above_mi, left_mi, 0, mbmi->mode, w);
Jingning Han0b7cbe62017-03-08 10:22:47 -08001659
Jingning Hand3a64432017-04-06 17:04:17 -07001660 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
Luc Trudeau2c317902017-04-28 11:06:50 -04001661 xd->plane[1].subsampling_y)) {
Jingning Hanf04254f2017-03-08 10:51:35 -08001662 write_intra_uv_mode(ec_ctx, mbmi->uv_mode, mbmi->mode, w);
Jingning Han0b7cbe62017-03-08 10:22:47 -08001663
Luc Trudeauf5334002017-04-25 12:21:26 -04001664#if CONFIG_CFL
Luc Trudeau6e1cd782017-06-21 13:52:36 -04001665 if (mbmi->uv_mode == UV_CFL_PRED) {
Luc Trudeau4d6ea542017-11-22 21:24:42 -05001666 if (!is_cfl_allowed(mbmi)) {
1667 aom_internal_error(
1668 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
1669 "Chroma from Luma (CfL) cannot be signaled for a %dx%d block.",
1670 block_size_wide[bsize], block_size_high[bsize]);
1671 }
David Michael Barr23198662017-06-19 23:19:48 +09001672 write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
Luc Trudeauf5334002017-04-25 12:21:26 -04001673 }
Luc Trudeauf5334002017-04-25 12:21:26 -04001674#endif
Luc Trudeau2c317902017-04-28 11:06:50 -04001675 }
Debargha Mukherjee6ea917e2017-10-19 09:31:29 -07001676
Yaowu Xuc27fc142016-08-22 16:08:15 -07001677#if CONFIG_EXT_INTRA
Joe Young3ca43bf2017-10-06 15:12:46 -07001678 write_intra_angle_info(xd, ec_ctx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001679#endif // CONFIG_EXT_INTRA
Hui Sue87fb232017-10-05 15:00:15 -07001680 if (av1_allow_palette(cm->allow_screen_content_tools, bsize))
Yaowu Xuc27fc142016-08-22 16:08:15 -07001681 write_palette_mode_info(cm, xd, mi, w);
hui su5db97432016-10-14 16:10:14 -07001682#if CONFIG_FILTER_INTRA
Yue Chen4eba69b2017-11-09 22:37:35 -08001683 write_filter_intra_mode_info(xd, mbmi, w);
hui su5db97432016-10-14 16:10:14 -07001684#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001685
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001686#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001687 av1_write_tx_type(cm, xd, w);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001688#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07001689}
1690
Angie Chiangd4022822016-11-02 18:30:25 -07001691#if CONFIG_RD_DEBUG
1692static void dump_mode_info(MODE_INFO *mi) {
1693 printf("\nmi->mbmi.mi_row == %d\n", mi->mbmi.mi_row);
1694 printf("&& mi->mbmi.mi_col == %d\n", mi->mbmi.mi_col);
1695 printf("&& mi->mbmi.sb_type == %d\n", mi->mbmi.sb_type);
1696 printf("&& mi->mbmi.tx_size == %d\n", mi->mbmi.tx_size);
1697 if (mi->mbmi.sb_type >= BLOCK_8X8) {
1698 printf("&& mi->mbmi.mode == %d\n", mi->mbmi.mode);
1699 } else {
1700 printf("&& mi->bmi[0].as_mode == %d\n", mi->bmi[0].as_mode);
1701 }
1702}
Angie Chiangd02001d2016-11-06 15:31:49 -08001703static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats,
1704 int plane) {
1705 if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) {
1706 int r, c;
1707 printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n",
1708 plane, rd_stats->txb_coeff_cost[plane], token_stats->cost);
Angie Chiangd02001d2016-11-06 15:31:49 -08001709 printf("rd txb_coeff_cost_map\n");
1710 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
1711 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
1712 printf("%d ", rd_stats->txb_coeff_cost_map[plane][r][c]);
1713 }
1714 printf("\n");
1715 }
1716
1717 printf("pack txb_coeff_cost_map\n");
1718 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
1719 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
1720 printf("%d ", token_stats->txb_coeff_cost_map[r][c]);
1721 }
1722 printf("\n");
1723 }
Angie Chiangd02001d2016-11-06 15:31:49 -08001724 return 1;
1725 }
1726 return 0;
1727}
Angie Chiangd4022822016-11-02 18:30:25 -07001728#endif
1729
Di Chen56586622017-06-09 13:49:44 -07001730#if ENC_MISMATCH_DEBUG
1731static void enc_dump_logs(AV1_COMP *cpi, int mi_row, int mi_col) {
1732 AV1_COMMON *const cm = &cpi->common;
1733 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1734 MODE_INFO *m;
1735 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
1736 m = xd->mi[0];
1737 if (is_inter_block(&m->mbmi)) {
Zoe Liuf40a9572017-10-13 12:37:19 -07001738#define FRAME_TO_CHECK 11
Zoe Liu17af2742017-10-06 10:36:42 -07001739 if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
Di Chen56586622017-06-09 13:49:44 -07001740 const MB_MODE_INFO *const mbmi = &m->mbmi;
1741 const BLOCK_SIZE bsize = mbmi->sb_type;
1742
1743 int_mv mv[2];
1744 int is_comp_ref = has_second_ref(&m->mbmi);
1745 int ref;
1746
1747 for (ref = 0; ref < 1 + is_comp_ref; ++ref)
1748 mv[ref].as_mv = m->mbmi.mv[ref].as_mv;
1749
1750 if (!is_comp_ref) {
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001751 mv[1].as_int = 0;
Di Chen56586622017-06-09 13:49:44 -07001752 }
Di Chen56586622017-06-09 13:49:44 -07001753
Di Chen56586622017-06-09 13:49:44 -07001754 MACROBLOCK *const x = &cpi->td.mb;
Di Chen56586622017-06-09 13:49:44 -07001755 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
Zoe Liuf40a9572017-10-13 12:37:19 -07001756 const int16_t mode_ctx =
1757 is_comp_ref ? mbmi_ext->compound_mode_context[mbmi->ref_frame[0]]
1758 : av1_mode_context_analyzer(mbmi_ext->mode_context,
1759 mbmi->ref_frame, bsize, -1);
1760
Di Chen56586622017-06-09 13:49:44 -07001761 const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
1762 int16_t zeromv_ctx = -1;
1763 int16_t refmv_ctx = -1;
Zoe Liuf40a9572017-10-13 12:37:19 -07001764
Di Chen56586622017-06-09 13:49:44 -07001765 if (mbmi->mode != NEWMV) {
Sarah Parker2b9ec2e2017-10-30 17:34:08 -07001766 zeromv_ctx = (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
Di Chen56586622017-06-09 13:49:44 -07001767 if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) {
Sarah Parker2b9ec2e2017-10-30 17:34:08 -07001768 assert(mbmi->mode == GLOBALMV);
Di Chen56586622017-06-09 13:49:44 -07001769 }
Sarah Parker2b9ec2e2017-10-30 17:34:08 -07001770 if (mbmi->mode != GLOBALMV) {
Di Chen56586622017-06-09 13:49:44 -07001771 refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
1772 if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
1773 if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
1774 if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;
1775 }
1776 }
1777
Zoe Liuf40a9572017-10-13 12:37:19 -07001778#if CONFIG_EXT_SKIP
1779 printf(
1780 "=== ENCODER ===: "
1781 "Frame=%d, (mi_row,mi_col)=(%d,%d), skip_mode=%d, mode=%d, bsize=%d, "
1782 "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
1783 "ref[1]=%d, motion_mode=%d, mode_ctx=%d, "
1784 "newmv_ctx=%d, zeromv_ctx=%d, refmv_ctx=%d, tx_size=%d\n",
1785 cm->current_video_frame, mi_row, mi_col, mbmi->skip_mode, mbmi->mode,
1786 bsize, cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col,
1787 mv[1].as_mv.row, mv[1].as_mv.col, mbmi->ref_frame[0],
1788 mbmi->ref_frame[1], mbmi->motion_mode, mode_ctx, newmv_ctx,
1789 zeromv_ctx, refmv_ctx, mbmi->tx_size);
1790#else
Di Chen56586622017-06-09 13:49:44 -07001791 printf(
1792 "=== ENCODER ===: "
1793 "Frame=%d, (mi_row,mi_col)=(%d,%d), mode=%d, bsize=%d, "
1794 "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
Zoe Liuf40a9572017-10-13 12:37:19 -07001795 "ref[1]=%d, motion_mode=%d, mode_ctx=%d, "
1796 "newmv_ctx=%d, zeromv_ctx=%d, refmv_ctx=%d, tx_size=%d\n",
Di Chen56586622017-06-09 13:49:44 -07001797 cm->current_video_frame, mi_row, mi_col, mbmi->mode, bsize,
1798 cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col, mv[1].as_mv.row,
1799 mv[1].as_mv.col, mbmi->ref_frame[0], mbmi->ref_frame[1],
Zoe Liuf40a9572017-10-13 12:37:19 -07001800 mbmi->motion_mode, mode_ctx, newmv_ctx, zeromv_ctx, refmv_ctx,
1801 mbmi->tx_size);
1802#endif // CONFIG_EXT_SKIP
Di Chen56586622017-06-09 13:49:44 -07001803 }
1804 }
1805}
1806#endif // ENC_MISMATCH_DEBUG
1807
Yue Chen64550b62017-01-12 12:18:22 -08001808static void write_mbmi_b(AV1_COMP *cpi, const TileInfo *const tile,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001809 aom_writer *w, int mi_row, int mi_col) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001810 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001811 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1812 MODE_INFO *m;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001813 int bh, bw;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001814 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
1815 m = xd->mi[0];
1816
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001817 assert(m->mbmi.sb_type <= cm->sb_size ||
Rupert Swarbrick72678572017-08-02 12:05:26 +01001818 (m->mbmi.sb_type >= BLOCK_SIZES && m->mbmi.sb_type < BLOCK_SIZES_ALL));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001819
Jingning Hanc709e1f2016-12-06 14:48:09 -08001820 bh = mi_size_high[m->mbmi.sb_type];
1821 bw = mi_size_wide[m->mbmi.sb_type];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001822
1823 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1824
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001825 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001826#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001827 cm->dependent_horz_tiles,
1828#endif // CONFIG_DEPENDENT_HORZTILES
1829 cm->mi_rows, cm->mi_cols);
Yushin Cho77bba8d2016-11-04 16:36:56 -07001830
Yaowu Xuc27fc142016-08-22 16:08:15 -07001831 if (frame_is_intra_only(cm)) {
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001832#if CONFIG_INTRABC
Hui Su12546aa2017-10-13 16:10:01 -07001833 if (cm->allow_screen_content_tools) {
1834 xd->above_txfm_context =
1835 cm->above_txfm_context + (mi_col << TX_UNIT_WIDE_LOG2);
1836 xd->left_txfm_context = xd->left_txfm_context_buffer +
1837 ((mi_row & MAX_MIB_MASK) << TX_UNIT_HIGH_LOG2);
1838 }
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001839#endif // CONFIG_INTRABC
Alex Converse44c2bad2017-05-11 09:36:10 -07001840 write_mb_modes_kf(cm, xd,
1841#if CONFIG_INTRABC
1842 cpi->td.mb.mbmi_ext,
1843#endif // CONFIG_INTRABC
1844 mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001845 } else {
Jingning Han331662e2017-05-30 17:03:32 -07001846 xd->above_txfm_context =
1847 cm->above_txfm_context + (mi_col << TX_UNIT_WIDE_LOG2);
1848 xd->left_txfm_context = xd->left_txfm_context_buffer +
1849 ((mi_row & MAX_MIB_MASK) << TX_UNIT_HIGH_LOG2);
Angie Chiang38edf682017-02-21 15:13:09 -08001850 // has_subpel_mv_component needs the ref frame buffers set up to look
1851 // up if they are scaled. has_subpel_mv_component is in turn needed by
Yaowu Xuc27fc142016-08-22 16:08:15 -07001852 // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
1853 set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
Zoe Liu85b66462017-04-20 14:28:19 -07001854
Di Chen56586622017-06-09 13:49:44 -07001855#if ENC_MISMATCH_DEBUG
Di Chen56586622017-06-09 13:49:44 -07001856 enc_dump_logs(cpi, mi_row, mi_col);
1857#endif // ENC_MISMATCH_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07001858
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001859 pack_inter_mode_mvs(cpi, mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001860 }
Yue Chen64550b62017-01-12 12:18:22 -08001861}
Yaowu Xuc27fc142016-08-22 16:08:15 -07001862
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001863static void write_inter_txb_coeff(AV1_COMMON *const cm, MACROBLOCK *const x,
1864 MB_MODE_INFO *const mbmi, aom_writer *w,
1865 const TOKENEXTRA **tok,
1866 const TOKENEXTRA *const tok_end,
1867 TOKEN_STATS *token_stats, const int row,
1868 const int col, int *block, const int plane) {
1869 MACROBLOCKD *const xd = &x->e_mbd;
1870 const struct macroblockd_plane *const pd = &xd->plane[plane];
Debargha Mukherjee19619882017-11-22 13:13:14 -08001871 const BLOCK_SIZE bsize = mbmi->sb_type;
1872 const BLOCK_SIZE bsizec =
1873 scale_chroma_bsize(bsize, pd->subsampling_x, pd->subsampling_y);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001874
1875 const BLOCK_SIZE plane_bsize =
Debargha Mukherjee19619882017-11-22 13:13:14 -08001876 AOMMAX(BLOCK_4X4, get_plane_block_size(bsizec, pd));
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001877
Debargha Mukherjee19619882017-11-22 13:13:14 -08001878 TX_SIZE max_tx_size = get_vartx_max_txsize(
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001879 xd, plane_bsize, pd->subsampling_x || pd->subsampling_y);
Debargha Mukherjee19619882017-11-22 13:13:14 -08001880#if DISABLE_VARTX_FOR_CHROMA == 2
1881 // If the luma transform size is split at least one level, split the chroma
1882 // by one level. Otherwise use the largest possible trasnform size for
1883 // chroma.
1884 if (plane && (pd->subsampling_x || pd->subsampling_y)) {
1885 const TX_SIZE l_max_tx_size = get_vartx_max_txsize(xd, bsizec, 0);
1886 const int is_split =
1887 (l_max_tx_size != mbmi->inter_tx_size[0][0] && bsize == bsizec &&
1888 txsize_to_bsize[l_max_tx_size] == bsizec);
1889 if (is_split) max_tx_size = sub_tx_size_map[max_tx_size];
1890 }
1891#endif // DISABLE_VARTX_FOR_CHROMA == 2
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001892 const int step =
1893 tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
1894 const int bkw = tx_size_wide_unit[max_tx_size];
1895 const int bkh = tx_size_high_unit[max_tx_size];
1896
1897 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
1898 int mu_blocks_wide = block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
1899 int mu_blocks_high = block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
1900
1901 int blk_row, blk_col;
1902
1903 const int num_4x4_w = block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
1904 const int num_4x4_h = block_size_high[plane_bsize] >> tx_size_wide_log2[0];
1905
1906 const int unit_height = AOMMIN(mu_blocks_high + row, num_4x4_h);
1907 const int unit_width = AOMMIN(mu_blocks_wide + col, num_4x4_w);
1908 for (blk_row = row; blk_row < unit_height; blk_row += bkh) {
1909 for (blk_col = col; blk_col < unit_width; blk_col += bkw) {
1910 pack_txb_tokens(w,
1911#if CONFIG_LV_MAP
1912 cm, x,
1913#endif
1914 tok, tok_end, xd, mbmi, plane, plane_bsize, cm->bit_depth,
1915 *block, blk_row, blk_col, max_tx_size, token_stats);
1916 *block += step;
1917 }
1918 }
1919}
1920
Yue Chen64550b62017-01-12 12:18:22 -08001921static void write_tokens_b(AV1_COMP *cpi, const TileInfo *const tile,
1922 aom_writer *w, const TOKENEXTRA **tok,
1923 const TOKENEXTRA *const tok_end, int mi_row,
1924 int mi_col) {
1925 AV1_COMMON *const cm = &cpi->common;
1926 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Wei-Ting Lin1d46d902017-06-26 15:57:18 -07001927 const int mi_offset = mi_row * cm->mi_stride + mi_col;
1928 MODE_INFO *const m = *(cm->mi_grid_visible + mi_offset);
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001929 MB_MODE_INFO *const mbmi = &m->mbmi;
Yue Chen64550b62017-01-12 12:18:22 -08001930 int plane;
1931 int bh, bw;
Yushin Cho258a0242017-03-06 13:53:01 -08001932 MACROBLOCK *const x = &cpi->td.mb;
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001933#if CONFIG_LV_MAP
Yue Chen64550b62017-01-12 12:18:22 -08001934 (void)tok;
1935 (void)tok_end;
1936#endif
Wei-Ting Lin1d46d902017-06-26 15:57:18 -07001937 xd->mi = cm->mi_grid_visible + mi_offset;
Yue Chen64550b62017-01-12 12:18:22 -08001938
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001939 assert(mbmi->sb_type <= cm->sb_size ||
Rupert Swarbrick72678572017-08-02 12:05:26 +01001940 (mbmi->sb_type >= BLOCK_SIZES && mbmi->sb_type < BLOCK_SIZES_ALL));
Yue Chen64550b62017-01-12 12:18:22 -08001941
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001942 bh = mi_size_high[mbmi->sb_type];
1943 bw = mi_size_wide[mbmi->sb_type];
Yue Chen64550b62017-01-12 12:18:22 -08001944 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1945
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001946 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001947#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001948 cm->dependent_horz_tiles,
1949#endif // CONFIG_DEPENDENT_HORZTILES
1950 cm->mi_rows, cm->mi_cols);
Yue Chen64550b62017-01-12 12:18:22 -08001951
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00001952 const int num_planes = av1_num_planes(cm);
1953 for (plane = 0; plane < AOMMIN(2, num_planes); ++plane) {
Fangwen Fub3be9262017-03-06 15:34:28 -08001954 const uint8_t palette_size_plane =
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001955 mbmi->palette_mode_info.palette_size[plane];
Zoe Liuf40a9572017-10-13 12:37:19 -07001956#if CONFIG_EXT_SKIP
1957 assert(!mbmi->skip_mode || !palette_size_plane);
1958#endif // CONFIG_EXT_SKIP
Fangwen Fub3be9262017-03-06 15:34:28 -08001959 if (palette_size_plane > 0) {
Alex Converseed37d012017-04-24 11:15:24 -07001960#if CONFIG_INTRABC
1961 assert(mbmi->use_intrabc == 0);
1962#endif
Fangwen Fub3be9262017-03-06 15:34:28 -08001963 int rows, cols;
hui su9bc1d8d2017-03-24 12:36:03 -07001964 assert(mbmi->sb_type >= BLOCK_8X8);
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001965 av1_get_block_dimensions(mbmi->sb_type, plane, xd, NULL, NULL, &rows,
Fangwen Fub3be9262017-03-06 15:34:28 -08001966 &cols);
1967 assert(*tok < tok_end);
Sarah Parker99e7daa2017-08-29 10:30:13 -07001968 pack_map_tokens(w, tok, palette_size_plane, rows * cols);
Jingning Han13648e72017-08-17 09:21:53 -07001969#if !CONFIG_LV_MAP
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001970 assert(*tok < tok_end + mbmi->skip);
Jingning Han13648e72017-08-17 09:21:53 -07001971#endif // !CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07001972 }
Fangwen Fub3be9262017-03-06 15:34:28 -08001973 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001974
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001975 if (!mbmi->skip) {
Yushin Chod0b77ac2017-10-20 17:33:16 -07001976#if !CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07001977 assert(*tok < tok_end);
Yushin Cho258a0242017-03-06 13:53:01 -08001978#endif
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00001979 for (plane = 0; plane < num_planes; ++plane) {
Debargha Mukherjee3aa28112017-11-25 07:03:31 -08001980 const struct macroblockd_plane *const pd = &xd->plane[plane];
1981 if (!is_chroma_reference(mi_row, mi_col, mbmi->sb_type, pd->subsampling_x,
1982 pd->subsampling_y)) {
Jingning Han13648e72017-08-17 09:21:53 -07001983#if !CONFIG_LV_MAP
Jingning Hanc20dc8e2017-02-17 15:37:28 -08001984 (*tok)++;
Jingning Han13648e72017-08-17 09:21:53 -07001985#endif // !CONFIG_LV_MAP
Jingning Hanc20dc8e2017-02-17 15:37:28 -08001986 continue;
1987 }
Debargha Mukherjee3aa28112017-11-25 07:03:31 -08001988 const BLOCK_SIZE bsize = mbmi->sb_type;
1989 const BLOCK_SIZE bsizec =
1990 scale_chroma_bsize(bsize, pd->subsampling_x, pd->subsampling_y);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001991 const BLOCK_SIZE plane_bsize =
Debargha Mukherjee3aa28112017-11-25 07:03:31 -08001992 AOMMAX(BLOCK_4X4, get_plane_block_size(bsizec, pd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001993
Jingning Han42a0fb32016-10-31 10:43:31 -07001994 const int num_4x4_w =
1995 block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
1996 const int num_4x4_h =
1997 block_size_high[plane_bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001998 int row, col;
Angie Chiangd4022822016-11-02 18:30:25 -07001999 TOKEN_STATS token_stats;
Angie Chiangd02001d2016-11-06 15:31:49 -08002000 init_token_stats(&token_stats);
Angie Chiangd4022822016-11-02 18:30:25 -07002001
Jingning Hanc2b797f2017-07-19 09:37:11 -07002002 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
2003 int mu_blocks_wide =
2004 block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
2005 int mu_blocks_high =
2006 block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
2007
2008 mu_blocks_wide = AOMMIN(num_4x4_w, mu_blocks_wide);
2009 mu_blocks_high = AOMMIN(num_4x4_h, mu_blocks_high);
2010
Jingning Hanfe45b212016-11-22 10:30:23 -08002011 if (is_inter_block(mbmi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002012 int block = 0;
Jingning Hanc2b797f2017-07-19 09:37:11 -07002013 for (row = 0; row < num_4x4_h; row += mu_blocks_high) {
2014 for (col = 0; col < num_4x4_w; col += mu_blocks_wide) {
Jingning Hane5e8f4d2017-11-20 20:11:04 -08002015 write_inter_txb_coeff(cm, x, mbmi, w, tok, tok_end, &token_stats,
2016 row, col, &block, plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002017 }
2018 }
Angie Chiangd02001d2016-11-06 15:31:49 -08002019#if CONFIG_RD_DEBUG
Angie Chiang3963d632016-11-10 18:41:40 -08002020 if (mbmi->sb_type >= BLOCK_8X8 &&
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002021 rd_token_stats_mismatch(&mbmi->rd_stats, &token_stats, plane)) {
Angie Chiangd02001d2016-11-06 15:31:49 -08002022 dump_mode_info(m);
2023 assert(0);
2024 }
Jingning Hanfe45b212016-11-22 10:30:23 -08002025#endif // CONFIG_RD_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07002026 } else {
Jingning Han5ab7ed42017-05-18 16:15:52 -07002027#if CONFIG_LV_MAP
2028 av1_write_coeffs_mb(cm, x, w, plane);
2029#else
hui su0c6244b2017-07-12 17:11:43 -07002030 const TX_SIZE tx = av1_get_tx_size(plane, xd);
Jingning Han42a0fb32016-10-31 10:43:31 -07002031 const int bkw = tx_size_wide_unit[tx];
2032 const int bkh = tx_size_high_unit[tx];
Jingning Han5b701742017-07-19 14:39:07 -07002033 int blk_row, blk_col;
2034
2035 for (row = 0; row < num_4x4_h; row += mu_blocks_high) {
2036 for (col = 0; col < num_4x4_w; col += mu_blocks_wide) {
2037 const int unit_height = AOMMIN(mu_blocks_high + row, num_4x4_h);
2038 const int unit_width = AOMMIN(mu_blocks_wide + col, num_4x4_w);
2039
2040 for (blk_row = row; blk_row < unit_height; blk_row += bkh) {
2041 for (blk_col = col; blk_col < unit_width; blk_col += bkw) {
Jingning Han5b701742017-07-19 14:39:07 -07002042 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx,
2043 &token_stats);
Jingning Han5b701742017-07-19 14:39:07 -07002044 }
2045 }
Fangwen Fu33bcd112017-02-07 16:42:41 -08002046 }
2047 }
Jingning Han5ab7ed42017-05-18 16:15:52 -07002048#endif // CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07002049 }
Angie Chiangd4022822016-11-02 18:30:25 -07002050
Yushin Chod0b77ac2017-10-20 17:33:16 -07002051#if !CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07002052 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
2053 (*tok)++;
Yushin Cho258a0242017-03-06 13:53:01 -08002054#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002055 }
2056 }
2057}
2058
Sebastien Alaiwan1bc94fc2017-10-31 10:25:17 +01002059#if NC_MODE_INFO
Yue Chen9ab6d712017-01-12 15:50:46 -08002060static void write_tokens_sb(AV1_COMP *cpi, const TileInfo *const tile,
2061 aom_writer *w, const TOKENEXTRA **tok,
2062 const TOKENEXTRA *const tok_end, int mi_row,
2063 int mi_col, BLOCK_SIZE bsize) {
2064 const AV1_COMMON *const cm = &cpi->common;
Yue Chenf27b1602017-01-13 11:11:43 -08002065 const int hbs = mi_size_wide[bsize] / 2;
Yue Chen9ab6d712017-01-12 15:50:46 -08002066 PARTITION_TYPE partition;
2067 BLOCK_SIZE subsize;
Yue Chen9ab6d712017-01-12 15:50:46 -08002068
2069 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2070
Yue Chenf27b1602017-01-13 11:11:43 -08002071 partition = get_partition(cm, mi_row, mi_col, bsize);
Yue Chen9ab6d712017-01-12 15:50:46 -08002072 subsize = get_subsize(bsize, partition);
2073
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002074 switch (partition) {
2075 case PARTITION_NONE:
2076 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2077 break;
2078 case PARTITION_HORZ:
2079 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2080 if (mi_row + hbs < cm->mi_rows)
2081 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2082 break;
2083 case PARTITION_VERT:
2084 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2085 if (mi_col + hbs < cm->mi_cols)
2086 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2087 break;
2088 case PARTITION_SPLIT:
2089 write_tokens_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
2090 write_tokens_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs,
2091 subsize);
2092 write_tokens_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col,
2093 subsize);
2094 write_tokens_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs,
2095 subsize);
2096 break;
Yue Chenf27b1602017-01-13 11:11:43 -08002097#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002098#if CONFIG_EXT_PARTITION_TYPES_AB
2099#error NC_MODE_INFO+MOTION_VAR not yet supported for new HORZ/VERT_AB partitions
2100#endif
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002101 case PARTITION_HORZ_A:
2102 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2103 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2104 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2105 break;
2106 case PARTITION_HORZ_B:
2107 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2108 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2109 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
2110 break;
2111 case PARTITION_VERT_A:
2112 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2113 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2114 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2115 break;
2116 case PARTITION_VERT_B:
2117 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2118 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2119 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
2120 break;
Yue Chenf27b1602017-01-13 11:11:43 -08002121#endif // CONFIG_EXT_PARTITION_TYPES
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002122 default: assert(0);
Yue Chen9ab6d712017-01-12 15:50:46 -08002123 }
2124}
2125#endif
2126
Yue Chen64550b62017-01-12 12:18:22 -08002127static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
2128 aom_writer *w, const TOKENEXTRA **tok,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02002129 const TOKENEXTRA *const tok_end, int mi_row,
2130 int mi_col) {
2131 write_mbmi_b(cpi, tile, w, mi_row, mi_col);
Jingning Hanf5a4d3b2017-08-27 23:01:19 -07002132
Sebastien Alaiwan1bc94fc2017-10-31 10:25:17 +01002133#if NC_MODE_INFO
Yue Chen9ab6d712017-01-12 15:50:46 -08002134 (void)tok;
2135 (void)tok_end;
2136#else
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02002137 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
Yue Chen9ab6d712017-01-12 15:50:46 -08002138#endif
Yue Chen64550b62017-01-12 12:18:22 -08002139}
2140
Yaowu Xuf883b422016-08-30 14:01:10 -07002141static void write_partition(const AV1_COMMON *const cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002142 const MACROBLOCKD *const xd, int hbs, int mi_row,
2143 int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize,
Yaowu Xuf883b422016-08-30 14:01:10 -07002144 aom_writer *w) {
Alex Converse55c6bde2017-01-12 15:55:31 -08002145 const int is_partition_point = bsize >= BLOCK_8X8;
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00002146
Jingning Hanbf9c6b72016-12-14 14:50:45 -08002147 if (!is_partition_point) return;
2148
Rupert Swarbrickeb123932017-11-22 15:20:47 +00002149 const int has_rows = (mi_row + hbs) < cm->mi_rows;
2150 const int has_cols = (mi_col + hbs) < cm->mi_cols;
2151 const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
2152 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
2153
2154 if (!has_rows && !has_cols) {
2155 assert(p == PARTITION_SPLIT);
2156 return;
2157 }
2158
Yaowu Xuc27fc142016-08-22 16:08:15 -07002159 if (has_rows && has_cols) {
Rupert Swarbrickeb123932017-11-22 15:20:47 +00002160 aom_write_symbol(w, p, ec_ctx->partition_cdf[ctx],
2161 partition_cdf_length(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002162 } else if (!has_rows && has_cols) {
2163 assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002164 assert(bsize > BLOCK_8X8);
2165 aom_cdf_prob cdf[2];
Rupert Swarbrickeb123932017-11-22 15:20:47 +00002166 partition_gather_vert_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002167 aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
Rupert Swarbrickeb123932017-11-22 15:20:47 +00002168 } else {
2169 assert(has_rows && !has_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002170 assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002171 assert(bsize > BLOCK_8X8);
2172 aom_cdf_prob cdf[2];
Rupert Swarbrickeb123932017-11-22 15:20:47 +00002173 partition_gather_horz_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002174 aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002175 }
2176}
2177
Yaowu Xuf883b422016-08-30 14:01:10 -07002178static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile,
2179 aom_writer *const w, const TOKENEXTRA **tok,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02002180 const TOKENEXTRA *const tok_end, int mi_row,
2181 int mi_col, BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002182 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002183 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Jingning Hanc709e1f2016-12-06 14:48:09 -08002184 const int hbs = mi_size_wide[bsize] / 2;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002185#if CONFIG_EXT_PARTITION_TYPES
2186 const int quarter_step = mi_size_wide[bsize] / 4;
2187 int i;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002188#if CONFIG_EXT_PARTITION_TYPES_AB
2189 const int qbs = mi_size_wide[bsize] / 4;
2190#endif // CONFIG_EXT_PARTITION_TYPES_AB
2191#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002192 const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
2193 const BLOCK_SIZE subsize = get_subsize(bsize, partition);
Jingning Han52261842016-12-14 12:17:49 -08002194
Yaowu Xuc27fc142016-08-22 16:08:15 -07002195 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2196
2197 write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002198 switch (partition) {
2199 case PARTITION_NONE:
2200 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2201 break;
2202 case PARTITION_HORZ:
2203 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2204 if (mi_row + hbs < cm->mi_rows)
2205 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2206 break;
2207 case PARTITION_VERT:
2208 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2209 if (mi_col + hbs < cm->mi_cols)
2210 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2211 break;
2212 case PARTITION_SPLIT:
2213 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
2214 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs, subsize);
2215 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col, subsize);
2216 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs,
2217 subsize);
2218 break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002219#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002220#if CONFIG_EXT_PARTITION_TYPES_AB
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002221 case PARTITION_HORZ_A:
2222 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2223 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + qbs, mi_col);
2224 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2225 break;
2226 case PARTITION_HORZ_B:
2227 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2228 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2229 if (mi_row + 3 * qbs < cm->mi_rows)
2230 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + 3 * qbs, mi_col);
2231 break;
2232 case PARTITION_VERT_A:
2233 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2234 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + qbs);
2235 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2236 break;
2237 case PARTITION_VERT_B:
2238 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2239 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2240 if (mi_col + 3 * qbs < cm->mi_cols)
2241 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + 3 * qbs);
2242 break;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002243#else
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002244 case PARTITION_HORZ_A:
2245 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2246 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2247 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2248 break;
2249 case PARTITION_HORZ_B:
2250 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2251 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2252 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
2253 break;
2254 case PARTITION_VERT_A:
2255 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2256 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2257 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2258 break;
2259 case PARTITION_VERT_B:
2260 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2261 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2262 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
2263 break;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002264#endif
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002265 case PARTITION_HORZ_4:
2266 for (i = 0; i < 4; ++i) {
2267 int this_mi_row = mi_row + i * quarter_step;
2268 if (i > 0 && this_mi_row >= cm->mi_rows) break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002269
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002270 write_modes_b(cpi, tile, w, tok, tok_end, this_mi_row, mi_col);
2271 }
2272 break;
2273 case PARTITION_VERT_4:
2274 for (i = 0; i < 4; ++i) {
2275 int this_mi_col = mi_col + i * quarter_step;
2276 if (i > 0 && this_mi_col >= cm->mi_cols) break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002277
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002278 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, this_mi_col);
2279 }
2280 break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002281#endif // CONFIG_EXT_PARTITION_TYPES
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002282 default: assert(0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002283 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002284
2285// update partition context
2286#if CONFIG_EXT_PARTITION_TYPES
2287 update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
2288#else
2289 if (bsize >= BLOCK_8X8 &&
2290 (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
2291 update_partition_context(xd, mi_row, mi_col, subsize, bsize);
David Barkerf8935c92016-10-26 14:54:06 +01002292#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002293
Cheng Chenf572cd32017-08-25 18:34:51 -07002294#if CONFIG_LPF_SB
2295 // send filter level for each superblock (64x64)
2296 if (bsize == cm->sb_size) {
Cheng Chena4b27de2017-08-31 16:05:19 -07002297 if (mi_row == 0 && mi_col == 0) {
Cheng Chenebcee0b2017-12-05 12:36:01 -08002298 aom_write_literal(w, cm->mi[0].mbmi.filt_lvl, 6);
Cheng Chen41d37c22017-09-08 19:00:21 -07002299 cm->mi_grid_visible[0]->mbmi.reuse_sb_lvl = 0;
2300 cm->mi_grid_visible[0]->mbmi.delta = 0;
2301 cm->mi_grid_visible[0]->mbmi.sign = 0;
Cheng Chena4b27de2017-08-31 16:05:19 -07002302 } else {
2303 int prev_mi_row, prev_mi_col;
2304 if (mi_col - MAX_MIB_SIZE < 0) {
2305 prev_mi_row = mi_row - MAX_MIB_SIZE;
2306 prev_mi_col = mi_col;
2307 } else {
2308 prev_mi_row = mi_row;
2309 prev_mi_col = mi_col - MAX_MIB_SIZE;
2310 }
2311 MB_MODE_INFO *curr_mbmi =
2312 &cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi;
2313 MB_MODE_INFO *prev_mbmi =
2314 &cm->mi_grid_visible[prev_mi_row * cm->mi_stride + prev_mi_col]->mbmi;
2315
2316 const uint8_t curr_lvl = curr_mbmi->filt_lvl;
2317 const uint8_t prev_lvl = prev_mbmi->filt_lvl;
Cheng Chena4b27de2017-08-31 16:05:19 -07002318
Cheng Chen41d37c22017-09-08 19:00:21 -07002319 const int reuse_prev_lvl = curr_lvl == prev_lvl;
2320 const int reuse_ctx = prev_mbmi->reuse_sb_lvl;
2321 curr_mbmi->reuse_sb_lvl = reuse_prev_lvl;
2322 aom_write_symbol(w, reuse_prev_lvl,
2323 xd->tile_ctx->lpf_reuse_cdf[reuse_ctx], 2);
Cheng Chen855f0fc2017-08-25 18:34:51 -07002324 cpi->td.counts->lpf_reuse[reuse_ctx][reuse_prev_lvl]++;
Cheng Chenc7855b12017-09-05 10:49:08 -07002325
Cheng Chen41d37c22017-09-08 19:00:21 -07002326 if (reuse_prev_lvl) {
2327 curr_mbmi->delta = 0;
2328 curr_mbmi->sign = 0;
2329 } else {
2330 const unsigned int delta = abs(curr_lvl - prev_lvl) / LPF_STEP;
2331 const int delta_ctx = prev_mbmi->delta;
2332 curr_mbmi->delta = delta;
2333 aom_write_symbol(w, delta, xd->tile_ctx->lpf_delta_cdf[delta_ctx],
2334 DELTA_RANGE);
Cheng Chen855f0fc2017-08-25 18:34:51 -07002335 cpi->td.counts->lpf_delta[delta_ctx][delta]++;
Cheng Chen41d37c22017-09-08 19:00:21 -07002336
2337 if (delta) {
2338 const int sign = curr_lvl > prev_lvl;
2339 const int sign_ctx = prev_mbmi->sign;
2340 curr_mbmi->sign = sign;
2341 aom_write_symbol(w, sign,
2342 xd->tile_ctx->lpf_sign_cdf[reuse_ctx][sign_ctx], 2);
Cheng Chen855f0fc2017-08-25 18:34:51 -07002343 cpi->td.counts->lpf_sign[reuse_ctx][sign_ctx][sign]++;
Cheng Chen41d37c22017-09-08 19:00:21 -07002344 } else {
2345 curr_mbmi->sign = 0;
2346 }
Cheng Chenc7855b12017-09-05 10:49:08 -07002347 }
Cheng Chena4b27de2017-08-31 16:05:19 -07002348 }
Cheng Chenf572cd32017-08-25 18:34:51 -07002349 }
2350#endif
2351
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002352#if CONFIG_LOOP_RESTORATION
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002353 for (int plane = 0; plane < av1_num_planes(cm); ++plane) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01002354 int rcol0, rcol1, rrow0, rrow1, tile_tl_idx;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002355 if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
2356 &rcol0, &rcol1, &rrow0, &rrow1,
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01002357 &tile_tl_idx)) {
2358 const int rstride = cm->rst_info[plane].horz_units_per_tile;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002359 for (int rrow = rrow0; rrow < rrow1; ++rrow) {
2360 for (int rcol = rcol0; rcol < rcol1; ++rcol) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01002361 const int rtile_idx = tile_tl_idx + rcol + rrow * rstride;
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002362 const RestorationUnitInfo *rui =
2363 &cm->rst_info[plane].unit_info[rtile_idx];
2364 loop_restoration_write_sb_coeffs(cm, xd, rui, w, plane);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002365 }
2366 }
2367 }
2368 }
2369#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002370}
2371
Yaowu Xuf883b422016-08-30 14:01:10 -07002372static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile,
2373 aom_writer *const w, const TOKENEXTRA **tok,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002374 const TOKENEXTRA *const tok_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002375 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002376 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
2377 const int mi_row_start = tile->mi_row_start;
2378 const int mi_row_end = tile->mi_row_end;
2379 const int mi_col_start = tile->mi_col_start;
2380 const int mi_col_end = tile->mi_col_end;
2381 int mi_row, mi_col;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002382
2383#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08002384 if (!cm->dependent_horz_tiles || mi_row_start == 0 ||
2385 tile->tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002386 av1_zero_above_context(cm, mi_col_start, mi_col_end);
2387 }
2388#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002389 av1_zero_above_context(cm, mi_col_start, mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002390#endif
Arild Fuldseth07441162016-08-15 15:07:52 +02002391 if (cpi->common.delta_q_present_flag) {
2392 xd->prev_qindex = cpi->common.base_qindex;
Fangwen Fu231fe422017-04-24 17:52:29 -07002393#if CONFIG_EXT_DELTA_Q
2394 if (cpi->common.delta_lf_present_flag) {
Cheng Chena97394f2017-09-27 15:05:14 -07002395#if CONFIG_LOOPFILTER_LEVEL
2396 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
2397 xd->prev_delta_lf[lf_id] = 0;
2398#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07002399 xd->prev_delta_lf_from_base = 0;
2400 }
2401#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02002402 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002403
2404 for (mi_row = mi_row_start; mi_row < mi_row_end; mi_row += cm->mib_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002405 av1_zero_left_context(xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002406
2407 for (mi_col = mi_col_start; mi_col < mi_col_end; mi_col += cm->mib_size) {
Sebastien Alaiwan6534ba82017-10-13 20:35:14 +02002408 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, cm->sb_size);
Sebastien Alaiwan1bc94fc2017-10-31 10:25:17 +01002409#if NC_MODE_INFO
Yue Chen9ab6d712017-01-12 15:50:46 -08002410 write_tokens_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, cm->sb_size);
2411#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002412 }
2413 }
2414}
2415
Yaowu Xuc27fc142016-08-22 16:08:15 -07002416#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002417static void encode_restoration_mode(AV1_COMMON *cm,
2418 struct aom_write_bit_buffer *wb) {
Hui Su27df8342017-11-07 15:16:05 -08002419#if CONFIG_INTRABC
2420 if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
2421#endif // CONFIG_INTRABC
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002422 int all_none = 1, chroma_none = 1;
2423 for (int p = 0; p < av1_num_planes(cm); ++p) {
Rupert Swarbrick4596deb2017-11-07 18:06:38 +00002424 RestorationInfo *rsi = &cm->rst_info[p];
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002425 if (rsi->frame_restoration_type != RESTORE_NONE) {
2426 all_none = 0;
2427 chroma_none &= p == 0;
2428 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002429 switch (rsi->frame_restoration_type) {
Debargha Mukherjeea3d4fe52017-05-19 16:22:54 -07002430 case RESTORE_NONE:
2431 aom_wb_write_bit(wb, 0);
2432 aom_wb_write_bit(wb, 0);
2433 break;
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07002434 case RESTORE_WIENER:
2435 aom_wb_write_bit(wb, 1);
2436 aom_wb_write_bit(wb, 0);
2437 break;
2438 case RESTORE_SGRPROJ:
2439 aom_wb_write_bit(wb, 1);
2440 aom_wb_write_bit(wb, 1);
2441 break;
Debargha Mukherjeea3d4fe52017-05-19 16:22:54 -07002442 case RESTORE_SWITCHABLE:
2443 aom_wb_write_bit(wb, 0);
2444 aom_wb_write_bit(wb, 1);
2445 break;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002446 default: assert(0);
2447 }
2448 }
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002449 if (!all_none) {
Rupert Swarbrick4596deb2017-11-07 18:06:38 +00002450 RestorationInfo *rsi = &cm->rst_info[0];
2451 const int qsize = RESTORATION_TILESIZE_MAX >> 2;
2452 const int hsize = RESTORATION_TILESIZE_MAX >> 1;
2453 aom_wb_write_bit(wb, rsi->restoration_unit_size != qsize);
2454 if (rsi->restoration_unit_size != qsize) {
2455 aom_wb_write_bit(wb, rsi->restoration_unit_size != hsize);
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08002456 }
2457 }
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002458
2459 if (av1_num_planes(cm) > 1) {
2460 int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
2461 if (s && !chroma_none) {
2462 aom_wb_write_bit(wb,
2463 cm->rst_info[1].restoration_unit_size !=
2464 cm->rst_info[0].restoration_unit_size);
2465 assert(cm->rst_info[1].restoration_unit_size ==
2466 cm->rst_info[0].restoration_unit_size ||
2467 cm->rst_info[1].restoration_unit_size ==
2468 (cm->rst_info[0].restoration_unit_size >> s));
2469 assert(cm->rst_info[2].restoration_unit_size ==
2470 cm->rst_info[1].restoration_unit_size);
2471 } else if (!s) {
2472 assert(cm->rst_info[1].restoration_unit_size ==
2473 cm->rst_info[0].restoration_unit_size);
2474 assert(cm->rst_info[2].restoration_unit_size ==
2475 cm->rst_info[1].restoration_unit_size);
2476 }
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07002477 }
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002478}
2479
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002480static void write_wiener_filter(int wiener_win, const WienerInfo *wiener_info,
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002481 WienerInfo *ref_wiener_info, aom_writer *wb) {
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002482 if (wiener_win == WIENER_WIN)
2483 aom_write_primitive_refsubexpfin(
2484 wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2485 WIENER_FILT_TAP0_SUBEXP_K,
2486 ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV,
2487 wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV);
2488 else
2489 assert(wiener_info->vfilter[0] == 0 &&
2490 wiener_info->vfilter[WIENER_WIN - 1] == 0);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002491 aom_write_primitive_refsubexpfin(
2492 wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2493 WIENER_FILT_TAP1_SUBEXP_K,
2494 ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV,
2495 wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV);
2496 aom_write_primitive_refsubexpfin(
2497 wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2498 WIENER_FILT_TAP2_SUBEXP_K,
2499 ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV,
2500 wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV);
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002501 if (wiener_win == WIENER_WIN)
2502 aom_write_primitive_refsubexpfin(
2503 wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2504 WIENER_FILT_TAP0_SUBEXP_K,
2505 ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV,
2506 wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV);
2507 else
2508 assert(wiener_info->hfilter[0] == 0 &&
2509 wiener_info->hfilter[WIENER_WIN - 1] == 0);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002510 aom_write_primitive_refsubexpfin(
2511 wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2512 WIENER_FILT_TAP1_SUBEXP_K,
2513 ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV,
2514 wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV);
2515 aom_write_primitive_refsubexpfin(
2516 wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2517 WIENER_FILT_TAP2_SUBEXP_K,
2518 ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV,
2519 wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV);
2520 memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002521}
2522
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002523static void write_sgrproj_filter(const SgrprojInfo *sgrproj_info,
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002524 SgrprojInfo *ref_sgrproj_info,
2525 aom_writer *wb) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002526 aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002527 aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1,
2528 SGRPROJ_PRJ_SUBEXP_K,
2529 ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
2530 sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0);
2531 aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1,
2532 SGRPROJ_PRJ_SUBEXP_K,
2533 ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
2534 sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1);
2535 memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002536}
2537
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002538static void loop_restoration_write_sb_coeffs(const AV1_COMMON *const cm,
2539 MACROBLOCKD *xd,
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002540 const RestorationUnitInfo *rui,
2541 aom_writer *const w, int plane) {
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002542 const RestorationInfo *rsi = cm->rst_info + plane;
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002543 RestorationType frame_rtype = rsi->frame_restoration_type;
2544 if (frame_rtype == RESTORE_NONE) return;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002545
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002546 const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
2547 WienerInfo *wiener_info = xd->wiener_info + plane;
2548 SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002549 RestorationType unit_rtype = rui->restoration_type;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002550
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002551 if (frame_rtype == RESTORE_SWITCHABLE) {
2552 aom_write_symbol(w, unit_rtype, xd->tile_ctx->switchable_restore_cdf,
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07002553 RESTORE_SWITCHABLE_TYPES);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002554 switch (unit_rtype) {
2555 case RESTORE_WIENER:
2556 write_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, w);
2557 break;
2558 case RESTORE_SGRPROJ:
2559 write_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, w);
2560 break;
2561 default: assert(unit_rtype == RESTORE_NONE); break;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002562 }
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002563 } else if (frame_rtype == RESTORE_WIENER) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002564 aom_write_symbol(w, unit_rtype != RESTORE_NONE,
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07002565 xd->tile_ctx->wiener_restore_cdf, 2);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002566 if (unit_rtype != RESTORE_NONE) {
2567 write_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, w);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002568 }
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002569 } else if (frame_rtype == RESTORE_SGRPROJ) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002570 aom_write_symbol(w, unit_rtype != RESTORE_NONE,
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07002571 xd->tile_ctx->sgrproj_restore_cdf, 2);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002572 if (unit_rtype != RESTORE_NONE) {
2573 write_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, w);
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002574 }
2575 }
2576}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002577#endif // CONFIG_LOOP_RESTORATION
2578
Yaowu Xuf883b422016-08-30 14:01:10 -07002579static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Hui Su27df8342017-11-07 15:16:05 -08002580#if CONFIG_INTRABC
2581 if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
2582#endif // CONFIG_INTRABC
Yaowu Xuc27fc142016-08-22 16:08:15 -07002583 int i;
2584 struct loopfilter *lf = &cm->lf;
2585
Cheng Chen179479f2017-08-04 10:56:39 -07002586// Encode the loop filter level and type
Cheng Chenf572cd32017-08-25 18:34:51 -07002587#if !CONFIG_LPF_SB
Cheng Chen13fc8192017-08-19 11:49:28 -07002588#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen179479f2017-08-04 10:56:39 -07002589 aom_wb_write_literal(wb, lf->filter_level[0], 6);
2590 aom_wb_write_literal(wb, lf->filter_level[1], 6);
2591 if (lf->filter_level[0] || lf->filter_level[1]) {
Cheng Chene94df5c2017-07-19 17:25:33 -07002592 aom_wb_write_literal(wb, lf->filter_level_u, 6);
2593 aom_wb_write_literal(wb, lf->filter_level_v, 6);
2594 }
Cheng Chen179479f2017-08-04 10:56:39 -07002595#else
2596 aom_wb_write_literal(wb, lf->filter_level, 6);
Cheng Chenf572cd32017-08-25 18:34:51 -07002597#endif // CONFIG_LOOPFILTER_LEVEL
2598#endif // CONFIG_LPF_SB
Yaowu Xuf883b422016-08-30 14:01:10 -07002599 aom_wb_write_literal(wb, lf->sharpness_level, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002600
2601 // Write out loop filter deltas applied at the MB level based on mode or
2602 // ref frame (if they are enabled).
Yaowu Xuf883b422016-08-30 14:01:10 -07002603 aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002604
2605 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002606 aom_wb_write_bit(wb, lf->mode_ref_delta_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002607 if (lf->mode_ref_delta_update) {
2608 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
2609 const int delta = lf->ref_deltas[i];
2610 const int changed = delta != lf->last_ref_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07002611 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002612 if (changed) {
2613 lf->last_ref_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07002614 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002615 }
2616 }
2617
2618 for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
2619 const int delta = lf->mode_deltas[i];
2620 const int changed = delta != lf->last_mode_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07002621 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002622 if (changed) {
2623 lf->last_mode_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07002624 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002625 }
2626 }
2627 }
2628 }
2629}
2630
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01002631static void encode_cdef(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Hui Su27df8342017-11-07 15:16:05 -08002632#if CONFIG_INTRABC
2633 if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
2634#endif // CONFIG_INTRABC
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002635 int i;
Steinar Midtskogen59782122017-07-20 08:49:43 +02002636#if CONFIG_CDEF_SINGLEPASS
2637 aom_wb_write_literal(wb, cm->cdef_pri_damping - 3, 2);
2638 assert(cm->cdef_pri_damping == cm->cdef_sec_damping);
2639#else
Steinar Midtskogen94de0aa2017-08-02 10:30:12 +02002640 aom_wb_write_literal(wb, cm->cdef_pri_damping - 5, 1);
2641 aom_wb_write_literal(wb, cm->cdef_sec_damping - 3, 2);
Steinar Midtskogen59782122017-07-20 08:49:43 +02002642#endif
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002643 aom_wb_write_literal(wb, cm->cdef_bits, 2);
2644 for (i = 0; i < cm->nb_cdef_strengths; i++) {
2645 aom_wb_write_literal(wb, cm->cdef_strengths[i], CDEF_STRENGTH_BITS);
Steinar Midtskogen1c1161f2017-09-08 15:03:51 +02002646 if (cm->subsampling_x == cm->subsampling_y)
2647 aom_wb_write_literal(wb, cm->cdef_uv_strengths[i], CDEF_STRENGTH_BITS);
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002648 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002649}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002650
Yaowu Xuf883b422016-08-30 14:01:10 -07002651static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002652 if (delta_q != 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002653 aom_wb_write_bit(wb, 1);
2654 aom_wb_write_inv_signed_literal(wb, delta_q, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002655 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002656 aom_wb_write_bit(wb, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002657 }
2658}
2659
Yaowu Xuf883b422016-08-30 14:01:10 -07002660static void encode_quantization(const AV1_COMMON *const cm,
2661 struct aom_write_bit_buffer *wb) {
2662 aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002663 write_delta_q(wb, cm->y_dc_delta_q);
Yaowu Xu6fc47e52017-12-04 15:07:48 -08002664 int diff_uv_delta = (cm->u_dc_delta_q != cm->v_dc_delta_q) ||
2665 (cm->u_ac_delta_q != cm->v_ac_delta_q);
2666#if CONFIG_EXT_QM
2667 if (cm->separate_uv_delta_q) aom_wb_write_bit(wb, diff_uv_delta);
2668#else
2669 assert(!diff_uv_delta);
2670#endif
Yaowu Xube42dc72017-11-08 17:38:24 -08002671 write_delta_q(wb, cm->u_dc_delta_q);
Yaowu Xube42dc72017-11-08 17:38:24 -08002672 write_delta_q(wb, cm->u_ac_delta_q);
Yaowu Xu6fc47e52017-12-04 15:07:48 -08002673 if (diff_uv_delta) {
2674 write_delta_q(wb, cm->v_dc_delta_q);
2675 write_delta_q(wb, cm->v_ac_delta_q);
2676 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002677#if CONFIG_AOM_QM
Yaowu Xuf883b422016-08-30 14:01:10 -07002678 aom_wb_write_bit(wb, cm->using_qmatrix);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002679 if (cm->using_qmatrix) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002680 aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS);
2681 aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002682 }
2683#endif
2684}
2685
Yaowu Xuf883b422016-08-30 14:01:10 -07002686static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
2687 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002688 int i, j;
2689 const struct segmentation *seg = &cm->seg;
2690
Yaowu Xuf883b422016-08-30 14:01:10 -07002691 aom_wb_write_bit(wb, seg->enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002692 if (!seg->enabled) return;
2693
2694 // Segmentation map
2695 if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002696 aom_wb_write_bit(wb, seg->update_map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002697 } else {
2698 assert(seg->update_map == 1);
2699 }
2700 if (seg->update_map) {
2701 // Select the coding strategy (temporal or spatial)
Yaowu Xuf883b422016-08-30 14:01:10 -07002702 av1_choose_segmap_coding_method(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002703
2704 // Write out the chosen coding method.
2705 if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002706 aom_wb_write_bit(wb, seg->temporal_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002707 } else {
2708 assert(seg->temporal_update == 0);
2709 }
2710 }
2711
2712 // Segmentation data
Yaowu Xuf883b422016-08-30 14:01:10 -07002713 aom_wb_write_bit(wb, seg->update_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002714 if (seg->update_data) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002715 for (i = 0; i < MAX_SEGMENTS; i++) {
2716 for (j = 0; j < SEG_LVL_MAX; j++) {
2717 const int active = segfeature_active(seg, i, j);
Yaowu Xuf883b422016-08-30 14:01:10 -07002718 aom_wb_write_bit(wb, active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002719 if (active) {
2720 const int data = get_segdata(seg, i, j);
Yaowu Xuf883b422016-08-30 14:01:10 -07002721 const int data_max = av1_seg_feature_data_max(j);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002722
Yaowu Xuf883b422016-08-30 14:01:10 -07002723 if (av1_is_segfeature_signed(j)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002724 encode_unsigned_max(wb, abs(data), data_max);
Yaowu Xuf883b422016-08-30 14:01:10 -07002725 aom_wb_write_bit(wb, data < 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002726 } else {
2727 encode_unsigned_max(wb, data, data_max);
2728 }
2729 }
2730 }
2731 }
2732 }
2733}
2734
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +01002735#if CONFIG_Q_SEGMENTATION
2736static void encode_q_segmentation(AV1_COMMON *cm,
2737 struct aom_write_bit_buffer *wb) {
2738 int i;
2739 struct segmentation *seg = &cm->seg;
2740
2741 for (i = 0; i < MAX_SEGMENTS; i++) {
2742 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
2743 seg->q_lvls = 0;
2744 return;
2745 }
2746 }
2747
2748 aom_wb_write_bit(wb, !!seg->q_lvls);
2749 if (!seg->q_lvls) return;
2750
2751 encode_unsigned_max(wb, seg->q_lvls, MAX_SEGMENTS);
2752
2753 for (i = 0; i < seg->q_lvls; i++) {
2754 const int val = seg->q_delta[i];
2755 encode_unsigned_max(wb, abs(val), MAXQ);
2756 aom_wb_write_bit(wb, val < 0);
2757 }
2758}
2759#endif
2760
Thomas Daedef636d5c2017-06-29 13:48:27 -07002761static void write_tx_mode(AV1_COMMON *cm, TX_MODE *mode,
Yue Cheneeacc4c2017-01-17 17:29:17 -08002762 struct aom_write_bit_buffer *wb) {
Thomas Daedef636d5c2017-06-29 13:48:27 -07002763 if (cm->all_lossless) {
Yue Cheneeacc4c2017-01-17 17:29:17 -08002764 *mode = ONLY_4X4;
2765 return;
2766 }
Debargha Mukherjee923b73d2017-10-31 18:11:34 -07002767#if CONFIG_SIMPLIFY_TX_MODE
2768 aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
2769#else
Debargha Mukherjee18d38f62016-11-17 20:30:16 -08002770#if CONFIG_TX64X64
Yue Cheneeacc4c2017-01-17 17:29:17 -08002771 aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
2772 if (*mode != TX_MODE_SELECT) {
2773 aom_wb_write_literal(wb, AOMMIN(*mode, ALLOW_32X32), 2);
2774 if (*mode >= ALLOW_32X32) aom_wb_write_bit(wb, *mode == ALLOW_64X64);
Debargha Mukherjee18d38f62016-11-17 20:30:16 -08002775 }
2776#else
Yue Cheneeacc4c2017-01-17 17:29:17 -08002777 aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
2778 if (*mode != TX_MODE_SELECT) aom_wb_write_literal(wb, *mode, 2);
Debargha Mukherjee18d38f62016-11-17 20:30:16 -08002779#endif // CONFIG_TX64X64
Debargha Mukherjee923b73d2017-10-31 18:11:34 -07002780#endif // CONFIG_SIMPLIFY_TX_MODE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002781}
2782
Angie Chiang5678ad92016-11-21 09:38:40 -08002783static void write_frame_interp_filter(InterpFilter filter,
2784 struct aom_write_bit_buffer *wb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002785 aom_wb_write_bit(wb, filter == SWITCHABLE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002786 if (filter != SWITCHABLE)
Angie Chiang6305abe2016-10-24 12:24:44 -07002787 aom_wb_write_literal(wb, filter, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002788}
2789
Yaowu Xuf883b422016-08-30 14:01:10 -07002790static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002791 if (cm->interp_filter == SWITCHABLE) {
2792 // Check to see if only one of the filters is actually used
2793 int count[SWITCHABLE_FILTERS];
2794 int i, j, c = 0;
2795 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2796 count[i] = 0;
2797 for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
2798 count[i] += counts->switchable_interp[j][i];
2799 c += (count[i] > 0);
2800 }
2801 if (c == 1) {
2802 // Only one filter is used. So set the filter at frame level
2803 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2804 if (count[i]) {
Sarah Parker4c10a3c2017-04-10 19:37:59 -07002805 if (i == EIGHTTAP_REGULAR || WARP_WM_NEIGHBORS_WITH_OBMC)
Debargha Mukherjee604d8462017-04-06 15:27:00 -07002806 cm->interp_filter = i;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002807 break;
2808 }
2809 }
2810 }
2811 }
2812}
2813
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002814#if CONFIG_MAX_TILE
2815
2816// Same function as write_uniform but writing to uncompresses header wb
2817static void wb_write_uniform(struct aom_write_bit_buffer *wb, int n, int v) {
2818 const int l = get_unsigned_bits(n);
2819 const int m = (1 << l) - n;
2820 if (l == 0) return;
2821 if (v < m) {
2822 aom_wb_write_literal(wb, v, l - 1);
2823 } else {
2824 aom_wb_write_literal(wb, m + ((v - m) >> 1), l - 1);
2825 aom_wb_write_literal(wb, (v - m) & 1, 1);
2826 }
2827}
2828
2829static void write_tile_info_max_tile(const AV1_COMMON *const cm,
2830 struct aom_write_bit_buffer *wb) {
Dominic Symes917d6c02017-10-11 18:00:52 +02002831 int width_mi = ALIGN_POWER_OF_TWO(cm->mi_cols, cm->mib_size_log2);
2832 int height_mi = ALIGN_POWER_OF_TWO(cm->mi_rows, cm->mib_size_log2);
2833 int width_sb = width_mi >> cm->mib_size_log2;
2834 int height_sb = height_mi >> cm->mib_size_log2;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002835 int size_sb, i;
2836
2837 aom_wb_write_bit(wb, cm->uniform_tile_spacing_flag);
2838
2839 if (cm->uniform_tile_spacing_flag) {
2840 // Uniform spaced tiles with power-of-two number of rows and columns
2841 // tile columns
2842 int ones = cm->log2_tile_cols - cm->min_log2_tile_cols;
2843 while (ones--) {
2844 aom_wb_write_bit(wb, 1);
2845 }
2846 if (cm->log2_tile_cols < cm->max_log2_tile_cols) {
2847 aom_wb_write_bit(wb, 0);
2848 }
2849
2850 // rows
2851 ones = cm->log2_tile_rows - cm->min_log2_tile_rows;
2852 while (ones--) {
2853 aom_wb_write_bit(wb, 1);
2854 }
2855 if (cm->log2_tile_rows < cm->max_log2_tile_rows) {
2856 aom_wb_write_bit(wb, 0);
2857 }
2858 } else {
2859 // Explicit tiles with configurable tile widths and heights
2860 // columns
2861 for (i = 0; i < cm->tile_cols; i++) {
2862 size_sb = cm->tile_col_start_sb[i + 1] - cm->tile_col_start_sb[i];
2863 wb_write_uniform(wb, AOMMIN(width_sb, MAX_TILE_WIDTH_SB), size_sb - 1);
2864 width_sb -= size_sb;
2865 }
2866 assert(width_sb == 0);
2867
2868 // rows
2869 for (i = 0; i < cm->tile_rows; i++) {
2870 size_sb = cm->tile_row_start_sb[i + 1] - cm->tile_row_start_sb[i];
2871 wb_write_uniform(wb, AOMMIN(height_sb, cm->max_tile_height_sb),
2872 size_sb - 1);
2873 height_sb -= size_sb;
2874 }
2875 assert(height_sb == 0);
2876 }
2877}
2878#endif
2879
Yaowu Xuf883b422016-08-30 14:01:10 -07002880static void write_tile_info(const AV1_COMMON *const cm,
2881 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002882#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002883 if (cm->large_scale_tile) {
2884 const int tile_width =
2885 ALIGN_POWER_OF_TWO(cm->tile_width, cm->mib_size_log2) >>
2886 cm->mib_size_log2;
2887 const int tile_height =
2888 ALIGN_POWER_OF_TWO(cm->tile_height, cm->mib_size_log2) >>
2889 cm->mib_size_log2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002890
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002891 assert(tile_width > 0);
2892 assert(tile_height > 0);
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08002893
Yaowu Xuc27fc142016-08-22 16:08:15 -07002894// Write the tile sizes
2895#if CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002896 if (cm->sb_size == BLOCK_128X128) {
2897 assert(tile_width <= 32);
2898 assert(tile_height <= 32);
2899 aom_wb_write_literal(wb, tile_width - 1, 5);
2900 aom_wb_write_literal(wb, tile_height - 1, 5);
2901 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002902#endif // CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002903 assert(tile_width <= 64);
2904 assert(tile_height <= 64);
2905 aom_wb_write_literal(wb, tile_width - 1, 6);
2906 aom_wb_write_literal(wb, tile_height - 1, 6);
2907#if CONFIG_EXT_PARTITION
2908 }
2909#endif // CONFIG_EXT_PARTITION
2910 } else {
2911#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002912
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002913#if CONFIG_MAX_TILE
2914 write_tile_info_max_tile(cm, wb);
2915#else
2916 int min_log2_tile_cols, max_log2_tile_cols, ones;
2917 av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002918
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002919 // columns
2920 ones = cm->log2_tile_cols - min_log2_tile_cols;
2921 while (ones--) aom_wb_write_bit(wb, 1);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002922
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002923 if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0);
2924
2925 // rows
2926 aom_wb_write_bit(wb, cm->log2_tile_rows != 0);
2927 if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1);
2928#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002929#if CONFIG_DEPENDENT_HORZTILES
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002930 if (cm->tile_rows > 1) aom_wb_write_bit(wb, cm->dependent_horz_tiles);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002931#endif
2932#if CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002933 }
Fangwen Fu70bcb892017-05-06 17:05:19 -07002934#endif // CONFIG_EXT_TILE
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002935
Ryan Lei9b02b0e2017-01-30 15:52:20 -08002936#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yunqing Wang42015d12017-10-17 15:43:49 -07002937 if (cm->tile_cols * cm->tile_rows > 1)
2938 aom_wb_write_bit(wb, cm->loop_filter_across_tiles_enabled);
Ryan Lei9b02b0e2017-01-30 15:52:20 -08002939#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002940}
2941
Zoe Liu8dd1c982017-09-11 10:14:35 -07002942#if USE_GF16_MULTI_LAYER
2943static int get_refresh_mask_gf16(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002944 int refresh_mask = 0;
2945
Zoe Liu8dd1c982017-09-11 10:14:35 -07002946 if (cpi->refresh_last_frame || cpi->refresh_golden_frame ||
2947 cpi->refresh_bwd_ref_frame || cpi->refresh_alt2_ref_frame ||
2948 cpi->refresh_alt_ref_frame) {
2949 assert(cpi->refresh_fb_idx >= 0 && cpi->refresh_fb_idx < REF_FRAMES);
2950 refresh_mask |= (1 << cpi->refresh_fb_idx);
2951 }
2952
2953 return refresh_mask;
2954}
2955#endif // USE_GF16_MULTI_LAYER
Zoe Liu8dd1c982017-09-11 10:14:35 -07002956
2957static int get_refresh_mask(AV1_COMP *cpi) {
Yi Luo2e6a9ab2017-09-15 08:13:59 -07002958 int refresh_mask = 0;
Zoe Liu8dd1c982017-09-11 10:14:35 -07002959#if USE_GF16_MULTI_LAYER
2960 if (cpi->rc.baseline_gf_interval == 16) return get_refresh_mask_gf16(cpi);
2961#endif // USE_GF16_MULTI_LAYER
2962
Yaowu Xuc27fc142016-08-22 16:08:15 -07002963 // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
2964 // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
2965 // the 3 LAST reference frames will be updated accordingly, i.e.:
2966 // (1) The original virtual index for LAST3_FRAME will become the new virtual
2967 // index for LAST_FRAME; and
2968 // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
2969 // shifted and become the new virtual indexes for LAST2_FRAME and
2970 // LAST3_FRAME.
2971 refresh_mask |=
2972 (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]);
Zoe Liue9b15e22017-07-19 15:53:01 -07002973
Zoe Liue9b15e22017-07-19 15:53:01 -07002974 refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
2975 refresh_mask |= (cpi->refresh_alt2_ref_frame << cpi->alt2_fb_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002976
Yaowu Xuf883b422016-08-30 14:01:10 -07002977 if (av1_preserve_existing_gf(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002978 // We have decided to preserve the previously existing golden frame as our
2979 // new ARF frame. However, in the short term we leave it in the GF slot and,
2980 // if we're updating the GF with the current decoded frame, we save it
2981 // instead to the ARF slot.
Yaowu Xuf883b422016-08-30 14:01:10 -07002982 // Later, in the function av1_encoder.c:av1_update_reference_frames() we
Yaowu Xuc27fc142016-08-22 16:08:15 -07002983 // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
2984 // there so that it can be done outside of the recode loop.
2985 // Note: This is highly specific to the use of ARF as a forward reference,
2986 // and this needs to be generalized as other uses are implemented
2987 // (like RTC/temporal scalability).
2988 return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx);
2989 } else {
Zoe Liue9b15e22017-07-19 15:53:01 -07002990 const int arf_idx = cpi->alt_fb_idx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002991 return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
2992 (cpi->refresh_alt_ref_frame << arf_idx);
2993 }
2994}
2995
2996#if CONFIG_EXT_TILE
2997static INLINE int find_identical_tile(
2998 const int tile_row, const int tile_col,
2999 TileBufferEnc (*const tile_buffers)[1024]) {
3000 const MV32 candidate_offset[1] = { { 1, 0 } };
3001 const uint8_t *const cur_tile_data =
3002 tile_buffers[tile_row][tile_col].data + 4;
Jingning Han99ffce62017-04-25 15:48:41 -07003003 const size_t cur_tile_size = tile_buffers[tile_row][tile_col].size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003004
3005 int i;
3006
3007 if (tile_row == 0) return 0;
3008
3009 // (TODO: yunqingwang) For now, only above tile is checked and used.
3010 // More candidates such as left tile can be added later.
3011 for (i = 0; i < 1; i++) {
3012 int row_offset = candidate_offset[0].row;
3013 int col_offset = candidate_offset[0].col;
3014 int row = tile_row - row_offset;
3015 int col = tile_col - col_offset;
3016 uint8_t tile_hdr;
3017 const uint8_t *tile_data;
3018 TileBufferEnc *candidate;
3019
3020 if (row < 0 || col < 0) continue;
3021
3022 tile_hdr = *(tile_buffers[row][col].data);
3023
3024 // Read out tcm bit
3025 if ((tile_hdr >> 7) == 1) {
3026 // The candidate is a copy tile itself
3027 row_offset += tile_hdr & 0x7f;
3028 row = tile_row - row_offset;
3029 }
3030
3031 candidate = &tile_buffers[row][col];
3032
3033 if (row_offset >= 128 || candidate->size != cur_tile_size) continue;
3034
3035 tile_data = candidate->data + 4;
3036
3037 if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;
3038
3039 // Identical tile found
3040 assert(row_offset > 0);
3041 return row_offset;
3042 }
3043
3044 // No identical tile found
3045 return 0;
3046}
3047#endif // CONFIG_EXT_TILE
3048
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003049#if !CONFIG_OBU || CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07003050static uint32_t write_tiles(AV1_COMP *const cpi, uint8_t *const dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003051 unsigned int *max_tile_size,
3052 unsigned int *max_tile_col_size) {
Thomas Davies4822e142017-10-10 11:30:36 +01003053 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuf883b422016-08-30 14:01:10 -07003054 aom_writer mode_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003055 int tile_row, tile_col;
3056 TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
clang-format67948d32016-09-07 22:40:40 -07003057 TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
James Zern71a37de2017-04-20 16:03:13 -07003058 uint32_t total_size = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003059 const int tile_cols = cm->tile_cols;
3060 const int tile_rows = cm->tile_rows;
Thomas Daviesaf6df172016-11-09 14:04:18 +00003061 unsigned int tile_size = 0;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003062 const int have_tiles = tile_cols * tile_rows > 1;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003063 struct aom_write_bit_buffer wb = { dst, 0 };
Thomas Davies80188d12016-10-26 16:08:35 -07003064 const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003065 uint32_t compressed_hdr_size;
Thomas Davies80188d12016-10-26 16:08:35 -07003066 // Fixed size tile groups for the moment
3067 const int num_tg_hdrs = cm->num_tg;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003068 const int tg_size =
3069#if CONFIG_EXT_TILE
3070 (cm->large_scale_tile)
3071 ? 1
3072 :
3073#endif // CONFIG_EXT_TILE
3074 (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
Thomas Davies80188d12016-10-26 16:08:35 -07003075 int tile_count = 0;
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00003076 int tg_count = 1;
3077 int tile_size_bytes = 4;
3078 int tile_col_size_bytes;
James Zern71a37de2017-04-20 16:03:13 -07003079 uint32_t uncompressed_hdr_size = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07003080 struct aom_write_bit_buffer tg_params_wb;
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00003081 struct aom_write_bit_buffer tile_size_bytes_wb;
James Zern71a37de2017-04-20 16:03:13 -07003082 uint32_t saved_offset;
Thomas Daviesaf6df172016-11-09 14:04:18 +00003083 int mtu_size = cpi->oxcf.mtu;
3084 int curr_tg_data_size = 0;
3085 int hdr_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003086
3087 *max_tile_size = 0;
3088 *max_tile_col_size = 0;
3089
3090// All tile size fields are output on 4 bytes. A call to remux_tiles will
3091// later compact the data if smaller headers are adequate.
3092
Thomas Davies4822e142017-10-10 11:30:36 +01003093#if CONFIG_SIMPLE_BWD_ADAPT
3094 cm->largest_tile_id = 0;
3095#endif
3096
Yaowu Xuc27fc142016-08-22 16:08:15 -07003097#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003098 if (cm->large_scale_tile) {
3099 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
3100 TileInfo tile_info;
3101 const int is_last_col = (tile_col == tile_cols - 1);
3102 const uint32_t col_offset = total_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003103
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003104 av1_tile_set_col(&tile_info, cm, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003105
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003106 // The last column does not have a column header
3107 if (!is_last_col) total_size += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003108
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003109 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
3110 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
3111 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
3112 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
3113 const int data_offset = have_tiles ? 4 : 0;
3114 const int tile_idx = tile_row * tile_cols + tile_col;
3115 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
3116 av1_tile_set_row(&tile_info, cm, tile_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003117
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003118 buf->data = dst + total_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003119
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003120 // Is CONFIG_EXT_TILE = 1, every tile in the row has a header,
3121 // even for the last one, unless no tiling is used at all.
3122 total_size += data_offset;
3123 // Initialise tile context from the frame context
3124 this_tile->tctx = *cm->fc;
3125 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07003126 mode_bc.allow_update_cdf = !cm->large_scale_tile;
Rupert Swarbrick7546b302017-10-26 10:45:26 +01003127#if CONFIG_LOOP_RESTORATION
Rupert Swarbrick76405202017-11-07 16:35:55 +00003128 av1_reset_loop_restoration(&cpi->td.mb.e_mbd);
Rupert Swarbrick7546b302017-10-26 10:45:26 +01003129#endif // CONFIG_LOOP_RESTORATION
3130
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003131 aom_start_encode(&mode_bc, buf->data + data_offset);
3132 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
3133 assert(tok == tok_end);
3134 aom_stop_encode(&mode_bc);
3135 tile_size = mode_bc.pos;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003136 buf->size = tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003137
Thomas Davies4822e142017-10-10 11:30:36 +01003138#if CONFIG_SIMPLE_BWD_ADAPT
3139 if (tile_size > *max_tile_size) {
3140 cm->largest_tile_id = tile_cols * tile_row + tile_col;
3141 }
3142#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003143 // Record the maximum tile size we see, so we can compact headers later.
3144 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003145
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003146 if (have_tiles) {
3147 // tile header: size of this tile, or copy offset
3148 uint32_t tile_header = tile_size;
3149 const int tile_copy_mode =
3150 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256)
3151 ? 1
3152 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003153
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003154 // If tile_copy_mode = 1, check if this tile is a copy tile.
3155 // Very low chances to have copy tiles on the key frames, so don't
3156 // search on key frames to reduce unnecessary search.
3157 if (cm->frame_type != KEY_FRAME && tile_copy_mode) {
3158 const int idendical_tile_offset =
3159 find_identical_tile(tile_row, tile_col, tile_buffers);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003160
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003161 if (idendical_tile_offset > 0) {
3162 tile_size = 0;
3163 tile_header = idendical_tile_offset | 0x80;
3164 tile_header <<= 24;
3165 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003166 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003167
3168 mem_put_le32(buf->data, tile_header);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003169 }
3170
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003171 total_size += tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003172 }
3173
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003174 if (!is_last_col) {
3175 uint32_t col_size = total_size - col_offset - 4;
3176 mem_put_le32(dst + col_offset, col_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003177
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003178 // If it is not final packing, record the maximum tile column size we
3179 // see, otherwise, check if the tile size is out of the range.
3180 *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
3181 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003182 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003183 } else {
3184#endif // CONFIG_EXT_TILE
Soo-Chul Han38427e82017-09-27 15:06:13 -04003185
3186#if !CONFIG_OBU
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003187 write_uncompressed_header_frame(cpi, &wb);
Soo-Chul Han38427e82017-09-27 15:06:13 -04003188#else
3189 write_uncompressed_header_obu(cpi, &wb);
3190#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003191
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003192 if (cm->show_existing_frame) {
3193 total_size = aom_wb_bytes_written(&wb);
3194 return (uint32_t)total_size;
3195 }
Jingning Hand3f441c2017-03-06 09:12:54 -08003196
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003197 // Write the tile length code
3198 tile_size_bytes_wb = wb;
3199 aom_wb_write_literal(&wb, 3, 2);
Thomas Davies80188d12016-10-26 16:08:35 -07003200
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003201 /* Write a placeholder for the number of tiles in each tile group */
3202 tg_params_wb = wb;
3203 saved_offset = wb.bit_offset;
3204 if (have_tiles) {
Imdad Sardharwalla857c99b2017-11-21 15:53:31 +00003205 aom_wb_write_literal(&wb, 3, n_log2_tiles);
3206 aom_wb_write_literal(&wb, (1 << n_log2_tiles) - 1, n_log2_tiles);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003207 }
Thomas Davies80188d12016-10-26 16:08:35 -07003208
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003209 if (!use_compressed_header(cm)) {
3210 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
3211 compressed_hdr_size = 0;
3212 } else {
3213 /* Write a placeholder for the compressed header length */
3214 struct aom_write_bit_buffer comp_hdr_len_wb = wb;
3215 aom_wb_write_literal(&wb, 0, 16);
Thomas Davies80188d12016-10-26 16:08:35 -07003216
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003217 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
3218 compressed_hdr_size =
3219 write_compressed_header(cpi, dst + uncompressed_hdr_size);
3220 aom_wb_overwrite_literal(&comp_hdr_len_wb, (int)(compressed_hdr_size),
3221 16);
3222 }
3223
3224 hdr_size = uncompressed_hdr_size + compressed_hdr_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003225 total_size += hdr_size;
Thomas Davies80188d12016-10-26 16:08:35 -07003226
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003227 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
3228 TileInfo tile_info;
3229 const int is_last_row = (tile_row == tile_rows - 1);
3230 av1_tile_set_row(&tile_info, cm, tile_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003231
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003232 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
3233 const int tile_idx = tile_row * tile_cols + tile_col;
3234 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
3235 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
3236 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
3237 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
3238 const int is_last_col = (tile_col == tile_cols - 1);
3239 const int is_last_tile = is_last_col && is_last_row;
Thomas Daviesaf6df172016-11-09 14:04:18 +00003240
Thomas Daviesb25ba502017-07-18 10:18:24 +01003241 if ((!mtu_size && tile_count > tg_size) ||
3242 (mtu_size && tile_count && curr_tg_data_size >= mtu_size)) {
3243 // New tile group
3244 tg_count++;
3245 // We've exceeded the packet size
3246 if (tile_count > 1) {
3247 /* The last tile exceeded the packet size. The tile group size
3248 should therefore be tile_count-1.
3249 Move the last tile and insert headers before it
3250 */
3251 uint32_t old_total_size = total_size - tile_size - 4;
3252 memmove(dst + old_total_size + hdr_size, dst + old_total_size,
3253 (tile_size + 4) * sizeof(uint8_t));
3254 // Copy uncompressed header
3255 memmove(dst + old_total_size, dst,
3256 uncompressed_hdr_size * sizeof(uint8_t));
3257 // Write the number of tiles in the group into the last uncompressed
3258 // header before the one we've just inserted
3259 aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
3260 n_log2_tiles);
3261 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 2,
3262 n_log2_tiles);
3263 // Update the pointer to the last TG params
3264 tg_params_wb.bit_offset = saved_offset + 8 * old_total_size;
3265 // Copy compressed header
3266 memmove(dst + old_total_size + uncompressed_hdr_size,
3267 dst + uncompressed_hdr_size,
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003268 compressed_hdr_size * sizeof(uint8_t));
Thomas Daviesb25ba502017-07-18 10:18:24 +01003269 total_size += hdr_size;
3270 tile_count = 1;
3271 curr_tg_data_size = hdr_size + tile_size + 4;
3272 } else {
3273 // We exceeded the packet size in just one tile
3274 // Copy uncompressed header
3275 memmove(dst + total_size, dst,
3276 uncompressed_hdr_size * sizeof(uint8_t));
3277 // Write the number of tiles in the group into the last uncompressed
3278 // header
3279 aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
3280 n_log2_tiles);
3281 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1,
3282 n_log2_tiles);
3283 tg_params_wb.bit_offset = saved_offset + 8 * total_size;
3284 // Copy compressed header
3285 memmove(dst + total_size + uncompressed_hdr_size,
3286 dst + uncompressed_hdr_size,
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003287 compressed_hdr_size * sizeof(uint8_t));
Thomas Daviesb25ba502017-07-18 10:18:24 +01003288 total_size += hdr_size;
3289 tile_count = 0;
3290 curr_tg_data_size = hdr_size;
3291 }
Thomas Daviesaf6df172016-11-09 14:04:18 +00003292 }
Thomas Daviesb25ba502017-07-18 10:18:24 +01003293 tile_count++;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003294 av1_tile_set_col(&tile_info, cm, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003295
Thomas Daviesb25ba502017-07-18 10:18:24 +01003296#if CONFIG_DEPENDENT_HORZTILES
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003297 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
Fangwen Fu73126c02017-02-08 22:37:47 -08003298#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003299 buf->data = dst + total_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003300
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003301 // The last tile does not have a header.
3302 if (!is_last_tile) total_size += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003303
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003304 // Initialise tile context from the frame context
3305 this_tile->tctx = *cm->fc;
3306 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07003307 mode_bc.allow_update_cdf = 1;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01003308#if CONFIG_LOOP_RESTORATION
Rupert Swarbrick76405202017-11-07 16:35:55 +00003309 av1_reset_loop_restoration(&cpi->td.mb.e_mbd);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01003310#endif // CONFIG_LOOP_RESTORATION
3311
Alex Converse30f0e152017-03-28 10:13:27 -07003312 aom_start_encode(&mode_bc, dst + total_size);
3313 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
Jingning Han223b90e2017-04-04 09:48:37 -07003314#if !CONFIG_LV_MAP
Alex Converse30f0e152017-03-28 10:13:27 -07003315 assert(tok == tok_end);
Jingning Han223b90e2017-04-04 09:48:37 -07003316#endif // !CONFIG_LV_MAP
Alex Converse30f0e152017-03-28 10:13:27 -07003317 aom_stop_encode(&mode_bc);
3318 tile_size = mode_bc.pos;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003319 assert(tile_size > 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003320
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003321 curr_tg_data_size += tile_size + 4;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003322 buf->size = tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003323
Thomas Davies4822e142017-10-10 11:30:36 +01003324#if CONFIG_SIMPLE_BWD_ADAPT
3325 if (tile_size > *max_tile_size) {
3326 cm->largest_tile_id = tile_cols * tile_row + tile_col;
3327 }
3328#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003329 if (!is_last_tile) {
3330 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
3331 // size of this tile
3332 mem_put_le32(buf->data, tile_size);
3333 }
3334
3335 total_size += tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003336 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003337 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003338 // Write the final tile group size
3339 if (n_log2_tiles) {
Dominic Symesf58f1112017-09-25 12:47:40 +02003340 aom_wb_overwrite_literal(
3341 &tg_params_wb, (tile_cols * tile_rows) - tile_count, n_log2_tiles);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003342 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1, n_log2_tiles);
3343 }
3344 // Remux if possible. TODO (Thomas Davies): do this for more than one tile
3345 // group
3346 if (have_tiles && tg_count == 1) {
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003347 int data_size =
3348 total_size - (uncompressed_hdr_size + compressed_hdr_size);
3349 data_size =
3350 remux_tiles(cm, dst + uncompressed_hdr_size + compressed_hdr_size,
3351 data_size, *max_tile_size, *max_tile_col_size,
3352 &tile_size_bytes, &tile_col_size_bytes);
3353 total_size = data_size + uncompressed_hdr_size + compressed_hdr_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003354 aom_wb_overwrite_literal(&tile_size_bytes_wb, tile_size_bytes - 1, 2);
3355 }
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00003356
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003357#if CONFIG_EXT_TILE
3358 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003359#endif // CONFIG_EXT_TILE
3360 return (uint32_t)total_size;
3361}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003362#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003363
Yaowu Xuf883b422016-08-30 14:01:10 -07003364static void write_render_size(const AV1_COMMON *cm,
3365 struct aom_write_bit_buffer *wb) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003366 const int scaling_active = !av1_resize_unscaled(cm);
Yaowu Xuf883b422016-08-30 14:01:10 -07003367 aom_wb_write_bit(wb, scaling_active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003368 if (scaling_active) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003369 aom_wb_write_literal(wb, cm->render_width - 1, 16);
3370 aom_wb_write_literal(wb, cm->render_height - 1, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003371 }
3372}
3373
Fergus Simpsond91c8c92017-04-07 12:12:00 -07003374#if CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07003375static void write_superres_scale(const AV1_COMMON *const cm,
3376 struct aom_write_bit_buffer *wb) {
Fergus Simpsone7508412017-03-14 18:14:09 -07003377 // First bit is whether to to scale or not
Urvang Joshide71d142017-10-05 12:12:15 -07003378 if (cm->superres_scale_denominator == SCALE_NUMERATOR) {
Fergus Simpsone7508412017-03-14 18:14:09 -07003379 aom_wb_write_bit(wb, 0); // no scaling
3380 } else {
3381 aom_wb_write_bit(wb, 1); // scaling, write scale factor
Urvang Joshi83010182017-10-27 12:36:02 -07003382 assert(cm->superres_scale_denominator >= SUPERRES_SCALE_DENOMINATOR_MIN);
3383 assert(cm->superres_scale_denominator <
3384 SUPERRES_SCALE_DENOMINATOR_MIN + (1 << SUPERRES_SCALE_BITS));
Fergus Simpsone7508412017-03-14 18:14:09 -07003385 aom_wb_write_literal(
Urvang Joshide71d142017-10-05 12:12:15 -07003386 wb, cm->superres_scale_denominator - SUPERRES_SCALE_DENOMINATOR_MIN,
Fergus Simpsone7508412017-03-14 18:14:09 -07003387 SUPERRES_SCALE_BITS);
3388 }
3389}
Fergus Simpsond91c8c92017-04-07 12:12:00 -07003390#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07003391
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003392#if CONFIG_FRAME_SIZE
3393static void write_frame_size(const AV1_COMMON *cm, int frame_size_override,
David Barker22171312017-11-20 11:26:04 +00003394 struct aom_write_bit_buffer *wb)
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003395#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003396static void write_frame_size(const AV1_COMMON *cm,
David Barker22171312017-11-20 11:26:04 +00003397 struct aom_write_bit_buffer *wb)
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003398#endif
David Barker22171312017-11-20 11:26:04 +00003399{
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003400#if CONFIG_FRAME_SUPERRES
David Barker22171312017-11-20 11:26:04 +00003401 const int coded_width = cm->superres_upscaled_width - 1;
3402 const int coded_height = cm->superres_upscaled_height - 1;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003403#else
David Barker22171312017-11-20 11:26:04 +00003404 const int coded_width = cm->width - 1;
3405 const int coded_height = cm->height - 1;
3406#endif
3407
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003408#if CONFIG_FRAME_SIZE
3409 if (frame_size_override) {
3410 const SequenceHeader *seq_params = &cm->seq_params;
3411 int num_bits_width = seq_params->num_bits_width;
3412 int num_bits_height = seq_params->num_bits_height;
David Barker22171312017-11-20 11:26:04 +00003413 aom_wb_write_literal(wb, coded_width, num_bits_width);
3414 aom_wb_write_literal(wb, coded_height, num_bits_height);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003415 }
3416#else
David Barker22171312017-11-20 11:26:04 +00003417 aom_wb_write_literal(wb, coded_width, 16);
3418 aom_wb_write_literal(wb, coded_height, 16);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003419#endif
David Barker22171312017-11-20 11:26:04 +00003420
3421#if CONFIG_FRAME_SUPERRES
3422 write_superres_scale(cm, wb);
3423#endif
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003424 write_render_size(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003425}
3426
Yaowu Xuf883b422016-08-30 14:01:10 -07003427static void write_frame_size_with_refs(AV1_COMP *cpi,
3428 struct aom_write_bit_buffer *wb) {
3429 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003430 int found = 0;
3431
3432 MV_REFERENCE_FRAME ref_frame;
3433 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
3434 YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);
3435
3436 if (cfg != NULL) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003437#if CONFIG_FRAME_SUPERRES
3438 found = cm->superres_upscaled_width == cfg->y_crop_width &&
3439 cm->superres_upscaled_height == cfg->y_crop_height;
3440#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003441 found =
3442 cm->width == cfg->y_crop_width && cm->height == cfg->y_crop_height;
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003443#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003444 found &= cm->render_width == cfg->render_width &&
3445 cm->render_height == cfg->render_height;
3446 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003447 aom_wb_write_bit(wb, found);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003448 if (found) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003449#if CONFIG_FRAME_SUPERRES
3450 write_superres_scale(cm, wb);
3451#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuc27fc142016-08-22 16:08:15 -07003452 break;
3453 }
3454 }
3455
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003456#if CONFIG_FRAME_SIZE
3457 if (!found) {
3458 int frame_size_override = 1; // Allways equal to 1 in this function
3459 write_frame_size(cm, frame_size_override, wb);
3460 }
3461#else
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07003462 if (!found) write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003463#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003464}
3465
Yaowu Xuc27fc142016-08-22 16:08:15 -07003466static void write_profile(BITSTREAM_PROFILE profile,
Yaowu Xuf883b422016-08-30 14:01:10 -07003467 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003468 switch (profile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003469 case PROFILE_0: aom_wb_write_literal(wb, 0, 2); break;
3470 case PROFILE_1: aom_wb_write_literal(wb, 2, 2); break;
3471 case PROFILE_2: aom_wb_write_literal(wb, 1, 2); break;
3472 case PROFILE_3: aom_wb_write_literal(wb, 6, 3); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003473 default: assert(0);
3474 }
3475}
3476
3477static void write_bitdepth_colorspace_sampling(
Yaowu Xuf883b422016-08-30 14:01:10 -07003478 AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003479 if (cm->profile >= PROFILE_2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003480 assert(cm->bit_depth > AOM_BITS_8);
3481 aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003482 }
anorkin76fb1262017-03-22 15:12:12 -07003483#if CONFIG_COLORSPACE_HEADERS
3484 aom_wb_write_literal(wb, cm->color_space, 5);
3485 aom_wb_write_literal(wb, cm->transfer_function, 5);
3486#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003487 aom_wb_write_literal(wb, cm->color_space, 3);
anorkin76fb1262017-03-22 15:12:12 -07003488#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07003489 if (cm->color_space != AOM_CS_SRGB) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003490 // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
Yaowu Xuf883b422016-08-30 14:01:10 -07003491 aom_wb_write_bit(wb, cm->color_range);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003492 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
3493 assert(cm->subsampling_x != 1 || cm->subsampling_y != 1);
Yaowu Xuf883b422016-08-30 14:01:10 -07003494 aom_wb_write_bit(wb, cm->subsampling_x);
3495 aom_wb_write_bit(wb, cm->subsampling_y);
3496 aom_wb_write_bit(wb, 0); // unused
Yaowu Xuc27fc142016-08-22 16:08:15 -07003497 } else {
3498 assert(cm->subsampling_x == 1 && cm->subsampling_y == 1);
3499 }
anorkin76fb1262017-03-22 15:12:12 -07003500#if CONFIG_COLORSPACE_HEADERS
3501 if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
3502 aom_wb_write_literal(wb, cm->chroma_sample_position, 2);
3503 }
3504#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003505 } else {
3506 assert(cm->profile == PROFILE_1 || cm->profile == PROFILE_3);
Yaowu Xuf883b422016-08-30 14:01:10 -07003507 aom_wb_write_bit(wb, 0); // unused
Yaowu Xuc27fc142016-08-22 16:08:15 -07003508 }
Yaowu Xu6fc47e52017-12-04 15:07:48 -08003509#if CONFIG_EXT_QM
3510 aom_wb_write_bit(wb, cm->separate_uv_delta_q);
3511#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003512}
3513
Rupert Swarbrickb394bfe2017-11-07 17:52:13 +00003514#if CONFIG_REFERENCE_BUFFER || CONFIG_OBU
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003515void write_sequence_header(AV1_COMP *cpi, struct aom_write_bit_buffer *wb) {
3516 AV1_COMMON *const cm = &cpi->common;
David Barker5e70a112017-10-03 14:28:17 +01003517 SequenceHeader *seq_params = &cm->seq_params;
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003518
3519#if CONFIG_FRAME_SIZE
3520 int num_bits_width = 16;
3521 int num_bits_height = 16;
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003522 int max_frame_width = cpi->oxcf.width;
3523 int max_frame_height = cpi->oxcf.height;
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003524
3525 seq_params->num_bits_width = num_bits_width;
3526 seq_params->num_bits_height = num_bits_height;
3527 seq_params->max_frame_width = max_frame_width;
3528 seq_params->max_frame_height = max_frame_height;
3529
3530 aom_wb_write_literal(wb, num_bits_width - 1, 4);
3531 aom_wb_write_literal(wb, num_bits_height - 1, 4);
3532 aom_wb_write_literal(wb, max_frame_width - 1, num_bits_width);
3533 aom_wb_write_literal(wb, max_frame_height - 1, num_bits_height);
3534#endif
3535
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003536 /* Placeholder for actually writing to the bitstream */
Yunqing Wangc2502b52017-07-19 17:44:18 -07003537 seq_params->frame_id_numbers_present_flag =
3538#if CONFIG_EXT_TILE
3539 cm->large_scale_tile ? 0 :
3540#endif // CONFIG_EXT_TILE
3541 FRAME_ID_NUMBERS_PRESENT_FLAG;
Sebastien Alaiwand418f682017-10-19 15:06:52 +02003542 seq_params->frame_id_length = FRAME_ID_LENGTH;
3543 seq_params->delta_frame_id_length = DELTA_FRAME_ID_LENGTH;
David Barker5e70a112017-10-03 14:28:17 +01003544
3545 aom_wb_write_bit(wb, seq_params->frame_id_numbers_present_flag);
3546 if (seq_params->frame_id_numbers_present_flag) {
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003547 // We must always have delta_frame_id_length < frame_id_length,
3548 // in order for a frame to be referenced with a unique delta.
3549 // Avoid wasting bits by using a coding that enforces this restriction.
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003550 aom_wb_write_literal(wb, seq_params->delta_frame_id_length - 2, 4);
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003551 aom_wb_write_literal(
3552 wb, seq_params->frame_id_length - seq_params->delta_frame_id_length - 1,
3553 3);
David Barker5e70a112017-10-03 14:28:17 +01003554 }
Rupert Swarbricke0b15992017-11-09 15:04:32 +00003555
3556#if CONFIG_MONO_VIDEO
Rupert Swarbricke0b15992017-11-09 15:04:32 +00003557 aom_wb_write_bit(wb, seq_params->monochrome);
3558#endif // CONFIG_MONO_VIDEO
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003559}
Rupert Swarbrickb394bfe2017-11-07 17:52:13 +00003560#endif // CONFIG_REFERENCE_BUFFER || CONFIG_OBU
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003561
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07003562static void write_sb_size(const AV1_COMMON *cm,
3563 struct aom_write_bit_buffer *wb) {
3564 (void)cm;
3565 (void)wb;
3566 assert(cm->mib_size == mi_size_wide[cm->sb_size]);
3567 assert(cm->mib_size == 1 << cm->mib_size_log2);
3568#if CONFIG_EXT_PARTITION
3569 assert(cm->sb_size == BLOCK_128X128 || cm->sb_size == BLOCK_64X64);
3570 aom_wb_write_bit(wb, cm->sb_size == BLOCK_128X128 ? 1 : 0);
3571#else
3572 assert(cm->sb_size == BLOCK_64X64);
3573#endif // CONFIG_EXT_PARTITION
3574}
3575
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003576static void write_compound_tools(const AV1_COMMON *cm,
3577 struct aom_write_bit_buffer *wb) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003578 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
3579 aom_wb_write_bit(wb, cm->allow_interintra_compound);
3580 } else {
3581 assert(cm->allow_interintra_compound == 0);
3582 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003583 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
3584 aom_wb_write_bit(wb, cm->allow_masked_compound);
3585 } else {
3586 assert(cm->allow_masked_compound == 0);
3587 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003588}
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003589
David Barkerd7c8bd52017-09-25 14:47:29 +01003590static void write_global_motion_params(const WarpedMotionParams *params,
3591 const WarpedMotionParams *ref_params,
Sarah Parker3e579a62017-08-23 16:53:20 -07003592 struct aom_write_bit_buffer *wb,
3593 int allow_hp) {
Sebastien Alaiwane4984ff2017-10-31 15:27:44 +01003594 const TransformationType type = params->wmtype;
Sarah Parker3e579a62017-08-23 16:53:20 -07003595
3596 aom_wb_write_bit(wb, type != IDENTITY);
3597 if (type != IDENTITY) {
3598#if GLOBAL_TRANS_TYPES > 4
3599 aom_wb_write_literal(wb, type - 1, GLOBAL_TYPE_BITS);
3600#else
3601 aom_wb_write_bit(wb, type == ROTZOOM);
3602 if (type != ROTZOOM) aom_wb_write_bit(wb, type == TRANSLATION);
3603#endif // GLOBAL_TRANS_TYPES > 4
3604 }
3605
Sebastien Alaiwane4984ff2017-10-31 15:27:44 +01003606 if (type >= ROTZOOM) {
3607 aom_wb_write_signed_primitive_refsubexpfin(
3608 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3609 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
3610 (1 << GM_ALPHA_PREC_BITS),
3611 (params->wmmat[2] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
3612 aom_wb_write_signed_primitive_refsubexpfin(
3613 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3614 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF),
3615 (params->wmmat[3] >> GM_ALPHA_PREC_DIFF));
3616 }
3617
3618 if (type >= AFFINE) {
3619 aom_wb_write_signed_primitive_refsubexpfin(
3620 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3621 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF),
3622 (params->wmmat[4] >> GM_ALPHA_PREC_DIFF));
3623 aom_wb_write_signed_primitive_refsubexpfin(
3624 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3625 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
3626 (1 << GM_ALPHA_PREC_BITS),
3627 (params->wmmat[5] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
3628 }
3629
3630 if (type >= TRANSLATION) {
3631 const int trans_bits = (type == TRANSLATION)
3632 ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
3633 : GM_ABS_TRANS_BITS;
3634 const int trans_prec_diff = (type == TRANSLATION)
3635 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
3636 : GM_TRANS_PREC_DIFF;
3637 aom_wb_write_signed_primitive_refsubexpfin(
3638 wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
3639 (ref_params->wmmat[0] >> trans_prec_diff),
3640 (params->wmmat[0] >> trans_prec_diff));
3641 aom_wb_write_signed_primitive_refsubexpfin(
3642 wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
3643 (ref_params->wmmat[1] >> trans_prec_diff),
3644 (params->wmmat[1] >> trans_prec_diff));
Sarah Parker3e579a62017-08-23 16:53:20 -07003645 }
3646}
3647
3648static void write_global_motion(AV1_COMP *cpi,
3649 struct aom_write_bit_buffer *wb) {
3650 AV1_COMMON *const cm = &cpi->common;
3651 int frame;
3652 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barkerd7c8bd52017-09-25 14:47:29 +01003653 const WarpedMotionParams *ref_params =
3654 cm->error_resilient_mode ? &default_warp_params
3655 : &cm->prev_frame->global_motion[frame];
3656 write_global_motion_params(&cm->global_motion[frame], ref_params, wb,
Sarah Parker3e579a62017-08-23 16:53:20 -07003657 cm->allow_high_precision_mv);
3658 // TODO(sarahparker, debargha): The logic in the commented out code below
3659 // does not work currently and causes mismatches when resize is on.
3660 // Fix it before turning the optimization back on.
3661 /*
3662 YV12_BUFFER_CONFIG *ref_buf = get_ref_frame_buffer(cpi, frame);
3663 if (cpi->source->y_crop_width == ref_buf->y_crop_width &&
3664 cpi->source->y_crop_height == ref_buf->y_crop_height) {
3665 write_global_motion_params(&cm->global_motion[frame],
3666 &cm->prev_frame->global_motion[frame], wb,
3667 cm->allow_high_precision_mv);
3668 } else {
3669 assert(cm->global_motion[frame].wmtype == IDENTITY &&
3670 "Invalid warp type for frames of different resolutions");
3671 }
3672 */
3673 /*
3674 printf("Frame %d/%d: Enc Ref %d: %d %d %d %d\n",
3675 cm->current_video_frame, cm->show_frame, frame,
3676 cm->global_motion[frame].wmmat[0],
3677 cm->global_motion[frame].wmmat[1], cm->global_motion[frame].wmmat[2],
3678 cm->global_motion[frame].wmmat[3]);
3679 */
3680 }
3681}
Sarah Parker3e579a62017-08-23 16:53:20 -07003682
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003683#if !CONFIG_OBU
3684static void write_uncompressed_header_frame(AV1_COMP *cpi,
3685 struct aom_write_bit_buffer *wb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003686 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003687 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
3688
Yaowu Xuf883b422016-08-30 14:01:10 -07003689 aom_wb_write_literal(wb, AOM_FRAME_MARKER, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003690
3691 write_profile(cm->profile, wb);
3692
Yunqing Wangc2502b52017-07-19 17:44:18 -07003693#if CONFIG_EXT_TILE
3694 aom_wb_write_literal(wb, cm->large_scale_tile, 1);
3695#endif // CONFIG_EXT_TILE
3696
Yaowu Xuc27fc142016-08-22 16:08:15 -07003697 // NOTE: By default all coded frames to be used as a reference
3698 cm->is_reference_frame = 1;
3699
3700 if (cm->show_existing_frame) {
3701 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
3702 const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];
3703
3704 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003705 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003706 "Buffer %d does not contain a reconstructed frame",
3707 frame_to_show);
3708 }
3709 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
3710
Yaowu Xuf883b422016-08-30 14:01:10 -07003711 aom_wb_write_bit(wb, 1); // show_existing_frame
3712 aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003713
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003714#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003715 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003716 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003717 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
3718 aom_wb_write_literal(wb, display_frame_id, frame_id_len);
3719 /* Add a zero byte to prevent emulation of superframe marker */
3720 /* Same logic as when when terminating the entropy coder */
3721 /* Consider to have this logic only one place */
3722 aom_wb_write_literal(wb, 0, 8);
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003723 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003724#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003725
Yaowu Xuc27fc142016-08-22 16:08:15 -07003726 return;
3727 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003728 aom_wb_write_bit(wb, 0); // show_existing_frame
Yaowu Xuc27fc142016-08-22 16:08:15 -07003729 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003730
Yaowu Xuf883b422016-08-30 14:01:10 -07003731 aom_wb_write_bit(wb, cm->frame_type);
3732 aom_wb_write_bit(wb, cm->show_frame);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003733 if (cm->frame_type != KEY_FRAME)
3734 if (!cm->show_frame) aom_wb_write_bit(wb, cm->intra_only);
Yaowu Xuf883b422016-08-30 14:01:10 -07003735 aom_wb_write_bit(wb, cm->error_resilient_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003736
Pavel Frolov3b95c502017-10-01 21:35:24 +03003737 if (frame_is_intra_only(cm)) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003738#if CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003739 write_sequence_header(cpi, wb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003740#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003741 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003742#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003743 cm->invalid_delta_frame_id_minus1 = 0;
3744 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003745 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003746 aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003747 }
3748#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003749
3750#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003751 if (cm->width > cm->seq_params.max_frame_width ||
3752 cm->height > cm->seq_params.max_frame_height) {
3753 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3754 "Frame dimensions are larger than the maximum values");
3755 }
David Barker22171312017-11-20 11:26:04 +00003756#if CONFIG_FRAME_SUPERRES
3757 const int coded_width = cm->superres_upscaled_width;
3758 const int coded_height = cm->superres_upscaled_height;
3759#else
3760 const int coded_width = cm->width;
3761 const int coded_height = cm->height;
3762#endif
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003763 int frame_size_override_flag =
David Barker22171312017-11-20 11:26:04 +00003764 (coded_width != cm->seq_params.max_frame_width ||
3765 coded_height != cm->seq_params.max_frame_height);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003766 aom_wb_write_bit(wb, frame_size_override_flag);
3767#endif
3768
Yaowu Xuc27fc142016-08-22 16:08:15 -07003769 if (cm->frame_type == KEY_FRAME) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003770 write_bitdepth_colorspace_sampling(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003771#if CONFIG_FRAME_SIZE
3772 write_frame_size(cm, frame_size_override_flag, wb);
3773#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003774 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003775#endif
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07003776 write_sb_size(cm, wb);
3777
hui su24f7b072016-10-12 11:36:24 -07003778 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
Hui Su85878782017-11-07 14:56:31 -08003779#if CONFIG_INTRABC
3780 if (cm->allow_screen_content_tools) aom_wb_write_bit(wb, cm->allow_intrabc);
3781#endif // CONFIG_INTRABC
RogerZhou3b635242017-09-19 10:06:46 -07003782#if CONFIG_AMVR
3783 if (cm->allow_screen_content_tools) {
RogerZhou10a03802017-10-26 11:49:48 -07003784 if (cm->seq_force_integer_mv == 2) {
RogerZhou3b635242017-09-19 10:06:46 -07003785 aom_wb_write_bit(wb, 1);
3786 } else {
3787 aom_wb_write_bit(wb, 0);
RogerZhou10a03802017-10-26 11:49:48 -07003788 aom_wb_write_bit(wb, cm->seq_force_integer_mv);
RogerZhou3b635242017-09-19 10:06:46 -07003789 }
3790 }
3791#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003792 } else {
Thomas Daedea6a854b2017-06-22 17:49:11 -07003793#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07003794 if (!cm->error_resilient_mode) {
3795 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003796 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003797 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
3798 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003799 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003800 cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
3801 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
Yaowu Xuf883b422016-08-30 14:01:10 -07003802 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003803 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
3804 }
3805 }
Thomas Daedea6a854b2017-06-22 17:49:11 -07003806#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003807 cpi->refresh_frame_mask = get_refresh_mask(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003808
3809 if (cm->intra_only) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003810 write_bitdepth_colorspace_sampling(cm, wb);
3811
Yaowu Xuf883b422016-08-30 14:01:10 -07003812 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003813#if CONFIG_FRAME_SIZE
3814 write_frame_size(cm, frame_size_override_flag, wb);
3815#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003816 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003817#endif
Pavel Frolovef4af292017-11-01 18:23:02 +03003818 write_sb_size(cm, wb);
Hui Sudf89ee32017-11-21 11:47:58 -08003819 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
3820#if CONFIG_INTRABC
3821 if (cm->allow_screen_content_tools)
3822 aom_wb_write_bit(wb, cm->allow_intrabc);
3823#endif // CONFIG_INTRABC
Yaowu Xuc27fc142016-08-22 16:08:15 -07003824 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003825 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003826
Yaowu Xuc27fc142016-08-22 16:08:15 -07003827 if (!cpi->refresh_frame_mask) {
3828 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
3829 // will not be used as a reference
3830 cm->is_reference_frame = 0;
3831 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003832
Zoe Liuf40a9572017-10-13 12:37:19 -07003833 for (MV_REFERENCE_FRAME ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME;
3834 ++ref_frame) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003835 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
Yaowu Xuf883b422016-08-30 14:01:10 -07003836 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
Yaowu Xuc27fc142016-08-22 16:08:15 -07003837 REF_FRAMES_LOG2);
Zoe Liu17af2742017-10-06 10:36:42 -07003838#if !CONFIG_FRAME_SIGN_BIAS
Yaowu Xuf883b422016-08-30 14:01:10 -07003839 aom_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
Zoe Liu17af2742017-10-06 10:36:42 -07003840#endif // !CONFIG_FRAME_SIGN_BIAS
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003841#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003842 if (cm->seq_params.frame_id_numbers_present_flag) {
3843 int i = get_ref_frame_map_idx(cpi, ref_frame);
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003844 int frame_id_len = cm->seq_params.frame_id_length;
3845 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003846 int delta_frame_id_minus1 =
3847 ((cm->current_frame_id - cm->ref_frame_id[i] +
3848 (1 << frame_id_len)) %
3849 (1 << frame_id_len)) -
3850 1;
3851 if (delta_frame_id_minus1 < 0 ||
3852 delta_frame_id_minus1 >= (1 << diff_len))
3853 cm->invalid_delta_frame_id_minus1 = 1;
3854 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003855 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003856#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07003857 }
3858
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003859#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003860 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003861 write_frame_size_with_refs(cpi, wb);
3862 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003863 write_frame_size(cm, frame_size_override_flag, wb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003864 }
3865#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003866 write_frame_size_with_refs(cpi, wb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003867#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003868
RogerZhou3b635242017-09-19 10:06:46 -07003869#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003870 if (cm->seq_force_integer_mv == 2) {
3871 aom_wb_write_bit(wb, cm->cur_frame_force_integer_mv);
RogerZhou3b635242017-09-19 10:06:46 -07003872 }
RogerZhou10a03802017-10-26 11:49:48 -07003873 if (cm->cur_frame_force_integer_mv) {
3874 cm->allow_high_precision_mv = 0;
3875 } else {
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003876#if !CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003877 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003878#endif // !CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003879 }
3880#else
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003881#if !CONFIG_EIGHTH_PEL_MV_ONLY
Yaowu Xuf883b422016-08-30 14:01:10 -07003882 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003883#endif // !CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003884#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003885 fix_interp_filter(cm, cpi->td.counts);
Angie Chiang5678ad92016-11-21 09:38:40 -08003886 write_frame_interp_filter(cm->interp_filter, wb);
Fangwen Fu8d164de2016-12-14 13:40:54 -08003887#if CONFIG_TEMPMV_SIGNALING
Jingning Hane17ebe92017-11-03 15:25:42 -07003888 if (frame_might_use_prev_frame_mvs(cm))
3889 aom_wb_write_bit(wb, cm->use_ref_frame_mvs);
Fangwen Fu8d164de2016-12-14 13:40:54 -08003890#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003891 }
3892 }
3893
Jingning Hanea255c92017-09-29 08:12:09 -07003894#if CONFIG_FRAME_MARKER
Jingning Hanc723b342017-08-24 11:19:46 -07003895 if (cm->show_frame == 0) {
3896 int arf_offset = AOMMIN(
3897 (MAX_GF_INTERVAL - 1),
3898 cpi->twopass.gf_group.arf_src_offset[cpi->twopass.gf_group.index]);
Jingning Hanc723b342017-08-24 11:19:46 -07003899 int brf_offset =
3900 cpi->twopass.gf_group.brf_src_offset[cpi->twopass.gf_group.index];
3901
3902 arf_offset = AOMMIN((MAX_GF_INTERVAL - 1), arf_offset + brf_offset);
Cheng Chend300f0e2017-12-01 10:46:23 -08003903 aom_wb_write_literal(wb, arf_offset, FRAME_OFFSET_BITS);
Jingning Hanc723b342017-08-24 11:19:46 -07003904 }
Zoe Liuf40a9572017-10-13 12:37:19 -07003905
3906#if CONFIG_EXT_SKIP
3907 if (cm->is_skip_mode_allowed) aom_wb_write_bit(wb, cm->skip_mode_flag);
3908#endif // CONFIG_EXT_SKIP
3909#endif // CONFIG_FRAME_MARKER
Jingning Hanc723b342017-08-24 11:19:46 -07003910
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003911#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003912 if (cm->seq_params.frame_id_numbers_present_flag) {
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003913 cm->refresh_mask =
3914 cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
3915 }
3916#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003917
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01003918#if CONFIG_EXT_TILE
3919 const int might_bwd_adapt =
3920 !(cm->error_resilient_mode || cm->large_scale_tile);
3921#else
3922 const int might_bwd_adapt = !cm->error_resilient_mode;
3923#endif // CONFIG_EXT_TILE
3924 if (might_bwd_adapt) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003925 aom_wb_write_bit(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003926 wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
3927 }
Thomas Daededa4d8b92017-06-05 15:44:14 -07003928#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuf883b422016-08-30 14:01:10 -07003929 aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);
Thomas Daededa4d8b92017-06-05 15:44:14 -07003930#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003931 encode_loopfilter(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003932 encode_quantization(cm, wb);
3933 encode_segmentation(cm, xd, wb);
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +01003934#if CONFIG_Q_SEGMENTATION
3935 encode_q_segmentation(cm, wb);
3936#endif
Arild Fuldseth07441162016-08-15 15:07:52 +02003937 {
Thomas Davies28444be2017-10-13 18:12:25 +01003938 int delta_q_allowed = 1;
3939#if !CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003940 int i;
3941 struct segmentation *const seg = &cm->seg;
3942 int segment_quantizer_active = 0;
3943 for (i = 0; i < MAX_SEGMENTS; i++) {
3944 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
3945 segment_quantizer_active = 1;
3946 }
3947 }
Rostislav Pehlivanovda067792017-11-14 05:23:12 +00003948#if CONFIG_Q_SEGMENTATION
3949 segment_quantizer_active |= !!seg->q_lvls;
3950#endif
Thomas Davies28444be2017-10-13 18:12:25 +01003951 delta_q_allowed = !segment_quantizer_active;
3952#endif
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01003953
Yaowu Xu288f8162017-10-10 15:03:22 -07003954 if (cm->delta_q_present_flag) assert(cm->base_qindex > 0);
Thomas Davies28444be2017-10-13 18:12:25 +01003955 // Segment quantizer and delta_q both allowed if CONFIG_EXT_DELTA_Q
3956 if (delta_q_allowed == 1 && cm->base_qindex > 0) {
Arild Fuldseth07441162016-08-15 15:07:52 +02003957 aom_wb_write_bit(wb, cm->delta_q_present_flag);
3958 if (cm->delta_q_present_flag) {
Thomas Daviesf6936102016-09-05 16:51:31 +01003959 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
Arild Fuldseth07441162016-08-15 15:07:52 +02003960 xd->prev_qindex = cm->base_qindex;
Fangwen Fu231fe422017-04-24 17:52:29 -07003961#if CONFIG_EXT_DELTA_Q
Fangwen Fu231fe422017-04-24 17:52:29 -07003962 aom_wb_write_bit(wb, cm->delta_lf_present_flag);
3963 if (cm->delta_lf_present_flag) {
3964 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_lf_res) - 1, 2);
Cheng Chen880166a2017-10-02 17:48:48 -07003965 xd->prev_delta_lf_from_base = 0;
Cheng Chena97394f2017-09-27 15:05:14 -07003966#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen880166a2017-10-02 17:48:48 -07003967 aom_wb_write_bit(wb, cm->delta_lf_multi);
Cheng Chena97394f2017-09-27 15:05:14 -07003968 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
3969 xd->prev_delta_lf[lf_id] = 0;
3970#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07003971 }
3972#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003973 }
3974 }
3975 }
Thomas Daedef636d5c2017-06-29 13:48:27 -07003976 if (!cm->all_lossless) {
3977 encode_cdef(cm, wb);
3978 }
Thomas Daedef636d5c2017-06-29 13:48:27 -07003979#if CONFIG_LOOP_RESTORATION
3980 encode_restoration_mode(cm, wb);
3981#endif // CONFIG_LOOP_RESTORATION
3982 write_tx_mode(cm, &cm->tx_mode, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003983
3984 if (cpi->allow_comp_inter_inter) {
3985 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
Zoe Liub05e5d12017-02-07 14:32:53 -08003986#if !CONFIG_REF_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07003987 const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE;
Zoe Liub05e5d12017-02-07 14:32:53 -08003988#endif // !CONFIG_REF_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07003989
Yaowu Xuf883b422016-08-30 14:01:10 -07003990 aom_wb_write_bit(wb, use_hybrid_pred);
Zoe Liub05e5d12017-02-07 14:32:53 -08003991#if !CONFIG_REF_ADAPT
Yaowu Xuf883b422016-08-30 14:01:10 -07003992 if (!use_hybrid_pred) aom_wb_write_bit(wb, use_compound_pred);
Zoe Liub05e5d12017-02-07 14:32:53 -08003993#endif // !CONFIG_REF_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07003994 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003995 write_compound_tools(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003996
Sarah Parkere68a3e42017-02-16 14:03:24 -08003997 aom_wb_write_bit(wb, cm->reduced_tx_set_used);
Sarah Parkere68a3e42017-02-16 14:03:24 -08003998
Angie Chiang6dbffbf2017-10-06 16:59:54 -07003999#if CONFIG_ADAPT_SCAN
Yunqing Wangea35e652017-11-09 12:33:30 -08004000#if CONFIG_EXT_TILE
4001 if (cm->large_scale_tile)
4002 assert(cm->use_adapt_scan == 0);
4003 else
4004#endif // CONFIG_EXT_TILE
4005 aom_wb_write_bit(wb, cm->use_adapt_scan);
Angie Chiang6dbffbf2017-10-06 16:59:54 -07004006#endif
4007
Sarah Parkerf289f9f2017-09-12 18:50:02 -07004008 if (!frame_is_intra_only(cm)) write_global_motion(cpi, wb);
Sarah Parker3e579a62017-08-23 16:53:20 -07004009
Yaowu Xuc27fc142016-08-22 16:08:15 -07004010 write_tile_info(cm, wb);
4011}
4012
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004013#else
4014// New function based on HLS R18
4015static void write_uncompressed_header_obu(AV1_COMP *cpi,
4016 struct aom_write_bit_buffer *wb) {
4017 AV1_COMMON *const cm = &cpi->common;
4018 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
4019
4020#if CONFIG_EXT_TILE
4021 aom_wb_write_literal(wb, cm->large_scale_tile, 1);
4022#endif // CONFIG_EXT_TILE
4023
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004024 // NOTE: By default all coded frames to be used as a reference
4025 cm->is_reference_frame = 1;
4026
4027 if (cm->show_existing_frame) {
4028 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
4029 const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];
4030
4031 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
4032 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
4033 "Buffer %d does not contain a reconstructed frame",
4034 frame_to_show);
4035 }
4036 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
4037
4038 aom_wb_write_bit(wb, 1); // show_existing_frame
4039 aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
4040
4041#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004042 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02004043 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01004044 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
4045 aom_wb_write_literal(wb, display_frame_id, frame_id_len);
4046 /* Add a zero byte to prevent emulation of superframe marker */
4047 /* Same logic as when when terminating the entropy coder */
4048 /* Consider to have this logic only one place */
4049 aom_wb_write_literal(wb, 0, 8);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004050 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004051#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004052
4053 return;
4054 } else {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004055 aom_wb_write_bit(wb, 0); // show_existing_frame
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004056 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004057
4058 cm->frame_type = cm->intra_only ? INTRA_ONLY_FRAME : cm->frame_type;
4059 aom_wb_write_literal(wb, cm->frame_type, 2);
4060
4061 if (cm->intra_only) cm->frame_type = INTRA_ONLY_FRAME;
4062
4063 aom_wb_write_bit(wb, cm->show_frame);
4064 aom_wb_write_bit(wb, cm->error_resilient_mode);
4065
4066#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004067 cm->invalid_delta_frame_id_minus1 = 0;
4068 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02004069 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01004070 aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004071 }
4072#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004073
4074#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01004075 if (cm->width > cm->seq_params.max_frame_width ||
4076 cm->height > cm->seq_params.max_frame_height) {
4077 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
4078 "Frame dimensions are larger than the maximum values");
4079 }
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004080 int frame_size_override_flag =
4081 (cm->width != cm->seq_params.max_frame_width ||
4082 cm->height != cm->seq_params.max_frame_height);
4083 aom_wb_write_bit(wb, frame_size_override_flag);
4084#endif
4085
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004086 if (cm->frame_type == KEY_FRAME) {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004087#if CONFIG_FRAME_SIZE
4088 write_frame_size(cm, frame_size_override_flag, wb);
4089#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004090 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004091#endif
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07004092 write_sb_size(cm, wb);
4093
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004094 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
4095#if CONFIG_AMVR
4096 if (cm->allow_screen_content_tools) {
RogerZhou10a03802017-10-26 11:49:48 -07004097 if (cm->seq_force_integer_mv == 2) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004098 aom_wb_write_bit(wb, 1);
4099 } else {
4100 aom_wb_write_bit(wb, 0);
RogerZhou10a03802017-10-26 11:49:48 -07004101 aom_wb_write_bit(wb, cm->seq_force_integer_mv == 0);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004102 }
4103 }
4104#endif
4105 } else if (cm->frame_type == INTRA_ONLY_FRAME) {
4106 if (cm->intra_only) aom_wb_write_bit(wb, cm->allow_screen_content_tools);
4107#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4108 if (!cm->error_resilient_mode) {
4109 if (cm->intra_only) {
4110 aom_wb_write_bit(wb,
4111 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4112 }
4113 }
4114#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004115 cpi->refresh_frame_mask = get_refresh_mask(cpi);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004116
4117 if (cm->intra_only) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004118 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004119#if CONFIG_FRAME_SIZE
4120 write_frame_size(cm, frame_size_override_flag, wb);
4121#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004122 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004123#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004124 }
4125 } else if (cm->frame_type == INTER_FRAME) {
4126 MV_REFERENCE_FRAME ref_frame;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004127#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4128 if (!cm->error_resilient_mode) {
4129 aom_wb_write_bit(wb, cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
4130 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
4131 aom_wb_write_bit(wb,
4132 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4133 }
4134#endif
4135
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004136 cpi->refresh_frame_mask = get_refresh_mask(cpi);
4137 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004138
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004139 if (!cpi->refresh_frame_mask) {
4140 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
4141 // will not be used as a reference
4142 cm->is_reference_frame = 0;
4143 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004144
4145 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4146 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
4147 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
4148 REF_FRAMES_LOG2);
Zoe Liu17af2742017-10-06 10:36:42 -07004149#if !CONFIG_FRAME_SIGN_BIAS
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004150 aom_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
Zoe Liu17af2742017-10-06 10:36:42 -07004151#endif // !CONFIG_FRAME_SIGN_BIAS
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004152#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004153 if (cm->seq_params.frame_id_numbers_present_flag) {
4154 int i = get_ref_frame_map_idx(cpi, ref_frame);
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02004155 int frame_id_len = cm->seq_params.frame_id_length;
4156 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01004157 int delta_frame_id_minus1 =
4158 ((cm->current_frame_id - cm->ref_frame_id[i] +
4159 (1 << frame_id_len)) %
4160 (1 << frame_id_len)) -
4161 1;
4162 if (delta_frame_id_minus1 < 0 ||
4163 delta_frame_id_minus1 >= (1 << diff_len))
4164 cm->invalid_delta_frame_id_minus1 = 1;
4165 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004166 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004167#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004168 }
4169
4170#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004171 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004172 write_frame_size_with_refs(cpi, wb);
4173 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004174 write_frame_size(cm, frame_size_override_flag, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004175 }
4176#else
4177 write_frame_size_with_refs(cpi, wb);
4178#endif
4179
4180#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07004181 if (cm->seq_force_integer_mv == 2) {
4182 aom_wb_write_bit(wb, cm->cur_frame_force_integer_mv == 0);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004183 }
4184#endif
4185 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
4186
4187 fix_interp_filter(cm, cpi->td.counts);
4188 write_frame_interp_filter(cm->interp_filter, wb);
4189#if CONFIG_TEMPMV_SIGNALING
4190 if (frame_might_use_prev_frame_mvs(cm)) {
4191 aom_wb_write_bit(wb, cm->use_prev_frame_mvs);
4192 }
4193#endif
4194 } else if (cm->frame_type == S_FRAME) {
4195 MV_REFERENCE_FRAME ref_frame;
4196
4197#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4198 if (!cm->error_resilient_mode) {
4199 aom_wb_write_bit(wb, cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
4200 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
4201 aom_wb_write_bit(wb,
4202 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4203 }
4204#endif
4205
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004206 if (!cpi->refresh_frame_mask) {
4207 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
4208 // will not be used as a reference
4209 cm->is_reference_frame = 0;
4210 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004211
4212 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4213 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
4214 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
4215 REF_FRAMES_LOG2);
4216 assert(cm->ref_frame_sign_bias[ref_frame] == 0);
4217#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004218 if (cm->seq_params.frame_id_numbers_present_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004219 int i = get_ref_frame_map_idx(cpi, ref_frame);
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02004220 int frame_id_len = cm->seq_params.frame_id_length;
4221 int diff_len = cm->seq_params.delta_frame_id_length;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004222 int delta_frame_id_minus1 =
4223 ((cm->current_frame_id - cm->ref_frame_id[i] +
4224 (1 << frame_id_len)) %
4225 (1 << frame_id_len)) -
4226 1;
4227 if (delta_frame_id_minus1 < 0 ||
4228 delta_frame_id_minus1 >= (1 << diff_len))
4229 cm->invalid_delta_frame_id_minus1 = 1;
4230 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
4231 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004232#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004233 }
4234
4235#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004236 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004237 write_frame_size_with_refs(cpi, wb);
4238 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004239 write_frame_size(cm, frame_size_override_flag, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004240 }
4241#else
4242 write_frame_size_with_refs(cpi, wb);
4243#endif
4244
4245 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
4246
4247 fix_interp_filter(cm, cpi->td.counts);
4248 write_frame_interp_filter(cm->interp_filter, wb);
4249#if CONFIG_TEMPMV_SIGNALING
4250 if (frame_might_use_prev_frame_mvs(cm)) {
4251 aom_wb_write_bit(wb, cm->use_prev_frame_mvs);
4252 }
4253#endif
4254 }
4255
Soo-Chul Hanebdbcb42017-11-02 18:26:21 -04004256#if CONFIG_FRAME_MARKER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004257 if (cm->show_frame == 0) {
4258 int arf_offset = AOMMIN(
4259 (MAX_GF_INTERVAL - 1),
4260 cpi->twopass.gf_group.arf_src_offset[cpi->twopass.gf_group.index]);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004261 int brf_offset =
4262 cpi->twopass.gf_group.brf_src_offset[cpi->twopass.gf_group.index];
4263
4264 arf_offset = AOMMIN((MAX_GF_INTERVAL - 1), arf_offset + brf_offset);
Cheng Chend300f0e2017-12-01 10:46:23 -08004265 aom_wb_write_literal(wb, arf_offset, FRAME_OFFSET_BITS);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004266 }
4267#endif
4268
4269#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004270 if (cm->seq_params.frame_id_numbers_present_flag) {
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004271 cm->refresh_mask =
4272 cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
4273 }
4274#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004275
4276 if (!cm->error_resilient_mode) {
4277 aom_wb_write_bit(
4278 wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
4279 }
4280#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4281 aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);
4282#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004283 encode_loopfilter(cm, wb);
4284 encode_quantization(cm, wb);
4285 encode_segmentation(cm, xd, wb);
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +01004286#if CONFIG_Q_SEGMENTATION
4287 encode_q_segmentation(cm, wb);
4288#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004289 {
Thomas Davies28444be2017-10-13 18:12:25 +01004290 int delta_q_allowed = 1;
4291#if !CONFIG_EXT_DELTA_Q
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004292 int i;
4293 struct segmentation *const seg = &cm->seg;
4294 int segment_quantizer_active = 0;
4295 for (i = 0; i < MAX_SEGMENTS; i++) {
4296 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
4297 segment_quantizer_active = 1;
4298 }
4299 }
Rostislav Pehlivanovda067792017-11-14 05:23:12 +00004300#if CONFIG_Q_SEGMENTATION
4301 segment_quantizer_active |= !!seg->q_lvls;
4302#endif
Thomas Davies28444be2017-10-13 18:12:25 +01004303 delta_q_allowed = !segment_quantizer_active;
4304#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004305
4306 if (cm->delta_q_present_flag)
Thomas Davies28444be2017-10-13 18:12:25 +01004307 assert(delta_q_allowed == 1 && cm->base_qindex > 0);
4308 if (delta_q_allowed == 1 && cm->base_qindex > 0) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004309 aom_wb_write_bit(wb, cm->delta_q_present_flag);
4310 if (cm->delta_q_present_flag) {
4311 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
4312 xd->prev_qindex = cm->base_qindex;
4313#if CONFIG_EXT_DELTA_Q
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004314 aom_wb_write_bit(wb, cm->delta_lf_present_flag);
4315 if (cm->delta_lf_present_flag) {
4316 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_lf_res) - 1, 2);
Cheng Chena97394f2017-09-27 15:05:14 -07004317#if CONFIG_LOOPFILTER_LEVEL
4318 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
4319 xd->prev_delta_lf[lf_id] = 0;
4320#endif // CONFIG_LOOPFILTER_LEVEL
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004321 xd->prev_delta_lf_from_base = 0;
4322 }
4323#endif // CONFIG_EXT_DELTA_Q
4324 }
4325 }
4326 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004327 if (!cm->all_lossless) {
4328 encode_cdef(cm, wb);
4329 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004330#if CONFIG_LOOP_RESTORATION
4331 encode_restoration_mode(cm, wb);
4332#endif // CONFIG_LOOP_RESTORATION
4333 write_tx_mode(cm, &cm->tx_mode, wb);
4334
4335 if (cpi->allow_comp_inter_inter) {
4336 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
4337#if !CONFIG_REF_ADAPT
4338 const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE;
4339#endif // !CONFIG_REF_ADAPT
4340
4341 aom_wb_write_bit(wb, use_hybrid_pred);
4342#if !CONFIG_REF_ADAPT
4343 if (!use_hybrid_pred) aom_wb_write_bit(wb, use_compound_pred);
4344#endif // !CONFIG_REF_ADAPT
4345 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004346 write_compound_tools(cm, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004347
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004348 aom_wb_write_bit(wb, cm->reduced_tx_set_used);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004349
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004350 if (!frame_is_intra_only(cm)) write_global_motion(cpi, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004351
4352 write_tile_info(cm, wb);
4353}
4354#endif // CONFIG_OBU
4355
Yaowu Xuf883b422016-08-30 14:01:10 -07004356static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data) {
4357 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004358 FRAME_CONTEXT *const fc = cm->fc;
Yaowu Xuf883b422016-08-30 14:01:10 -07004359 aom_writer *header_bc;
Ryanf0e39192017-10-09 09:45:13 -07004360
Thomas Davies80188d12016-10-26 16:08:35 -07004361 const int probwt = cm->num_tg;
Thomas Davies04e5aa72017-06-28 14:36:39 +01004362 (void)probwt;
Thomas Davies04e5aa72017-06-28 14:36:39 +01004363 (void)fc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004364
Yaowu Xuf883b422016-08-30 14:01:10 -07004365 aom_writer real_header_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004366 header_bc = &real_header_bc;
Alex Converse30f0e152017-03-28 10:13:27 -07004367 aom_start_encode(header_bc, data);
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07004368
Debargha Mukherjee801cc922017-09-22 17:22:50 -07004369 if (!frame_is_intra_only(cm)) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004370 if (cm->reference_mode != COMPOUND_REFERENCE &&
4371 cm->allow_interintra_compound) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004372 }
Sarah Parker689b0ca2016-10-11 12:06:33 -07004373 }
Yaowu Xuf883b422016-08-30 14:01:10 -07004374 aom_stop_encode(header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004375 assert(header_bc->pos <= 0xffff);
4376 return header_bc->pos;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004377}
4378
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004379#if !CONFIG_OBU || CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004380static int choose_size_bytes(uint32_t size, int spare_msbs) {
4381 // Choose the number of bytes required to represent size, without
4382 // using the 'spare_msbs' number of most significant bits.
4383
4384 // Make sure we will fit in 4 bytes to start with..
4385 if (spare_msbs > 0 && size >> (32 - spare_msbs) != 0) return -1;
4386
4387 // Normalise to 32 bits
4388 size <<= spare_msbs;
4389
4390 if (size >> 24 != 0)
4391 return 4;
4392 else if (size >> 16 != 0)
4393 return 3;
4394 else if (size >> 8 != 0)
4395 return 2;
4396 else
4397 return 1;
4398}
4399
4400static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) {
4401 switch (sz) {
4402 case 1: dst[0] = (uint8_t)(val & 0xff); break;
4403 case 2: mem_put_le16(dst, val); break;
4404 case 3: mem_put_le24(dst, val); break;
4405 case 4: mem_put_le32(dst, val); break;
James Zern06c372d2017-04-20 16:08:29 -07004406 default: assert(0 && "Invalid size"); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004407 }
4408}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004409
Yaowu Xuf883b422016-08-30 14:01:10 -07004410static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004411 const uint32_t data_size, const uint32_t max_tile_size,
4412 const uint32_t max_tile_col_size,
4413 int *const tile_size_bytes,
4414 int *const tile_col_size_bytes) {
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004415 // Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
4416 int tsb;
4417 int tcsb;
4418
Yaowu Xuc27fc142016-08-22 16:08:15 -07004419#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004420 if (cm->large_scale_tile) {
4421 // The top bit in the tile size field indicates tile copy mode, so we
4422 // have 1 less bit to code the tile size
4423 tsb = choose_size_bytes(max_tile_size, 1);
4424 tcsb = choose_size_bytes(max_tile_col_size, 0);
4425 } else {
4426#endif // CONFIG_EXT_TILE
4427 tsb = choose_size_bytes(max_tile_size, 0);
4428 tcsb = 4; // This is ignored
4429 (void)max_tile_col_size;
4430#if CONFIG_EXT_TILE
4431 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004432#endif // CONFIG_EXT_TILE
4433
4434 assert(tsb > 0);
4435 assert(tcsb > 0);
4436
4437 *tile_size_bytes = tsb;
4438 *tile_col_size_bytes = tcsb;
4439
4440 if (tsb == 4 && tcsb == 4) {
4441 return data_size;
4442 } else {
4443 uint32_t wpos = 0;
4444 uint32_t rpos = 0;
4445
4446#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004447 if (cm->large_scale_tile) {
4448 int tile_row;
4449 int tile_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004450
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004451 for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
4452 // All but the last column has a column header
4453 if (tile_col < cm->tile_cols - 1) {
4454 uint32_t tile_col_size = mem_get_le32(dst + rpos);
4455 rpos += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004456
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004457 // Adjust the tile column size by the number of bytes removed
4458 // from the tile size fields.
4459 tile_col_size -= (4 - tsb) * cm->tile_rows;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004460
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004461 mem_put_varsize(dst + wpos, tcsb, tile_col_size);
4462 wpos += tcsb;
4463 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004464
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004465 for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
4466 // All, including the last row has a header
4467 uint32_t tile_header = mem_get_le32(dst + rpos);
4468 rpos += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004469
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004470 // If this is a copy tile, we need to shift the MSB to the
4471 // top bit of the new width, and there is no data to copy.
4472 if (tile_header >> 31 != 0) {
4473 if (tsb < 4) tile_header >>= 32 - 8 * tsb;
4474 mem_put_varsize(dst + wpos, tsb, tile_header);
4475 wpos += tsb;
4476 } else {
4477 mem_put_varsize(dst + wpos, tsb, tile_header);
4478 wpos += tsb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004479
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004480 memmove(dst + wpos, dst + rpos, tile_header);
4481 rpos += tile_header;
4482 wpos += tile_header;
4483 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004484 }
4485 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004486 } else {
4487#endif // CONFIG_EXT_TILE
4488 const int n_tiles = cm->tile_cols * cm->tile_rows;
4489 int n;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004490
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004491 for (n = 0; n < n_tiles; n++) {
4492 int tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004493
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004494 if (n == n_tiles - 1) {
4495 tile_size = data_size - rpos;
4496 } else {
4497 tile_size = mem_get_le32(dst + rpos);
4498 rpos += 4;
4499 mem_put_varsize(dst + wpos, tsb, tile_size);
4500 wpos += tsb;
4501 }
4502
4503 memmove(dst + wpos, dst + rpos, tile_size);
4504
4505 rpos += tile_size;
4506 wpos += tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004507 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004508#if CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004509 }
4510#endif // CONFIG_EXT_TILE
4511
4512 assert(rpos > wpos);
4513 assert(rpos == data_size);
4514
4515 return wpos;
4516 }
4517}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004518#endif
4519
4520#if CONFIG_OBU
Soo-Chul Han38427e82017-09-27 15:06:13 -04004521
4522uint32_t write_obu_header(OBU_TYPE obu_type, int obu_extension,
4523 uint8_t *const dst) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004524 struct aom_write_bit_buffer wb = { dst, 0 };
4525 uint32_t size = 0;
4526
Soo-Chul Han38427e82017-09-27 15:06:13 -04004527 // first bit is obu_forbidden_bit according to R19
4528 aom_wb_write_literal(&wb, 0, 1);
4529 aom_wb_write_literal(&wb, (int)obu_type, 4);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004530 aom_wb_write_literal(&wb, 0, 2);
4531 aom_wb_write_literal(&wb, obu_extension ? 1 : 0, 1);
4532 if (obu_extension) {
4533 aom_wb_write_literal(&wb, obu_extension & 0xFF, 8);
4534 }
4535
4536 size = aom_wb_bytes_written(&wb);
4537 return size;
4538}
4539
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004540static uint32_t write_sequence_header_obu(AV1_COMP *cpi, uint8_t *const dst) {
4541 AV1_COMMON *const cm = &cpi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004542 struct aom_write_bit_buffer wb = { dst, 0 };
4543 uint32_t size = 0;
4544
4545 write_profile(cm->profile, &wb);
4546
4547 aom_wb_write_literal(&wb, 0, 4);
4548
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01004549 write_sequence_header(cpi, &wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004550
4551 // color_config
4552 write_bitdepth_colorspace_sampling(cm, &wb);
4553
4554 size = aom_wb_bytes_written(&wb);
4555 return size;
4556}
4557
4558static uint32_t write_frame_header_obu(AV1_COMP *cpi, uint8_t *const dst) {
4559 AV1_COMMON *const cm = &cpi->common;
4560 struct aom_write_bit_buffer wb = { dst, 0 };
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004561 uint32_t total_size = 0;
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004562 uint32_t compressed_hdr_size, uncompressed_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004563
4564 write_uncompressed_header_obu(cpi, &wb);
4565
4566 if (cm->show_existing_frame) {
4567 total_size = aom_wb_bytes_written(&wb);
4568 return total_size;
4569 }
4570
4571 // write the tile length code (Always 4 bytes for now)
4572 aom_wb_write_literal(&wb, 3, 2);
4573
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004574 if (!use_compressed_header(cm)) {
4575 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
4576 compressed_hdr_size = 0;
4577 } else {
4578 // placeholder for the compressed header length
4579 struct aom_write_bit_buffer compr_hdr_len_wb = wb;
4580 aom_wb_write_literal(&wb, 0, 16);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004581
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004582 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
4583 compressed_hdr_size =
4584 write_compressed_header(cpi, dst + uncompressed_hdr_size);
4585 aom_wb_overwrite_literal(&compr_hdr_len_wb, (int)(compressed_hdr_size), 16);
4586 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004587
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004588 total_size = uncompressed_hdr_size + compressed_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004589 return total_size;
4590}
4591
4592static uint32_t write_tile_group_header(uint8_t *const dst, int startTile,
4593 int endTile, int tiles_log2) {
4594 struct aom_write_bit_buffer wb = { dst, 0 };
4595 uint32_t size = 0;
4596
4597 aom_wb_write_literal(&wb, startTile, tiles_log2);
4598 aom_wb_write_literal(&wb, endTile, tiles_log2);
4599
4600 size = aom_wb_bytes_written(&wb);
4601 return size;
4602}
4603
4604static uint32_t write_tiles_in_tg_obus(AV1_COMP *const cpi, uint8_t *const dst,
4605 unsigned int *max_tile_size,
4606 unsigned int *max_tile_col_size,
4607 uint8_t *const frame_header_obu_location,
4608 uint32_t frame_header_obu_size,
4609 int insert_frame_header_obu_flag) {
Thomas Davies4822e142017-10-10 11:30:36 +01004610 AV1_COMMON *const cm = &cpi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004611 aom_writer mode_bc;
4612 int tile_row, tile_col;
4613 TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
4614 TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
4615 uint32_t total_size = 0;
4616 const int tile_cols = cm->tile_cols;
4617 const int tile_rows = cm->tile_rows;
4618 unsigned int tile_size = 0;
4619 const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
4620 // Fixed size tile groups for the moment
4621 const int num_tg_hdrs = cm->num_tg;
4622 const int tg_size =
4623#if CONFIG_EXT_TILE
4624 (cm->large_scale_tile)
4625 ? 1
4626 :
4627#endif // CONFIG_EXT_TILE
4628 (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
4629 int tile_count = 0;
4630 int curr_tg_data_size = 0;
4631 uint8_t *data = dst;
4632 int new_tg = 1;
4633#if CONFIG_EXT_TILE
4634 const int have_tiles = tile_cols * tile_rows > 1;
4635#endif
4636
Thomas Davies4822e142017-10-10 11:30:36 +01004637#if CONFIG_SIMPLE_BWD_ADAPT
4638 cm->largest_tile_id = 0;
4639#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004640 *max_tile_size = 0;
4641 *max_tile_col_size = 0;
4642
4643#if CONFIG_EXT_TILE
4644 if (cm->large_scale_tile) {
4645 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
4646 TileInfo tile_info;
4647 const int is_last_col = (tile_col == tile_cols - 1);
4648 const uint32_t col_offset = total_size;
4649
4650 av1_tile_set_col(&tile_info, cm, tile_col);
4651
4652 // The last column does not have a column header
4653 if (!is_last_col) total_size += 4;
4654
4655 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
4656 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
4657 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
4658 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
4659 const int data_offset = have_tiles ? 4 : 0;
4660 const int tile_idx = tile_row * tile_cols + tile_col;
4661 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
4662 av1_tile_set_row(&tile_info, cm, tile_row);
4663
4664 buf->data = dst + total_size;
4665
4666 // Is CONFIG_EXT_TILE = 1, every tile in the row has a header,
4667 // even for the last one, unless no tiling is used at all.
4668 total_size += data_offset;
4669 // Initialise tile context from the frame context
4670 this_tile->tctx = *cm->fc;
4671 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07004672 mode_bc.allow_update_cdf = !cm->large_scale_tile;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004673 aom_start_encode(&mode_bc, buf->data + data_offset);
4674 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
4675 assert(tok == tok_end);
4676 aom_stop_encode(&mode_bc);
4677 tile_size = mode_bc.pos;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004678 buf->size = tile_size;
4679
4680 // Record the maximum tile size we see, so we can compact headers later.
Thomas Davies4822e142017-10-10 11:30:36 +01004681 if (tile_size > *max_tile_size) {
4682 *max_tile_size = tile_size;
4683#if CONFIG_SIMPLE_BWD_ADAPT
4684 cm->largest_tile_id = tile_cols * tile_row + tile_col;
4685#endif
4686 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004687
4688 if (have_tiles) {
4689 // tile header: size of this tile, or copy offset
4690 uint32_t tile_header = tile_size;
4691 const int tile_copy_mode =
4692 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256)
4693 ? 1
4694 : 0;
4695
4696 // If tile_copy_mode = 1, check if this tile is a copy tile.
4697 // Very low chances to have copy tiles on the key frames, so don't
4698 // search on key frames to reduce unnecessary search.
4699 if (cm->frame_type != KEY_FRAME && tile_copy_mode) {
4700 const int idendical_tile_offset =
4701 find_identical_tile(tile_row, tile_col, tile_buffers);
4702
4703 if (idendical_tile_offset > 0) {
4704 tile_size = 0;
4705 tile_header = idendical_tile_offset | 0x80;
4706 tile_header <<= 24;
4707 }
4708 }
4709
4710 mem_put_le32(buf->data, tile_header);
4711 }
4712
4713 total_size += tile_size;
4714 }
4715
4716 if (!is_last_col) {
4717 uint32_t col_size = total_size - col_offset - 4;
4718 mem_put_le32(dst + col_offset, col_size);
4719
4720 // If it is not final packing, record the maximum tile column size we
4721 // see, otherwise, check if the tile size is out of the range.
4722 *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
4723 }
4724 }
4725 } else {
4726#endif // CONFIG_EXT_TILE
4727
4728 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
4729 TileInfo tile_info;
4730 const int is_last_row = (tile_row == tile_rows - 1);
4731 av1_tile_set_row(&tile_info, cm, tile_row);
4732
4733 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
4734 const int tile_idx = tile_row * tile_cols + tile_col;
4735 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
4736 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
4737 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
4738 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
4739 const int is_last_col = (tile_col == tile_cols - 1);
4740 const int is_last_tile = is_last_col && is_last_row;
4741 int is_last_tile_in_tg = 0;
4742
4743 if (new_tg) {
4744 if (insert_frame_header_obu_flag && tile_idx) {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004745 // insert a copy of frame header OBU (including
4746 // PRE_OBU_SIZE_BYTES-byte size),
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004747 // except before the first tile group
4748 data = dst + total_size;
4749 memmove(data, frame_header_obu_location, frame_header_obu_size);
4750 total_size += frame_header_obu_size;
4751 }
4752 data = dst + total_size;
4753 // A new tile group begins at this tile. Write the obu header and
4754 // tile group header
Soo-Chul Han38427e82017-09-27 15:06:13 -04004755 curr_tg_data_size =
4756 write_obu_header(OBU_TILE_GROUP, 0, data + PRE_OBU_SIZE_BYTES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004757 if (n_log2_tiles)
4758 curr_tg_data_size += write_tile_group_header(
Soo-Chul Han38427e82017-09-27 15:06:13 -04004759 data + curr_tg_data_size + PRE_OBU_SIZE_BYTES, tile_idx,
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004760 AOMMIN(tile_idx + tg_size - 1, tile_cols * tile_rows - 1),
4761 n_log2_tiles);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004762 total_size += curr_tg_data_size + PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004763 new_tg = 0;
4764 tile_count = 0;
4765 }
4766 tile_count++;
4767 av1_tile_set_col(&tile_info, cm, tile_col);
4768
4769 if (tile_count == tg_size || tile_idx == (tile_cols * tile_rows - 1)) {
4770 is_last_tile_in_tg = 1;
4771 new_tg = 1;
4772 } else {
4773 is_last_tile_in_tg = 0;
4774 }
4775
4776#if CONFIG_DEPENDENT_HORZTILES
4777 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
4778#endif
4779 buf->data = dst + total_size;
4780
Soo-Chul Han38427e82017-09-27 15:06:13 -04004781// The last tile of the tile group does not have a header.
4782#if CONFIG_ADD_4BYTES_OBUSIZE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004783 if (!is_last_tile_in_tg) total_size += 4;
Soo-Chul Han38427e82017-09-27 15:06:13 -04004784#else
4785 total_size += 4;
4786#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004787
4788 // Initialise tile context from the frame context
4789 this_tile->tctx = *cm->fc;
4790 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07004791 mode_bc.allow_update_cdf = 1;
Soo-Chul Han13f0d9c2017-10-22 21:55:52 -04004792#if CONFIG_LOOP_RESTORATION
Rupert Swarbrick76405202017-11-07 16:35:55 +00004793 av1_reset_loop_restoration(&cpi->td.mb.e_mbd);
Soo-Chul Han13f0d9c2017-10-22 21:55:52 -04004794#endif // CONFIG_LOOP_RESTORATION
4795
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004796 aom_start_encode(&mode_bc, dst + total_size);
4797 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
4798#if !CONFIG_LV_MAP
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004799 assert(tok == tok_end);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004800#endif // !CONFIG_LV_MAP
4801 aom_stop_encode(&mode_bc);
4802 tile_size = mode_bc.pos;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004803 assert(tile_size > 0);
4804
4805 curr_tg_data_size += (tile_size + (is_last_tile_in_tg ? 0 : 4));
4806 buf->size = tile_size;
Thomas Davies4822e142017-10-10 11:30:36 +01004807#if CONFIG_SIMPLE_BWD_ADAPT
4808 if (tile_size > *max_tile_size) {
4809 cm->largest_tile_id = tile_cols * tile_row + tile_col;
4810 }
4811#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004812 if (!is_last_tile) {
4813 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
4814 }
Thomas Davies4822e142017-10-10 11:30:36 +01004815
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004816 if (!is_last_tile_in_tg) {
4817 // size of this tile
4818 mem_put_le32(buf->data, tile_size);
4819 } else {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004820#if CONFIG_ADD_4BYTES_OBUSIZE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004821 // write current tile group size
4822 mem_put_le32(data, curr_tg_data_size);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004823#else
4824 mem_put_le32(buf->data, tile_size);
4825#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004826 }
4827
4828 total_size += tile_size;
4829 }
4830 }
4831#if CONFIG_EXT_TILE
4832 }
4833#endif // CONFIG_EXT_TILE
4834 return (uint32_t)total_size;
4835}
4836
4837#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004838
Yaowu Xuf883b422016-08-30 14:01:10 -07004839void av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004840 uint8_t *data = dst;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004841 uint32_t data_size;
Thomas Daviesb25ba502017-07-18 10:18:24 +01004842#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004843 AV1_COMMON *const cm = &cpi->common;
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004844 uint32_t compressed_hdr_size = 0;
4845 uint32_t uncompressed_hdr_size;
Thomas Davies80188d12016-10-26 16:08:35 -07004846 struct aom_write_bit_buffer saved_wb;
Yaowu Xuf883b422016-08-30 14:01:10 -07004847 struct aom_write_bit_buffer wb = { data, 0 };
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004848 const int have_tiles = cm->tile_cols * cm->tile_rows > 1;
4849 int tile_size_bytes;
4850 int tile_col_size_bytes;
Thomas Daviesb25ba502017-07-18 10:18:24 +01004851#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004852 unsigned int max_tile_size;
4853 unsigned int max_tile_col_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004854#if CONFIG_OBU
4855#if !CONFIG_EXT_TILE
4856 AV1_COMMON *const cm = &cpi->common;
4857#endif
4858 uint32_t obu_size;
4859 uint8_t *frame_header_location;
4860 uint32_t frame_header_size;
4861#endif
Thomas Davies80188d12016-10-26 16:08:35 -07004862
Angie Chiangb11aedf2017-03-10 17:31:46 -08004863#if CONFIG_BITSTREAM_DEBUG
4864 bitstream_queue_reset_write();
4865#endif
4866
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004867#if CONFIG_OBU
Soo-Chul Han38427e82017-09-27 15:06:13 -04004868 // The TD is now written outside the frame encode loop
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004869
4870 // write sequence header obu if KEY_FRAME, preceded by 4-byte size
4871 if (cm->frame_type == KEY_FRAME) {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004872 obu_size =
4873 write_obu_header(OBU_SEQUENCE_HEADER, 0, data + PRE_OBU_SIZE_BYTES);
4874 obu_size +=
4875 write_sequence_header_obu(cpi, data + PRE_OBU_SIZE_BYTES + obu_size);
4876#if CONFIG_ADD_4BYTES_OBUSIZE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004877 mem_put_le32(data, obu_size);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004878#endif
4879 data += obu_size + PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004880 }
4881
4882 // write frame header obu, preceded by 4-byte size
Soo-Chul Han38427e82017-09-27 15:06:13 -04004883 frame_header_location = data + PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004884 obu_size = write_obu_header(OBU_FRAME_HEADER, 0, frame_header_location);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004885 frame_header_size =
4886 write_frame_header_obu(cpi, data + PRE_OBU_SIZE_BYTES + obu_size);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004887 obu_size += frame_header_size;
Soo-Chul Han38427e82017-09-27 15:06:13 -04004888#if CONFIG_ADD_4BYTES_OBUSIZE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004889 mem_put_le32(data, obu_size);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004890#endif
4891 data += obu_size + PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004892
4893 if (cm->show_existing_frame) {
4894 data_size = 0;
4895 } else {
4896 // Each tile group obu will be preceded by 4-byte size of the tile group
4897 // obu
Soo-Chul Han38427e82017-09-27 15:06:13 -04004898 data_size = write_tiles_in_tg_obus(
4899 cpi, data, &max_tile_size, &max_tile_col_size,
4900 frame_header_location - PRE_OBU_SIZE_BYTES,
4901 obu_size + PRE_OBU_SIZE_BYTES, 1 /* cm->error_resilient_mode */);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004902 }
4903
4904#endif
4905
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004906#if CONFIG_EXT_TILE
4907 if (cm->large_scale_tile) {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004908#if !CONFIG_OBU
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004909 write_uncompressed_header_frame(cpi, &wb);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004910#else
4911 write_uncompressed_header_obu(cpi, &wb);
4912#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004913
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004914 if (cm->show_existing_frame) {
4915 *size = aom_wb_bytes_written(&wb);
4916 return;
4917 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004918
4919 // We do not know these in advance. Output placeholder bit.
4920 saved_wb = wb;
4921 // Write tile size magnitudes
4922 if (have_tiles) {
4923 // Note that the last item in the uncompressed header is the data
4924 // describing tile configuration.
4925 // Number of bytes in tile column size - 1
4926 aom_wb_write_literal(&wb, 0, 2);
4927
4928 // Number of bytes in tile size - 1
4929 aom_wb_write_literal(&wb, 0, 2);
4930 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004931
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004932 if (!use_compressed_header(cm)) {
4933 uncompressed_hdr_size = (uint32_t)aom_wb_bytes_written(&wb);
4934 aom_clear_system_state();
4935 compressed_hdr_size = 0;
4936 } else {
4937 // Size of compressed header
4938 aom_wb_write_literal(&wb, 0, 16);
4939 uncompressed_hdr_size = (uint32_t)aom_wb_bytes_written(&wb);
4940 aom_clear_system_state();
4941 // Write the compressed header
4942 compressed_hdr_size =
4943 write_compressed_header(cpi, data + uncompressed_hdr_size);
4944 }
4945 data += uncompressed_hdr_size + compressed_hdr_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004946
Yunqing Wangb041d8a2017-11-15 12:31:18 -08004947#define EXT_TILE_DEBUG 0
4948#if EXT_TILE_DEBUG
4949 {
4950 char fn[20] = "./fh";
4951 fn[4] = cm->current_video_frame / 100 + '0';
4952 fn[5] = (cm->current_video_frame % 100) / 10 + '0';
4953 fn[6] = (cm->current_video_frame % 10) + '0';
4954 fn[7] = '\0';
4955 av1_print_uncompressed_frame_header(
4956 data - uncompressed_hdr_size - compressed_hdr_size,
4957 uncompressed_hdr_size, fn);
4958 }
4959#endif // EXT_TILE_DEBUG
4960#undef EXT_TILE_DEBUG
4961
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004962 // Write the encoded tile data
4963 data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
4964 } else {
4965#endif // CONFIG_EXT_TILE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004966#if !CONFIG_OBU
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004967 data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004968#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004969#if CONFIG_EXT_TILE
4970 }
4971#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004972#if CONFIG_EXT_TILE
4973 if (cm->large_scale_tile) {
4974 if (have_tiles) {
4975 data_size =
4976 remux_tiles(cm, data, data_size, max_tile_size, max_tile_col_size,
4977 &tile_size_bytes, &tile_col_size_bytes);
4978 }
4979
4980 data += data_size;
4981
4982 // Now fill in the gaps in the uncompressed header.
4983 if (have_tiles) {
4984 assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
4985 aom_wb_write_literal(&saved_wb, tile_col_size_bytes - 1, 2);
4986
4987 assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
4988 aom_wb_write_literal(&saved_wb, tile_size_bytes - 1, 2);
4989 }
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004990 // TODO(jbb): Figure out what to do if compressed_hdr_size > 16 bits.
4991 assert(compressed_hdr_size <= 0xffff);
Rupert Swarbrick53685902017-10-27 13:35:19 +01004992 // Fill in the compressed header size (but only if we're using one)
4993 if (use_compressed_header(cm)) {
4994 aom_wb_write_literal(&saved_wb, compressed_hdr_size, 16);
4995 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004996 } else {
4997#endif // CONFIG_EXT_TILE
4998 data += data_size;
4999#if CONFIG_EXT_TILE
5000 }
5001#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07005002 *size = data - dst;
5003}