/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <limits.h>
#include <stdio.h>

#include "aom/aom_encoder.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/binary_codes_writer.h"
#include "aom_dsp/bitwriter_buffer.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_ports/system_state.h"
#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#include "av1/common/cdef.h"
#if CONFIG_CFL
#include "av1/common/cfl.h"
#endif
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/mvref_common.h"
#include "av1/common/odintrin.h"
#include "av1/common/pred_common.h"
#include "av1/common/reconinter.h"
#if CONFIG_EXT_INTRA
#include "av1/common/reconintra.h"
#endif  // CONFIG_EXT_INTRA
#include "av1/common/seg_common.h"
#include "av1/common/tile_common.h"

#if CONFIG_LV_MAP
#include "av1/encoder/encodetxb.h"
#endif  // CONFIG_LV_MAP
#include "av1/encoder/bitstream.h"
#include "av1/encoder/cost.h"
#include "av1/encoder/encodemv.h"
#include "av1/encoder/mcomp.h"
#include "av1/encoder/palette.h"
#include "av1/encoder/segmentation.h"
#include "av1/encoder/subexp.h"
#include "av1/encoder/tokenize.h"

#define ENC_MISMATCH_DEBUG 0

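// Writes v, assumed to lie in [0, n), with a quasi-uniform code: the first
// (1 << l) - n values use l - 1 bits and the remaining values use l bits,
// where l = get_unsigned_bits(n).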
static INLINE void write_uniform(aom_writer *w, int n, int v) {
  const int l = get_unsigned_bits(n);
  const int m = (1 << l) - n;
  if (l == 0) return;
  if (v < m) {
    aom_write_literal(w, v, l - 1);
  } else {
    aom_write_literal(w, m + ((v - m) >> 1), l - 1);
    aom_write_literal(w, (v - m) & 1, 1);
  }
}

static struct av1_token interintra_mode_encodings[INTERINTRA_MODES];
#if CONFIG_JNT_COMP
static struct av1_token compound_type_encodings[COMPOUND_TYPES - 1];
#else
static struct av1_token compound_type_encodings[COMPOUND_TYPES];
#endif  // CONFIG_JNT_COMP
#if CONFIG_LOOP_RESTORATION
static void loop_restoration_write_sb_coeffs(const AV1_COMMON *const cm,
                                             MACROBLOCKD *xd,
                                             const RestorationUnitInfo *rui,
                                             aom_writer *const w, int plane);
#endif  // CONFIG_LOOP_RESTORATION
#if CONFIG_OBU
static void write_uncompressed_header_obu(AV1_COMP *cpi,
                                          struct aom_write_bit_buffer *wb);
#else
static void write_uncompressed_header_frame(AV1_COMP *cpi,
                                            struct aom_write_bit_buffer *wb);
#endif

static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data);

#if !CONFIG_OBU || CONFIG_EXT_TILE
static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
                       const uint32_t data_size, const uint32_t max_tile_size,
                       const uint32_t max_tile_col_size,
                       int *const tile_size_bytes,
                       int *const tile_col_size_bytes);
#endif

void av1_encode_token_init(void) {
  av1_tokens_from_tree(interintra_mode_encodings, av1_interintra_mode_tree);
  av1_tokens_from_tree(compound_type_encodings, av1_compound_type_tree);
}

static void write_intra_mode_kf(FRAME_CONTEXT *frame_ctx, const MODE_INFO *mi,
                                const MODE_INFO *above_mi,
                                const MODE_INFO *left_mi, PREDICTION_MODE mode,
                                aom_writer *w) {
#if CONFIG_INTRABC
  assert(!is_intrabc_block(&mi->mbmi));
#endif  // CONFIG_INTRABC
  (void)mi;
  aom_write_symbol(w, mode, get_y_mode_cdf(frame_ctx, above_mi, left_mi),
                   INTRA_MODES);
}

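// Signals a single-reference inter prediction mode as a cascade of binary
// symbols: NEWMV or not, then GLOBALMV or not, then NEARESTMV vs. NEARMV,
// each with its own context derived from mode_ctx.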
static void write_inter_mode(aom_writer *w, PREDICTION_MODE mode,
                             FRAME_CONTEXT *ec_ctx, const int16_t mode_ctx) {
  const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;

  aom_write_symbol(w, mode != NEWMV, ec_ctx->newmv_cdf[newmv_ctx], 2);

  if (mode != NEWMV) {
    if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) {
      assert(mode == GLOBALMV);
      return;
    }

    const int16_t zeromv_ctx =
        (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
    aom_write_symbol(w, mode != GLOBALMV, ec_ctx->zeromv_cdf[zeromv_ctx], 2);

    if (mode != GLOBALMV) {
      int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;

      if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
      if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
      if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;
      aom_write_symbol(w, mode != NEARESTMV, ec_ctx->refmv_cdf[refmv_ctx], 2);
    }
  }
}

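// Signals which candidate of the reference MV stack (the DRL index) is used:
// a sequence of binary "not this candidate, keep going" symbols, each coded
// with a context from av1_drl_ctx().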
static void write_drl_idx(FRAME_CONTEXT *ec_ctx, const MB_MODE_INFO *mbmi,
                          const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

  assert(mbmi->ref_mv_idx < 3);

  const int new_mv = mbmi->mode == NEWMV || mbmi->mode == NEW_NEWMV;
  if (new_mv) {
    int idx;
    for (idx = 0; idx < 2; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1 ||
          CONFIG_OPT_REF_MV) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);

        aom_write_symbol(w, mbmi->ref_mv_idx != idx, ec_ctx->drl_cdf[drl_ctx],
                         2);
        if (mbmi->ref_mv_idx == idx) return;
      }
    }
    return;
  }

  if (have_nearmv_in_inter_mode(mbmi->mode)) {
    int idx;
    // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
    for (idx = 1; idx < 3; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1 ||
          CONFIG_OPT_REF_MV) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_write_symbol(w, mbmi->ref_mv_idx != (idx - 1),
                         ec_ctx->drl_cdf[drl_ctx], 2);
        if (mbmi->ref_mv_idx == (idx - 1)) return;
      }
    }
    return;
  }
}

static void write_inter_compound_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                      aom_writer *w, PREDICTION_MODE mode,
                                      const int16_t mode_ctx) {
  assert(is_inter_compound_mode(mode));
  (void)cm;
  aom_write_symbol(w, INTER_COMPOUND_OFFSET(mode),
                   xd->tile_ctx->inter_compound_mode_cdf[mode_ctx],
                   INTER_COMPOUND_MODES);
}

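// Recursively signals the transform partition for an inter block: at each
// node one binary symbol tells whether the current tx_size is kept or split
// further, and the above/left txfm partition contexts are updated to match.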
static void write_tx_size_vartx(const AV1_COMMON *cm, MACROBLOCKD *xd,
                                const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
                                int depth, int blk_row, int blk_col,
                                aom_writer *w) {
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
  const int tx_row = blk_row >> 1;
  const int tx_col = blk_col >> 1;
  const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
  const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);

  int ctx = txfm_partition_context(xd->above_txfm_context + blk_col,
                                   xd->left_txfm_context + blk_row,
                                   mbmi->sb_type, tx_size);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (depth == MAX_VARTX_DEPTH) {
    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    return;
  }

  const int write_txfm_partition =
      tx_size == mbmi->inter_tx_size[tx_row][tx_col];
  if (write_txfm_partition) {
    aom_write_symbol(w, 0, ec_ctx->txfm_partition_cdf[ctx], 2);

    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    // TODO(yuec): set correct txfm partition update for qttx
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[1][tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];

    aom_write_symbol(w, 1, ec_ctx->txfm_partition_cdf[ctx], 2);

    if (sub_txs == TX_4X4) {
      txfm_partition_update(xd->above_txfm_context + blk_col,
                            xd->left_txfm_context + blk_row, sub_txs, tx_size);
      return;
    }

    assert(bsw > 0 && bsh > 0);
    for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh)
      for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
        int offsetr = blk_row + row;
        int offsetc = blk_col + col;
        write_tx_size_vartx(cm, xd, mbmi, sub_txs, depth + 1, offsetr, offsetc,
                            w);
      }
  }
}

static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
  if (block_signals_txsize(bsize)) {
    const TX_SIZE tx_size = mbmi->tx_size;
    const int tx_size_ctx = get_tx_size_context(xd);
    const int depth = tx_size_to_depth(tx_size, bsize, 0);
    const int max_depths = bsize_to_max_depth(bsize, 0);
    const int32_t tx_size_cat = bsize_to_tx_size_cat(bsize, 0);

    assert(depth >= 0 && depth <= max_depths);
    assert(!is_inter_block(mbmi));
    assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));

    aom_write_symbol(w, depth, ec_ctx->tx_size_cdf[tx_size_cat][tx_size_ctx],
                     max_depths + 1);
  }
}

static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                      int segment_id, const MODE_INFO *mi, aom_writer *w) {
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip = mi->mbmi.skip;
    const int ctx = av1_get_skip_context(xd);
    FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
    aom_write_symbol(w, skip, ec_ctx->skip_cdfs[ctx], 2);
    return skip;
  }
}

#if CONFIG_EXT_SKIP
static int write_skip_mode(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                           int segment_id, const MODE_INFO *mi, aom_writer *w) {
  if (!cm->skip_mode_flag) return 0;
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 0;
  }
  const int skip_mode = mi->mbmi.skip_mode;
  if (!is_comp_ref_allowed(mi->mbmi.sb_type)) {
    assert(!skip_mode);
    return 0;
  }
  const int ctx = av1_get_skip_mode_context(xd);
  aom_write_symbol(w, skip_mode, xd->tile_ctx->skip_mode_cdfs[ctx], 2);
  return skip_mode;
}
#endif  // CONFIG_EXT_SKIP

static void write_is_inter(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                           int segment_id, aom_writer *w, const int is_inter) {
  if (!segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    const int ctx = av1_get_intra_inter_context(xd);
    FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
    aom_write_symbol(w, is_inter, ec_ctx->intra_inter_cdf[ctx], 2);
  }
}

static void write_motion_mode(const AV1_COMMON *cm, MACROBLOCKD *xd,
                              const MODE_INFO *mi, aom_writer *w) {
  const MB_MODE_INFO *mbmi = &mi->mbmi;

  MOTION_MODE last_motion_mode_allowed =
      motion_mode_allowed(cm->global_motion, xd, mi);
  switch (last_motion_mode_allowed) {
    case SIMPLE_TRANSLATION: break;
    case OBMC_CAUSAL:
      aom_write_symbol(w, mbmi->motion_mode == OBMC_CAUSAL,
                       xd->tile_ctx->obmc_cdf[mbmi->sb_type], 2);
      break;
    default:
#if CONFIG_EXT_WARPED_MOTION
    {
      int wm_ctx = 0;
      if (mbmi->wm_ctx != -1) {
        wm_ctx = 1;
        if (mbmi->mode == NEARESTMV) wm_ctx = 2;
      }
      aom_write_symbol(w, mbmi->motion_mode,
                       xd->tile_ctx->motion_mode_cdf[wm_ctx][mbmi->sb_type],
                       MOTION_MODES);
    }
#else
      aom_write_symbol(w, mbmi->motion_mode,
                       xd->tile_ctx->motion_mode_cdf[mbmi->sb_type],
                       MOTION_MODES);
#endif  // CONFIG_EXT_WARPED_MOTION
  }
}

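// Codes a delta-QP value: |delta| is written as a symbol when it is smaller
// than DELTA_Q_SMALL; larger magnitudes escape to a length-prefixed
// remainder. A sign bit follows for any nonzero delta.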
static void write_delta_qindex(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                               int delta_qindex, aom_writer *w) {
  int sign = delta_qindex < 0;
  int abs = sign ? -delta_qindex : delta_qindex;
  int rem_bits, thr;
  int smallval = abs < DELTA_Q_SMALL ? 1 : 0;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;

  aom_write_symbol(w, AOMMIN(abs, DELTA_Q_SMALL), ec_ctx->delta_q_cdf,
                   DELTA_Q_PROBS + 1);

  if (!smallval) {
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits - 1, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}

#if CONFIG_EXT_DELTA_Q
static void write_delta_lflevel(const AV1_COMMON *cm, const MACROBLOCKD *xd,
#if CONFIG_LOOPFILTER_LEVEL
                                int lf_id,
#endif
                                int delta_lflevel, aom_writer *w) {
  int sign = delta_lflevel < 0;
  int abs = sign ? -delta_lflevel : delta_lflevel;
  int rem_bits, thr;
  int smallval = abs < DELTA_LF_SMALL ? 1 : 0;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;

#if CONFIG_LOOPFILTER_LEVEL
  if (cm->delta_lf_multi) {
    assert(lf_id >= 0 && lf_id < FRAME_LF_COUNT);
    aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL),
                     ec_ctx->delta_lf_multi_cdf[lf_id], DELTA_LF_PROBS + 1);
  } else {
    aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
                     DELTA_LF_PROBS + 1);
  }
#else
  aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
                   DELTA_LF_PROBS + 1);
#endif  // CONFIG_LOOPFILTER_LEVEL

  if (!smallval) {
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits - 1, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}
#endif  // CONFIG_EXT_DELTA_Q

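// Packs the palette color-index map: the first index is coded with
// write_uniform() and each subsequent index with its per-pixel CDF over the
// n palette entries.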
static void pack_map_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
                            int num) {
  const TOKENEXTRA *p = *tp;
  write_uniform(w, n, p->token);  // The first color index.
  ++p;
  --num;
  for (int i = 0; i < num; ++i) {
    aom_write_symbol(w, p->token, p->color_map_cdf, n);
    ++p;
  }
  *tp = p;
}

#if !CONFIG_LV_MAP
static INLINE void write_coeff_extra(const aom_cdf_prob *const *cdf, int val,
                                     int n, aom_writer *w) {
  // Code the extra bits from LSB to MSB in groups of 4
  int i = 0;
  int count = 0;
  while (count < n) {
    const int size = AOMMIN(n - count, 4);
    const int mask = (1 << size) - 1;
    aom_write_cdf(w, val & mask, cdf[i++], 1 << size);
    val >>= size;
    count += size;
  }
}

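// Packs the coefficient tokens of one transform block (non-LV_MAP path): each
// token is coded as a head symbol, a tail symbol when the value exceeds one,
// and, for the larger categories, extra magnitude bits plus a sign bit.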
static void pack_mb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop,
                           aom_bit_depth_t bit_depth, const TX_SIZE tx_size,
                           TOKEN_STATS *token_stats) {
  const TOKENEXTRA *p = *tp;
  int count = 0;
  const int seg_eob = av1_get_max_eob(tx_size);

  while (p < stop && p->token != EOSB_TOKEN) {
    const int token = p->token;
    const int8_t eob_val = p->eob_val;
    if (token == BLOCK_Z_TOKEN) {
      aom_write_symbol(w, 0, *p->head_cdf, HEAD_TOKENS + 1);
      p++;
      break;
      continue;
    }

    const av1_extra_bit *const extra_bits = &av1_extra_bits[token];
    if (eob_val == LAST_EOB) {
      // Just code a flag indicating whether the value is >1 or 1.
      aom_write_bit(w, token != ONE_TOKEN);
    } else {
      int comb_symb = 2 * AOMMIN(token, TWO_TOKEN) - eob_val + p->first_val;
      aom_write_symbol(w, comb_symb, *p->head_cdf, HEAD_TOKENS + p->first_val);
    }
    if (token > ONE_TOKEN) {
      aom_write_symbol(w, token - TWO_TOKEN, *p->tail_cdf, TAIL_TOKENS);
    }

    if (extra_bits->base_val) {
      const int bit_string = p->extra;
      // Length of extra bits to be written excluding the sign bit.
      const int bit_string_length = extra_bits->len;
      const int is_cat6 = (extra_bits->base_val == CAT6_MIN_VAL);
      int skip_bits = is_cat6
                          ? (int)sizeof(av1_cat6_prob) -
                                av1_get_cat6_extrabits_size(tx_size, bit_depth)
                          : 0;

      assert(!(bit_string >> (bit_string_length - skip_bits + 1)));
      if (bit_string_length > 0)
        write_coeff_extra(extra_bits->cdf, bit_string >> 1,
                          bit_string_length - skip_bits, w);

      aom_write_bit_record(w, bit_string & 1, token_stats);
    }
    ++p;

    ++count;
    if (eob_val == EARLY_EOB || count == seg_eob) break;
  }

  *tp = p;
}
#endif  // !CONFIG_LV_MAP

#if CONFIG_LV_MAP
static void pack_txb_tokens(aom_writer *w, AV1_COMMON *cm, MACROBLOCK *const x,
                            const TOKENEXTRA **tp,
                            const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
                            MB_MODE_INFO *mbmi, int plane,
                            BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
                            int block, int blk_row, int blk_col,
                            TX_SIZE tx_size, TOKEN_STATS *token_stats) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  TX_SIZE plane_tx_size;
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];

  if (tx_size == plane_tx_size
#if DISABLE_VARTX_FOR_CHROMA
      || pd->subsampling_x || pd->subsampling_y
#endif  // DISABLE_VARTX_FOR_CHROMA
      ) {
    TOKEN_STATS tmp_token_stats;
    init_token_stats(&tmp_token_stats);

    tran_low_t *tcoeff = BLOCK_OFFSET(x->mbmi_ext->tcoeff[plane], block);
    uint16_t eob = x->mbmi_ext->eobs[plane][block];
    TXB_CTX txb_ctx = { x->mbmi_ext->txb_skip_ctx[plane][block],
                        x->mbmi_ext->dc_sign_ctx[plane][block] };
    av1_write_coeffs_txb(cm, xd, w, blk_row, blk_col, plane, tx_size, tcoeff,
                         eob, &txb_ctx);
#if CONFIG_RD_DEBUG
    token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
    token_stats->cost += tmp_token_stats.cost;
#endif
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[1][tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];

    assert(bsw > 0 && bsh > 0);

    for (int r = 0; r < tx_size_high_unit[tx_size]; r += bsh) {
      for (int c = 0; c < tx_size_wide_unit[tx_size]; c += bsw) {
        const int offsetr = blk_row + r;
        const int offsetc = blk_col + c;
        const int step = bsh * bsw;

        if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

        pack_txb_tokens(w, cm, x, tp, tok_end, xd, mbmi, plane, plane_bsize,
                        bit_depth, block, offsetr, offsetc, sub_txs,
                        token_stats);
        block += step;
      }
    }
  }
}
#else  // CONFIG_LV_MAP
static void pack_txb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                            const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
                            MB_MODE_INFO *mbmi, int plane,
                            BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
                            int block, int blk_row, int blk_col,
                            TX_SIZE tx_size, TOKEN_STATS *token_stats) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  TX_SIZE plane_tx_size;
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];

  if (tx_size == plane_tx_size
#if DISABLE_VARTX_FOR_CHROMA
      || pd->subsampling_x || pd->subsampling_y
#endif  // DISABLE_VARTX_FOR_CHROMA
      ) {
    TOKEN_STATS tmp_token_stats;
    init_token_stats(&tmp_token_stats);
    pack_mb_tokens(w, tp, tok_end, bit_depth, tx_size, &tmp_token_stats);
#if CONFIG_RD_DEBUG
    token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
    token_stats->cost += tmp_token_stats.cost;
#endif
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[1][tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];

    assert(bsw > 0 && bsh > 0);

    for (int r = 0; r < tx_size_high_unit[tx_size]; r += bsh) {
      for (int c = 0; c < tx_size_wide_unit[tx_size]; c += bsw) {
        const int offsetr = blk_row + r;
        const int offsetc = blk_col + c;
        const int step = bsh * bsw;

        if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

        pack_txb_tokens(w, tp, tok_end, xd, mbmi, plane, plane_bsize, bit_depth,
                        block, offsetr, offsetc, sub_txs, token_stats);
        block += step;
      }
    }
  }
}
#endif  // CONFIG_LV_MAP

#if CONFIG_SPATIAL_SEGMENTATION
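// Maps segment_id x onto a non-negative code relative to the predicted id
// "ref": differences close to ref are interleaved (positive and negative
// alternating), while values outside that range fall back to an offset code.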
static int neg_interleave(int x, int ref, int max) {
  const int diff = x - ref;
  if (!ref) return x;
  if (ref >= (max - 1)) return -diff;
  if (2 * ref < max) {
    if (abs(diff) <= ref) {
      if (diff > 0)
        return (diff << 1) - 1;
      else
        return ((-diff) << 1);
    }
    return x;
  } else {
    if (abs(diff) < (max - ref)) {
      if (diff > 0)
        return (diff << 1) - 1;
      else
        return ((-diff) << 1);
    }
    return (max - x) - 1;
  }
}

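// Codes the segment id of a block by predicting it from the above-left,
// above and left neighbours. Skip blocks inherit the prediction directly;
// otherwise the neg_interleave()d residual is written with a CDF selected
// from the same neighbours.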
static void write_segment_id(AV1_COMP *cpi, const MB_MODE_INFO *const mbmi,
                             aom_writer *w, const struct segmentation *seg,
                             struct segmentation_probs *segp, int mi_row,
                             int mi_col, int skip) {
  AV1_COMMON *const cm = &cpi->common;
  int prev_ul = 0; /* Top left segment_id */
  int prev_l = 0;  /* Current left segment_id */
  int prev_u = 0;  /* Current top segment_id */

  if (!seg->enabled || !seg->update_map) return;

  MODE_INFO *const mi = cm->mi + mi_row * cm->mi_stride + mi_col;
  int tinfo = mi->mbmi.boundary_info;
  int above = (!(tinfo & TILE_ABOVE_BOUNDARY)) && ((mi_row - 1) >= 0);
  int left = (!(tinfo & TILE_LEFT_BOUNDARY)) && ((mi_col - 1) >= 0);

  if (above && left)
    prev_ul = get_segment_id(cm, cm->current_frame_seg_map, BLOCK_4X4,
                             mi_row - 1, mi_col - 1);

  if (above)
    prev_u = get_segment_id(cm, cm->current_frame_seg_map, BLOCK_4X4,
                            mi_row - 1, mi_col - 0);

  if (left)
    prev_l = get_segment_id(cm, cm->current_frame_seg_map, BLOCK_4X4,
                            mi_row - 0, mi_col - 1);

  int cdf_num = pick_spatial_seg_cdf(prev_ul, prev_u, prev_l);
  int pred = pick_spatial_seg_pred(prev_ul, prev_u, prev_l);

  if (skip) {
    set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type, mi_row,
                           mi_col, pred);
    set_spatial_segment_id(cm, cpi->segmentation_map, mbmi->sb_type, mi_row,
                           mi_col, pred);
    /* mbmi is read only but we need to update segment_id */
    ((MB_MODE_INFO *)mbmi)->segment_id = pred;
    return;
  }

  int coded_id =
      neg_interleave(mbmi->segment_id, pred, cm->last_active_segid + 1);

  aom_cdf_prob *pred_cdf = segp->spatial_pred_seg_cdf[cdf_num];
  aom_write_symbol(w, coded_id, pred_cdf, 8);

  set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type, mi_row,
                         mi_col, mbmi->segment_id);
}
#else
static void write_segment_id(aom_writer *w, const struct segmentation *seg,
                             struct segmentation_probs *segp, int segment_id) {
  if (seg->enabled && seg->update_map) {
    aom_write_symbol(w, segment_id, segp->tree_cdf, MAX_SEGMENTS);
  }
}
#endif

#define WRITE_REF_BIT(bname, pname) \
  aom_write_symbol(w, bname, av1_get_pred_cdf_##pname(cm, xd), 2)
#define WRITE_REF_BIT2(bname, pname) \
  aom_write_symbol(w, bname, av1_get_pred_cdf_##pname(xd), 2)

// This function encodes the reference frame
static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int is_compound = has_second_ref(mbmi);
  const int segment_id = mbmi->segment_id;

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
           get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
  }
#if CONFIG_SEGMENT_GLOBALMV
  else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP) ||
           segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV))
#else
  else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP))
#endif
  {
    assert(!is_compound);
    assert(mbmi->ref_frame[0] == LAST_FRAME);
  } else {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      if (is_comp_ref_allowed(mbmi->sb_type))
        aom_write_symbol(w, is_compound, av1_get_reference_mode_cdf(cm, xd), 2);
    } else {
      assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (is_compound) {
#if CONFIG_EXT_COMP_REFS
      const COMP_REFERENCE_TYPE comp_ref_type = has_uni_comp_refs(mbmi)
                                                    ? UNIDIR_COMP_REFERENCE
                                                    : BIDIR_COMP_REFERENCE;
      aom_write_symbol(w, comp_ref_type, av1_get_comp_reference_type_cdf(xd),
                       2);

      if (comp_ref_type == UNIDIR_COMP_REFERENCE) {
        const int bit = mbmi->ref_frame[0] == BWDREF_FRAME;
        WRITE_REF_BIT2(bit, uni_comp_ref_p);

        if (!bit) {
          assert(mbmi->ref_frame[0] == LAST_FRAME);
          const int bit1 = mbmi->ref_frame[1] == LAST3_FRAME ||
                           mbmi->ref_frame[1] == GOLDEN_FRAME;
          WRITE_REF_BIT2(bit1, uni_comp_ref_p1);
          if (bit1) {
            const int bit2 = mbmi->ref_frame[1] == GOLDEN_FRAME;
            WRITE_REF_BIT2(bit2, uni_comp_ref_p2);
          }
        } else {
          assert(mbmi->ref_frame[1] == ALTREF_FRAME);
        }

        return;
      }

      assert(comp_ref_type == BIDIR_COMP_REFERENCE);
#endif  // CONFIG_EXT_COMP_REFS

      const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
                       mbmi->ref_frame[0] == LAST3_FRAME);
      WRITE_REF_BIT(bit, comp_ref_p);

      if (!bit) {
        const int bit1 = mbmi->ref_frame[0] == LAST2_FRAME;
        WRITE_REF_BIT(bit1, comp_ref_p1);
      } else {
        const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
        WRITE_REF_BIT(bit2, comp_ref_p2);
      }

      const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
      WRITE_REF_BIT(bit_bwd, comp_bwdref_p);

      if (!bit_bwd) {
        WRITE_REF_BIT(mbmi->ref_frame[1] == ALTREF2_FRAME, comp_bwdref_p1);
      }

    } else {
      const int bit0 = (mbmi->ref_frame[0] <= ALTREF_FRAME &&
                        mbmi->ref_frame[0] >= BWDREF_FRAME);
      WRITE_REF_BIT(bit0, single_ref_p1);

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
        WRITE_REF_BIT(bit1, single_ref_p2);

        if (!bit1) {
          WRITE_REF_BIT(mbmi->ref_frame[0] == ALTREF2_FRAME, single_ref_p6);
        }
      } else {
        const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
                          mbmi->ref_frame[0] == GOLDEN_FRAME);
        WRITE_REF_BIT(bit2, single_ref_p3);

        if (!bit2) {
          const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
          WRITE_REF_BIT(bit3, single_ref_p4);
        } else {
          const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
          WRITE_REF_BIT(bit4, single_ref_p5);
        }
      }
    }
  }
}

#if CONFIG_FILTER_INTRA
static void write_filter_intra_mode_info(const MACROBLOCKD *xd,
                                         const MB_MODE_INFO *const mbmi,
                                         aom_writer *w) {
  if (mbmi->mode == DC_PRED && mbmi->palette_mode_info.palette_size[0] == 0 &&
      av1_filter_intra_allowed_txsize(mbmi->tx_size)) {
    aom_write_symbol(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[0],
                     xd->tile_ctx->filter_intra_cdfs[mbmi->tx_size], 2);
    if (mbmi->filter_intra_mode_info.use_filter_intra_mode[0]) {
      const FILTER_INTRA_MODE mode =
          mbmi->filter_intra_mode_info.filter_intra_mode[0];
      aom_write_symbol(w, mode, xd->tile_ctx->filter_intra_mode_cdf[0],
                       FILTER_INTRA_MODES);
    }
  }
}
#endif  // CONFIG_FILTER_INTRA

#if CONFIG_EXT_INTRA
static void write_intra_angle_info(const MACROBLOCKD *xd,
                                   FRAME_CONTEXT *const ec_ctx, aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  if (!av1_use_angle_delta(bsize)) return;

  if (av1_is_directional_mode(mbmi->mode, bsize)) {
#if CONFIG_EXT_INTRA_MOD
    aom_write_symbol(w, mbmi->angle_delta[0] + MAX_ANGLE_DELTA,
                     ec_ctx->angle_delta_cdf[mbmi->mode - V_PRED],
                     2 * MAX_ANGLE_DELTA + 1);
#else
    (void)ec_ctx;
    write_uniform(w, 2 * MAX_ANGLE_DELTA + 1,
                  MAX_ANGLE_DELTA + mbmi->angle_delta[0]);
#endif  // CONFIG_EXT_INTRA_MOD
  }

  if (av1_is_directional_mode(get_uv_mode(mbmi->uv_mode), bsize)) {
#if CONFIG_EXT_INTRA_MOD
    aom_write_symbol(w, mbmi->angle_delta[1] + MAX_ANGLE_DELTA,
                     ec_ctx->angle_delta_cdf[mbmi->uv_mode - V_PRED],
                     2 * MAX_ANGLE_DELTA + 1);
#else
    write_uniform(w, 2 * MAX_ANGLE_DELTA + 1,
                  MAX_ANGLE_DELTA + mbmi->angle_delta[1]);
#endif
  }
}
#endif  // CONFIG_EXT_INTRA

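// Signals the interpolation filter(s) of an inter block. Nothing is coded
// when the block does not need sub-pel interpolation or the frame-level
// filter is not SWITCHABLE; with CONFIG_DUAL_FILTER each direction that has
// a sub-pel MV component gets its own filter symbol.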
Angie Chiang5678ad92016-11-21 09:38:40 -0800860static void write_mb_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd,
861 aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700862 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700863 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
Thomas Davies77c7c402017-01-11 17:58:54 +0000864 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Jingning Han203b1d32017-01-12 16:00:13 -0800865
Debargha Mukherjee0df711f2017-05-02 16:00:20 -0700866 if (!av1_is_interp_needed(xd)) {
Rupert Swarbrick27e90292017-09-28 17:46:50 +0100867 assert(mbmi->interp_filters ==
868 av1_broadcast_interp_filter(
869 av1_unswitchable_filter(cm->interp_filter)));
Debargha Mukherjee0df711f2017-05-02 16:00:20 -0700870 return;
871 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700872 if (cm->interp_filter == SWITCHABLE) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700873#if CONFIG_DUAL_FILTER
Jingning Han203b1d32017-01-12 16:00:13 -0800874 int dir;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700875 for (dir = 0; dir < 2; ++dir) {
876 if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
877 (mbmi->ref_frame[1] > INTRA_FRAME &&
878 has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700879 const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
Rupert Swarbrick27e90292017-09-28 17:46:50 +0100880 InterpFilter filter =
881 av1_extract_interp_filter(mbmi->interp_filters, dir);
882 aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
Angie Chiangb9b42a02017-01-20 12:47:36 -0800883 SWITCHABLE_FILTERS);
Rupert Swarbrick27e90292017-09-28 17:46:50 +0100884 ++cpi->interp_filter_selected[0][filter];
Angie Chiang38edf682017-02-21 15:13:09 -0800885 } else {
Rupert Swarbrick27e90292017-09-28 17:46:50 +0100886 assert(av1_extract_interp_filter(mbmi->interp_filters, dir) ==
887 EIGHTTAP_REGULAR);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700888 }
889 }
890#else
891 {
Yaowu Xuf883b422016-08-30 14:01:10 -0700892 const int ctx = av1_get_pred_context_switchable_interp(xd);
Rupert Swarbrick27e90292017-09-28 17:46:50 +0100893 InterpFilter filter = av1_extract_interp_filter(mbmi->interp_filters, 0);
894 aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
895 SWITCHABLE_FILTERS);
896 ++cpi->interp_filter_selected[0][filter];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700897 }
Jingning Han203b1d32017-01-12 16:00:13 -0800898#endif // CONFIG_DUAL_FILTER
Yaowu Xuc27fc142016-08-22 16:08:15 -0700899 }
900}
901
hui su33567b22017-04-30 16:40:19 -0700902// Transmit color values with delta encoding. Write the first value as
903// literal, and the deltas between each value and the previous one. "min_val" is
904// the smallest possible value of the deltas.
905static void delta_encode_palette_colors(const int *colors, int num,
906 int bit_depth, int min_val,
907 aom_writer *w) {
908 if (num <= 0) return;
hui sufa4ff852017-05-15 12:20:50 -0700909 assert(colors[0] < (1 << bit_depth));
hui su33567b22017-04-30 16:40:19 -0700910 aom_write_literal(w, colors[0], bit_depth);
911 if (num == 1) return;
912 int max_delta = 0;
913 int deltas[PALETTE_MAX_SIZE];
914 memset(deltas, 0, sizeof(deltas));
915 for (int i = 1; i < num; ++i) {
hui sufa4ff852017-05-15 12:20:50 -0700916 assert(colors[i] < (1 << bit_depth));
hui su33567b22017-04-30 16:40:19 -0700917 const int delta = colors[i] - colors[i - 1];
918 deltas[i - 1] = delta;
919 assert(delta >= min_val);
920 if (delta > max_delta) max_delta = delta;
921 }
922 const int min_bits = bit_depth - 3;
923 int bits = AOMMAX(av1_ceil_log2(max_delta + 1 - min_val), min_bits);
hui sufa4ff852017-05-15 12:20:50 -0700924 assert(bits <= bit_depth);
hui su33567b22017-04-30 16:40:19 -0700925 int range = (1 << bit_depth) - colors[0] - min_val;
hui sud13c24a2017-04-07 16:13:07 -0700926 aom_write_literal(w, bits - min_bits, 2);
hui su33567b22017-04-30 16:40:19 -0700927 for (int i = 0; i < num - 1; ++i) {
928 aom_write_literal(w, deltas[i] - min_val, bits);
929 range -= deltas[i];
930 bits = AOMMIN(bits, av1_ceil_log2(range));
hui sud13c24a2017-04-07 16:13:07 -0700931 }
932}
933
hui su33567b22017-04-30 16:40:19 -0700934// Transmit luma palette color values. First signal if each color in the color
935// cache is used. Those colors that are not in the cache are transmitted with
936// delta encoding.
937static void write_palette_colors_y(const MACROBLOCKD *const xd,
938 const PALETTE_MODE_INFO *const pmi,
939 int bit_depth, aom_writer *w) {
940 const int n = pmi->palette_size[0];
hui su33567b22017-04-30 16:40:19 -0700941 uint16_t color_cache[2 * PALETTE_MAX_SIZE];
Hui Su3748bc22017-08-23 11:30:41 -0700942 const int n_cache = av1_get_palette_cache(xd, 0, color_cache);
hui su33567b22017-04-30 16:40:19 -0700943 int out_cache_colors[PALETTE_MAX_SIZE];
944 uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
945 const int n_out_cache =
946 av1_index_color_cache(color_cache, n_cache, pmi->palette_colors, n,
947 cache_color_found, out_cache_colors);
948 int n_in_cache = 0;
949 for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
950 const int found = cache_color_found[i];
951 aom_write_bit(w, found);
952 n_in_cache += found;
953 }
954 assert(n_in_cache + n_out_cache == n);
955 delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 1, w);
956}
957
958// Write chroma palette color values. U channel is handled similarly to the luma
959// channel. For v channel, either use delta encoding or transmit raw values
960// directly, whichever costs less.
961static void write_palette_colors_uv(const MACROBLOCKD *const xd,
962 const PALETTE_MODE_INFO *const pmi,
hui sud13c24a2017-04-07 16:13:07 -0700963 int bit_depth, aom_writer *w) {
hui sud13c24a2017-04-07 16:13:07 -0700964 const int n = pmi->palette_size[1];
hui sud13c24a2017-04-07 16:13:07 -0700965 const uint16_t *colors_u = pmi->palette_colors + PALETTE_MAX_SIZE;
966 const uint16_t *colors_v = pmi->palette_colors + 2 * PALETTE_MAX_SIZE;
hui sud13c24a2017-04-07 16:13:07 -0700967 // U channel colors.
hui su33567b22017-04-30 16:40:19 -0700968 uint16_t color_cache[2 * PALETTE_MAX_SIZE];
Hui Su3748bc22017-08-23 11:30:41 -0700969 const int n_cache = av1_get_palette_cache(xd, 1, color_cache);
hui su33567b22017-04-30 16:40:19 -0700970 int out_cache_colors[PALETTE_MAX_SIZE];
971 uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
972 const int n_out_cache = av1_index_color_cache(
973 color_cache, n_cache, colors_u, n, cache_color_found, out_cache_colors);
974 int n_in_cache = 0;
975 for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
976 const int found = cache_color_found[i];
977 aom_write_bit(w, found);
978 n_in_cache += found;
hui sud13c24a2017-04-07 16:13:07 -0700979 }
hui su33567b22017-04-30 16:40:19 -0700980 delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 0, w);
981
982 // V channel colors. Don't use color cache as the colors are not sorted.
hui sud13c24a2017-04-07 16:13:07 -0700983 const int max_val = 1 << bit_depth;
984 int zero_count = 0, min_bits_v = 0;
985 int bits_v =
986 av1_get_palette_delta_bits_v(pmi, bit_depth, &zero_count, &min_bits_v);
987 const int rate_using_delta =
988 2 + bit_depth + (bits_v + 1) * (n - 1) - zero_count;
989 const int rate_using_raw = bit_depth * n;
990 if (rate_using_delta < rate_using_raw) { // delta encoding
hui sufa4ff852017-05-15 12:20:50 -0700991 assert(colors_v[0] < (1 << bit_depth));
hui sud13c24a2017-04-07 16:13:07 -0700992 aom_write_bit(w, 1);
993 aom_write_literal(w, bits_v - min_bits_v, 2);
994 aom_write_literal(w, colors_v[0], bit_depth);
hui su33567b22017-04-30 16:40:19 -0700995 for (int i = 1; i < n; ++i) {
hui sufa4ff852017-05-15 12:20:50 -0700996 assert(colors_v[i] < (1 << bit_depth));
hui sud13c24a2017-04-07 16:13:07 -0700997 if (colors_v[i] == colors_v[i - 1]) { // No need to signal sign bit.
998 aom_write_literal(w, 0, bits_v);
999 continue;
1000 }
1001 const int delta = abs((int)colors_v[i] - colors_v[i - 1]);
1002 const int sign_bit = colors_v[i] < colors_v[i - 1];
1003 if (delta <= max_val - delta) {
1004 aom_write_literal(w, delta, bits_v);
1005 aom_write_bit(w, sign_bit);
1006 } else {
1007 aom_write_literal(w, max_val - delta, bits_v);
1008 aom_write_bit(w, !sign_bit);
1009 }
1010 }
1011 } else { // Transmit raw values.
1012 aom_write_bit(w, 0);
hui sufa4ff852017-05-15 12:20:50 -07001013 for (int i = 0; i < n; ++i) {
1014 assert(colors_v[i] < (1 << bit_depth));
1015 aom_write_literal(w, colors_v[i], bit_depth);
1016 }
hui sud13c24a2017-04-07 16:13:07 -07001017 }
1018}
hui sud13c24a2017-04-07 16:13:07 -07001019
Yaowu Xuf883b422016-08-30 14:01:10 -07001020static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
1021 const MODE_INFO *const mi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001022 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1023 const MODE_INFO *const above_mi = xd->above_mi;
1024 const MODE_INFO *const left_mi = xd->left_mi;
1025 const BLOCK_SIZE bsize = mbmi->sb_type;
Hui Su473cf892017-11-08 18:14:31 -08001026 assert(av1_allow_palette(cm->allow_screen_content_tools, bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001027 const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
Rupert Swarbrick6f9cd942017-08-02 15:57:18 +01001028 const int block_palette_idx = bsize - BLOCK_8X8;
1029
Yaowu Xuc27fc142016-08-22 16:08:15 -07001030 if (mbmi->mode == DC_PRED) {
Urvang Joshi23a61112017-01-30 14:59:27 -08001031 const int n = pmi->palette_size[0];
1032 int palette_y_mode_ctx = 0;
hui su40b9e7f2017-07-13 18:15:56 -07001033 if (above_mi) {
Urvang Joshi23a61112017-01-30 14:59:27 -08001034 palette_y_mode_ctx +=
1035 (above_mi->mbmi.palette_mode_info.palette_size[0] > 0);
hui su40b9e7f2017-07-13 18:15:56 -07001036 }
1037 if (left_mi) {
Urvang Joshi23a61112017-01-30 14:59:27 -08001038 palette_y_mode_ctx +=
1039 (left_mi->mbmi.palette_mode_info.palette_size[0] > 0);
hui su40b9e7f2017-07-13 18:15:56 -07001040 }
Thomas Davies59f92312017-08-23 00:33:12 +01001041 aom_write_symbol(
1042 w, n > 0,
1043 xd->tile_ctx->palette_y_mode_cdf[block_palette_idx][palette_y_mode_ctx],
1044 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001045 if (n > 0) {
Thomas Daviesce7272d2017-07-04 16:11:08 +01001046 aom_write_symbol(w, n - PALETTE_MIN_SIZE,
Rupert Swarbrick6f9cd942017-08-02 15:57:18 +01001047 xd->tile_ctx->palette_y_size_cdf[block_palette_idx],
Thomas Daviesce7272d2017-07-04 16:11:08 +01001048 PALETTE_SIZES);
hui su33567b22017-04-30 16:40:19 -07001049 write_palette_colors_y(xd, pmi, cm->bit_depth, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001050 }
1051 }
1052
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00001053 const int uv_dc_pred =
1054#if CONFIG_MONO_VIDEO
Imdad Sardharwalla317002f2017-12-05 16:24:56 +00001055 av1_num_planes(cm) > 1 &&
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00001056#endif
1057 mbmi->uv_mode == UV_DC_PRED;
1058 if (uv_dc_pred) {
Urvang Joshi23a61112017-01-30 14:59:27 -08001059 const int n = pmi->palette_size[1];
1060 const int palette_uv_mode_ctx = (pmi->palette_size[0] > 0);
Thomas Davies59f92312017-08-23 00:33:12 +01001061 aom_write_symbol(w, n > 0,
1062 xd->tile_ctx->palette_uv_mode_cdf[palette_uv_mode_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001063 if (n > 0) {
Thomas Daviesce7272d2017-07-04 16:11:08 +01001064 aom_write_symbol(w, n - PALETTE_MIN_SIZE,
Rupert Swarbrick6f9cd942017-08-02 15:57:18 +01001065 xd->tile_ctx->palette_uv_size_cdf[block_palette_idx],
Thomas Daviesce7272d2017-07-04 16:11:08 +01001066 PALETTE_SIZES);
hui su33567b22017-04-30 16:40:19 -07001067 write_palette_colors_uv(xd, pmi, cm->bit_depth, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001068 }
1069 }
1070}
1071
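// Writes the transform type of the current block. A type is only coded when
// the extended transform set for this transform size, block size and
// prediction mode has more than one member, the effective qindex is nonzero
// and the block is neither skipped nor segment-skipped; otherwise the
// decoder infers the type.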
Angie Chiangc31ea682017-04-13 16:20:54 -07001072void av1_write_tx_type(const AV1_COMMON *const cm, const MACROBLOCKD *xd,
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001073#if CONFIG_TXK_SEL
Luc Trudeau2eb9b842017-12-13 11:19:16 -05001074 int blk_row, int blk_col, int plane, TX_SIZE tx_size,
Angie Chiangc31ea682017-04-13 16:20:54 -07001075#endif
1076 aom_writer *w) {
1077 MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
Jingning Han2a4da942016-11-03 18:31:30 -07001078 const int is_inter = is_inter_block(mbmi);
Jingning Han243b66b2017-06-23 12:11:47 -07001079#if !CONFIG_TXK_SEL
Debargha Mukherjee5577bd12017-11-20 16:04:26 -08001080 const TX_SIZE mtx_size =
1081 get_max_rect_tx_size(xd->mi[0]->mbmi.sb_type, is_inter);
Sarah Parker90024e42017-10-06 16:50:47 -07001082 const TX_SIZE tx_size =
Debargha Mukherjeee4e18fc2017-12-06 23:43:24 -08001083 is_inter ? AOMMAX(sub_tx_size_map[1][mtx_size], mbmi->min_tx_size)
Sarah Parker90024e42017-10-06 16:50:47 -07001084 : mbmi->tx_size;
Jingning Han243b66b2017-06-23 12:11:47 -07001085#endif // !CONFIG_TXK_SEL
Thomas Daviescef09622017-01-11 17:27:12 +00001086 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Daviescef09622017-01-11 17:27:12 +00001087
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001088#if !CONFIG_TXK_SEL
Angie Chiangc31ea682017-04-13 16:20:54 -07001089 TX_TYPE tx_type = mbmi->tx_type;
1090#else
1091 // Only the Y plane's tx_type is transmitted.
Angie Chiang39b06eb2017-04-14 09:52:29 -07001092 if (plane > 0) return;
1093 PLANE_TYPE plane_type = get_plane_type(plane);
Luc Trudeau2eb9b842017-12-13 11:19:16 -05001094 TX_TYPE tx_type = av1_get_tx_type(plane_type, xd, blk_row, blk_col, tx_size);
Angie Chiangc31ea682017-04-13 16:20:54 -07001095#endif
1096
Jingning Han2a4da942016-11-03 18:31:30 -07001097 if (!FIXED_TX_TYPE) {
Rupert Swarbrickffbff572017-12-12 11:27:46 +00001098 const TX_SIZE square_tx_size = get_min_tx_size(tx_size);
Jingning Han2a4da942016-11-03 18:31:30 -07001099 const BLOCK_SIZE bsize = mbmi->sb_type;
Sarah Parkere68a3e42017-02-16 14:03:24 -08001100 if (get_ext_tx_types(tx_size, bsize, is_inter, cm->reduced_tx_set_used) >
1101 1 &&
Yue Cheneeacc4c2017-01-17 17:29:17 -08001102 ((!cm->seg.enabled && cm->base_qindex > 0) ||
1103 (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
Jingning Han641b1ad2016-11-04 09:58:36 -07001104 !mbmi->skip &&
Jingning Han2a4da942016-11-03 18:31:30 -07001105 !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
Hui Suddbcde22017-09-18 17:22:02 -07001106 const TxSetType tx_set_type = get_ext_tx_set_type(
1107 tx_size, bsize, is_inter, cm->reduced_tx_set_used);
Sarah Parkere68a3e42017-02-16 14:03:24 -08001108 const int eset =
1109 get_ext_tx_set(tx_size, bsize, is_inter, cm->reduced_tx_set_used);
Sarah Parker784596d2017-06-23 08:41:26 -07001110 // eset == 0 should correspond to a set with only DCT_DCT and there
1111 // is no need to send the tx_type
1112 assert(eset > 0);
Hui Suddbcde22017-09-18 17:22:02 -07001113 assert(av1_ext_tx_used[tx_set_type][tx_type]);
Jingning Han2a4da942016-11-03 18:31:30 -07001114 if (is_inter) {
Hui Suddbcde22017-09-18 17:22:02 -07001115 aom_write_symbol(w, av1_ext_tx_ind[tx_set_type][tx_type],
Sarah Parker784596d2017-06-23 08:41:26 -07001116 ec_ctx->inter_ext_tx_cdf[eset][square_tx_size],
Hui Suddbcde22017-09-18 17:22:02 -07001117 av1_num_ext_tx_set[tx_set_type]);
Jingning Han2a4da942016-11-03 18:31:30 -07001118 } else if (ALLOW_INTRA_EXT_TX) {
Yue Chen57b8ff62017-10-10 23:37:31 -07001119#if CONFIG_FILTER_INTRA
1120 PREDICTION_MODE intra_dir;
1121 if (mbmi->filter_intra_mode_info.use_filter_intra_mode[0])
1122 intra_dir = fimode_to_intradir[mbmi->filter_intra_mode_info
1123 .filter_intra_mode[0]];
1124 else
1125 intra_dir = mbmi->mode;
1126 aom_write_symbol(
1127 w, av1_ext_tx_ind[tx_set_type][tx_type],
1128 ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][intra_dir],
1129 av1_num_ext_tx_set[tx_set_type]);
1130#else
Sarah Parker784596d2017-06-23 08:41:26 -07001131 aom_write_symbol(
Hui Suddbcde22017-09-18 17:22:02 -07001132 w, av1_ext_tx_ind[tx_set_type][tx_type],
Sarah Parker784596d2017-06-23 08:41:26 -07001133 ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][mbmi->mode],
Hui Suddbcde22017-09-18 17:22:02 -07001134 av1_num_ext_tx_set[tx_set_type]);
Yue Chen57b8ff62017-10-10 23:37:31 -07001135#endif
Jingning Han2a4da942016-11-03 18:31:30 -07001136 }
Lester Lu432012f2017-08-17 14:39:29 -07001137 }
Jingning Han2a4da942016-11-03 18:31:30 -07001138 }
1139}
1140
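// Thin wrappers that code the luma and chroma intra modes with CDFs selected
// by block-size group and by the co-located luma mode, respectively.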
Jingning Hanf04254f2017-03-08 10:51:35 -08001141static void write_intra_mode(FRAME_CONTEXT *frame_ctx, BLOCK_SIZE bsize,
1142 PREDICTION_MODE mode, aom_writer *w) {
Hui Su814f41e2017-10-02 12:21:24 -07001143 aom_write_symbol(w, mode, frame_ctx->y_mode_cdf[size_group_lookup[bsize]],
Jingning Hanf04254f2017-03-08 10:51:35 -08001144 INTRA_MODES);
Jingning Hanf04254f2017-03-08 10:51:35 -08001145}
1146
1147static void write_intra_uv_mode(FRAME_CONTEXT *frame_ctx,
Luc Trudeaud6d9eee2017-07-12 12:36:50 -04001148 UV_PREDICTION_MODE uv_mode,
1149 PREDICTION_MODE y_mode, aom_writer *w) {
Luc Trudeau6e1cd782017-06-21 13:52:36 -04001150#if !CONFIG_CFL
Hui Su814f41e2017-10-02 12:21:24 -07001151 uv_mode = get_uv_mode(uv_mode);
Luc Trudeau6e1cd782017-06-21 13:52:36 -04001152#endif
1153 aom_write_symbol(w, uv_mode, frame_ctx->uv_mode_cdf[y_mode], UV_INTRA_MODES);
Jingning Hanf04254f2017-03-08 10:51:35 -08001154}
1155
Luc Trudeauf5334002017-04-25 12:21:26 -04001156#if CONFIG_CFL
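// CfL alpha signaling: the joint sign of the two chroma alphas is coded
// first, then a magnitude index follows for each plane whose sign is
// nonzero (a zero sign implies a zero alpha, so no magnitude is needed).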
David Michael Barrf6eaa152017-07-19 19:42:28 +09001157static void write_cfl_alphas(FRAME_CONTEXT *const ec_ctx, int idx,
1158 int joint_sign, aom_writer *w) {
1159 aom_write_symbol(w, joint_sign, ec_ctx->cfl_sign_cdf, CFL_JOINT_SIGNS);
1160 // Magnitudes are only signaled for nonzero codes.
1161 if (CFL_SIGN_U(joint_sign) != CFL_SIGN_ZERO) {
1162 aom_cdf_prob *cdf_u = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_U(joint_sign)];
1163 aom_write_symbol(w, CFL_IDX_U(idx), cdf_u, CFL_ALPHABET_SIZE);
1164 }
1165 if (CFL_SIGN_V(joint_sign) != CFL_SIGN_ZERO) {
1166 aom_cdf_prob *cdf_v = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_V(joint_sign)];
1167 aom_write_symbol(w, CFL_IDX_V(idx), cdf_v, CFL_ALPHABET_SIZE);
1168 }
Luc Trudeauf5334002017-04-25 12:21:26 -04001169}
1170#endif
1171
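// Signals mbmi->cdef_strength at the first non-skip coding block of each
// CDEF unit; the per-unit presets are reset at the top-left of every
// superblock, and nothing is coded for all-lossless frames.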
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001172static void write_cdef(AV1_COMMON *cm, aom_writer *w, int skip, int mi_col,
1173 int mi_row) {
1174 if (cm->all_lossless) return;
1175
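  // Mask that rounds an mi coordinate down to the top-left of its 64x64
  // unit, which is where the cdef_strength for this block is stored.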
1176 const int m = ~((1 << (6 - MI_SIZE_LOG2)) - 1);
1177 const MB_MODE_INFO *mbmi =
1178 &cm->mi_grid_visible[(mi_row & m) * cm->mi_stride + (mi_col & m)]->mbmi;
1179 // Initialise when at the top-left of the superblock
1180 if (!(mi_row & (cm->mib_size - 1)) &&
1181 !(mi_col & (cm->mib_size - 1))) { // Top left?
1182#if CONFIG_EXT_PARTITION
1183 cm->cdef_preset[0] = cm->cdef_preset[1] = cm->cdef_preset[2] =
1184 cm->cdef_preset[3] = -1;
1185#else
1186 cm->cdef_preset = -1;
1187#endif
1188 }
1189
1190// Emit CDEF param at first non-skip coding block
1191#if CONFIG_EXT_PARTITION
1192 const int mask = 1 << (6 - MI_SIZE_LOG2);
1193 const int index = cm->sb_size == BLOCK_128X128
1194 ? !!(mi_col & mask) + 2 * !!(mi_row & mask)
1195 : 0;
1196 if (cm->cdef_preset[index] == -1 && !skip) {
1197 aom_write_literal(w, mbmi->cdef_strength, cm->cdef_bits);
1198 cm->cdef_preset[index] = mbmi->cdef_strength;
1199 }
1200#else
1201 if (cm->cdef_preset == -1 && !skip) {
1202 aom_write_literal(w, mbmi->cdef_strength, cm->cdef_bits);
1203 cm->cdef_preset = mbmi->cdef_strength;
1204 }
1205#endif
1206}
1207
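// Writes the segment id of an inter block. With spatial segmentation the id
// may be coded either before or after the skip flag (cm->preskip_segid);
// with temporal updates enabled, a prediction flag is coded first and the
// explicit id only follows when the prediction is wrong.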
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001208static void write_inter_segment_id(AV1_COMP *cpi, aom_writer *w,
1209 const struct segmentation *const seg,
1210 struct segmentation_probs *const segp,
1211 int mi_row, int mi_col, int skip,
1212 int preskip) {
1213 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1214 const MODE_INFO *mi = xd->mi[0];
1215 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1216#if CONFIG_SPATIAL_SEGMENTATION
1217 AV1_COMMON *const cm = &cpi->common;
1218#else
1219 (void)mi_row;
1220 (void)mi_col;
1221 (void)skip;
1222 (void)preskip;
1223#endif
1224
1225 if (seg->update_map) {
1226#if CONFIG_SPATIAL_SEGMENTATION
1227 if (preskip) {
1228 if (!cm->preskip_segid) return;
1229 } else {
1230 if (cm->preskip_segid) return;
1231 if (skip) {
1232 int prev_segid = mbmi->segment_id;
1233 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
1234
1235 if (seg->temporal_update) {
1236 const int pred_flag = mbmi->seg_id_predicted;
1237 const int pred_context = av1_get_pred_context_seg_id(xd);
1238 unsigned(*temporal_predictor_count)[2] = cm->counts.seg.pred;
1239 unsigned *t_unpred_seg_counts = cm->counts.seg.tree_mispred;
1240
1241 temporal_predictor_count[pred_context][pred_flag]--;
1242 if (!pred_flag) t_unpred_seg_counts[prev_segid]--;
1243
1244 ((MB_MODE_INFO *)mbmi)->seg_id_predicted = 0;
1245 temporal_predictor_count[pred_context][0]--;
1246 t_unpred_seg_counts[mbmi->segment_id]--;
1247 }
1248 return;
1249 }
1250 }
1251#endif
1252 if (seg->temporal_update) {
1253 const int pred_flag = mbmi->seg_id_predicted;
1254 aom_cdf_prob *pred_cdf = av1_get_pred_cdf_seg_id(segp, xd);
1255 aom_write_symbol(w, pred_flag, pred_cdf, 2);
1256 if (!pred_flag) {
1257#if CONFIG_SPATIAL_SEGMENTATION
1258 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
1259#else
1260 write_segment_id(w, seg, segp, mbmi->segment_id);
1261#endif
1262 }
1263#if CONFIG_SPATIAL_SEGMENTATION
1264 if (pred_flag) {
1265 set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type,
1266 mi_row, mi_col, mbmi->segment_id);
1267 }
1268#endif
1269 } else {
1270#if CONFIG_SPATIAL_SEGMENTATION
1271 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
1272#else
1273 write_segment_id(w, seg, segp, mbmi->segment_id);
1274#endif
1275 }
1276 }
1277}
1278
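// Writes the mode info of a block in an inter frame: segment id, skip
// flags, CDEF and delta-q/delta-lf syntax, the intra or inter prediction
// mode, reference frames, motion vectors, compound prediction parameters
// and the interpolation filter.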
Angie Chiangc31ea682017-04-13 16:20:54 -07001279static void pack_inter_mode_mvs(AV1_COMP *cpi, const int mi_row,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001280 const int mi_col, aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001281 AV1_COMMON *const cm = &cpi->common;
Arild Fuldseth07441162016-08-15 15:07:52 +02001282 MACROBLOCK *const x = &cpi->td.mb;
1283 MACROBLOCKD *const xd = &x->e_mbd;
Thomas Davies24523292017-01-11 16:56:47 +00001284 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Angie Chiangc31ea682017-04-13 16:20:54 -07001285 const MODE_INFO *mi = xd->mi[0];
Thomas Davies24523292017-01-11 16:56:47 +00001286
Yaowu Xuc27fc142016-08-22 16:08:15 -07001287 const struct segmentation *const seg = &cm->seg;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001288 struct segmentation_probs *const segp = &ec_ctx->seg;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001289 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1290 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
1291 const PREDICTION_MODE mode = mbmi->mode;
1292 const int segment_id = mbmi->segment_id;
1293 const BLOCK_SIZE bsize = mbmi->sb_type;
1294 const int allow_hp = cm->allow_high_precision_mv;
1295 const int is_inter = is_inter_block(mbmi);
1296 const int is_compound = has_second_ref(mbmi);
1297 int skip, ref;
David Barker45390c12017-02-20 14:44:40 +00001298 (void)mi_row;
1299 (void)mi_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001300
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001301 write_inter_segment_id(cpi, w, seg, segp, mi_row, mi_col, 0, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001302
Zoe Liuf40a9572017-10-13 12:37:19 -07001303#if CONFIG_EXT_SKIP
1304 write_skip_mode(cm, xd, segment_id, mi, w);
1305
1306 if (mbmi->skip_mode) {
1307 skip = mbmi->skip;
1308 assert(skip);
1309 } else {
1310#endif // CONFIG_EXT_SKIP
1311 skip = write_skip(cm, xd, segment_id, mi, w);
1312#if CONFIG_EXT_SKIP
1313 }
1314#endif // CONFIG_EXT_SKIP
1315
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001316#if CONFIG_SPATIAL_SEGMENTATION
1317 write_inter_segment_id(cpi, w, seg, segp, mi_row, mi_col, skip, 0);
1318#endif
Zoe Liuf40a9572017-10-13 12:37:19 -07001319
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001320 write_cdef(cm, w, skip, mi_col, mi_row);
1321
Arild Fuldseth07441162016-08-15 15:07:52 +02001322 if (cm->delta_q_present_flag) {
Pavel Frolov1dbe92d2017-11-02 01:49:19 +03001323 int super_block_upper_left = ((mi_row & (cm->mib_size - 1)) == 0) &&
1324 ((mi_col & (cm->mib_size - 1)) == 0);
Pavel Frolovbfa2b8c2017-11-01 20:08:44 +03001325 if ((bsize != cm->sb_size || skip == 0) && super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01001326 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01001327 int reduced_delta_qindex =
1328 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00001329 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001330 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07001331#if CONFIG_EXT_DELTA_Q
Cheng Chena97394f2017-09-27 15:05:14 -07001332#if CONFIG_LOOPFILTER_LEVEL
1333 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07001334 if (cm->delta_lf_multi) {
1335 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id) {
1336 int reduced_delta_lflevel =
1337 (mbmi->curr_delta_lf[lf_id] - xd->prev_delta_lf[lf_id]) /
1338 cm->delta_lf_res;
1339 write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
1340 xd->prev_delta_lf[lf_id] = mbmi->curr_delta_lf[lf_id];
1341 }
1342 } else {
Cheng Chena97394f2017-09-27 15:05:14 -07001343 int reduced_delta_lflevel =
Cheng Chen880166a2017-10-02 17:48:48 -07001344 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
Cheng Chena97394f2017-09-27 15:05:14 -07001345 cm->delta_lf_res;
Cheng Chen880166a2017-10-02 17:48:48 -07001346 write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
1347 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
Cheng Chena97394f2017-09-27 15:05:14 -07001348 }
1349 }
1350#else
Fangwen Fu231fe422017-04-24 17:52:29 -07001351 if (cm->delta_lf_present_flag) {
1352 int reduced_delta_lflevel =
1353 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
1354 cm->delta_lf_res;
1355 write_delta_lflevel(cm, xd, reduced_delta_lflevel, w);
1356 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
1357 }
Cheng Chena97394f2017-09-27 15:05:14 -07001358#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07001359#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02001360 }
1361 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001362
Zoe Liuf40a9572017-10-13 12:37:19 -07001363#if CONFIG_EXT_SKIP
1364 if (!mbmi->skip_mode)
1365#endif // CONFIG_EXT_SKIP
1366 write_is_inter(cm, xd, mbmi->segment_id, w, is_inter);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001367
Debargha Mukherjee4def76a2017-10-19 13:38:35 -07001368 if (cm->tx_mode == TX_MODE_SELECT && block_signals_txsize(bsize) &&
Yaowu Xuc27fc142016-08-22 16:08:15 -07001369 !(is_inter && skip) && !xd->lossless[segment_id]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001370 if (is_inter) { // This implies skip flag is 0.
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001371 const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, bsize, 0);
Jingning Hanf64062f2016-11-02 16:22:18 -07001372 const int bh = tx_size_high_unit[max_tx_size];
1373 const int bw = tx_size_wide_unit[max_tx_size];
Jingning Han9ca05b72017-01-03 14:41:36 -08001374 const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
1375 const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001376 int idx, idy;
Jingning Hanfe45b212016-11-22 10:30:23 -08001377 for (idy = 0; idy < height; idy += bh)
1378 for (idx = 0; idx < width; idx += bw)
Debargha Mukherjeeedc73462017-10-31 15:13:32 -07001379 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, 0, idy, idx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001380 } else {
Jingning Han1b1dc932016-11-09 10:55:30 -08001381 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001382 write_selected_tx_size(cm, xd, w);
1383 }
1384 } else {
Jingning Han1b1dc932016-11-09 10:55:30 -08001385 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001386 }
1387
Zoe Liuf40a9572017-10-13 12:37:19 -07001388#if CONFIG_EXT_SKIP
Zoe Liu104d62e2017-12-07 12:44:45 -08001389 if (mbmi->skip_mode) {
1390#if CONFIG_JNT_COMP && SKIP_MODE_WITH_JNT_COMP
1391 const int cur_offset = (int)cm->frame_offset;
Zoe Liu4b847e12017-12-07 12:44:45 -08001392 int ref_offset[2];
1393 get_skip_mode_ref_offsets(cm, ref_offset);
1394 const int cur_to_ref0 = cur_offset - ref_offset[0];
1395 const int cur_to_ref1 = abs(cur_offset - ref_offset[1]);
1396 if (cur_to_ref0 != cur_to_ref1 && xd->all_one_sided_refs) {
Zoe Liu104d62e2017-12-07 12:44:45 -08001397 const int comp_index_ctx = get_comp_index_context(cm, xd);
1398 aom_write_symbol(w, mbmi->compound_idx,
1399 ec_ctx->compound_index_cdf[comp_index_ctx], 2);
1400 }
1401#endif // CONFIG_JNT_COMP && SKIP_MODE_WITH_JNT_COMP
1402 return;
1403 }
Zoe Liuf40a9572017-10-13 12:37:19 -07001404#endif // CONFIG_EXT_SKIP
1405
Yaowu Xuc27fc142016-08-22 16:08:15 -07001406 if (!is_inter) {
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001407 write_intra_mode(ec_ctx, bsize, mode, w);
Jingning Hand3a64432017-04-06 17:04:17 -07001408 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001409 xd->plane[1].subsampling_y)) {
Jingning Hanf04254f2017-03-08 10:51:35 -08001410 write_intra_uv_mode(ec_ctx, mbmi->uv_mode, mode, w);
Jingning Han0b7cbe62017-03-08 10:22:47 -08001411
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001412#if CONFIG_CFL
Luc Trudeau6e1cd782017-06-21 13:52:36 -04001413 if (mbmi->uv_mode == UV_CFL_PRED) {
Luc Trudeaue425f472017-12-08 14:48:46 -05001414 if (!is_cfl_allowed(mbmi)) {
Luc Trudeau4d6ea542017-11-22 21:24:42 -05001415 aom_internal_error(
1416 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
1417 "Chroma from Luma (CfL) cannot be signaled for a %dx%d block.",
1418 block_size_wide[bsize], block_size_high[bsize]);
1419 }
David Michael Barr23198662017-06-19 23:19:48 +09001420 write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001421 }
1422#endif
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001423 }
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001424
Yaowu Xuc27fc142016-08-22 16:08:15 -07001425#if CONFIG_EXT_INTRA
Joe Young3ca43bf2017-10-06 15:12:46 -07001426 write_intra_angle_info(xd, ec_ctx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001427#endif // CONFIG_EXT_INTRA
Hui Sue87fb232017-10-05 15:00:15 -07001428 if (av1_allow_palette(cm->allow_screen_content_tools, bsize))
Yaowu Xuc27fc142016-08-22 16:08:15 -07001429 write_palette_mode_info(cm, xd, mi, w);
hui su5db97432016-10-14 16:10:14 -07001430#if CONFIG_FILTER_INTRA
Yue Chen4eba69b2017-11-09 22:37:35 -08001431 write_filter_intra_mode_info(xd, mbmi, w);
hui su5db97432016-10-14 16:10:14 -07001432#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001433 } else {
Yaowu Xub0d0d002016-11-22 09:26:43 -08001434 int16_t mode_ctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001435 write_ref_frames(cm, xd, w);
1436
Yaowu Xuc27fc142016-08-22 16:08:15 -07001437 if (is_compound)
1438 mode_ctx = mbmi_ext->compound_mode_context[mbmi->ref_frame[0]];
1439 else
Yaowu Xuf883b422016-08-30 14:01:10 -07001440 mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
1441 mbmi->ref_frame, bsize, -1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001442
1443 // If segment skip is not enabled code the mode.
1444 if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001445 if (is_inter_compound_mode(mode))
1446 write_inter_compound_mode(cm, xd, w, mode, mode_ctx);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001447 else if (is_inter_singleref_mode(mode))
1448 write_inter_mode(w, mode, ec_ctx, mode_ctx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001449
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001450 if (mode == NEWMV || mode == NEW_NEWMV || have_nearmv_in_inter_mode(mode))
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001451 write_drl_idx(ec_ctx, mbmi, mbmi_ext, w);
1452 else
1453 assert(mbmi->ref_mv_idx == 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001454 }
1455
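    // Code the motion vector(s) of the modes that carry a new MV, each
    // written as a difference from its predicted reference MV using an nmv
    // context chosen from the reference MV stack.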
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001456 if (mode == NEWMV || mode == NEW_NEWMV) {
1457 int_mv ref_mv;
1458 for (ref = 0; ref < 1 + is_compound; ++ref) {
1459 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1460 int nmv_ctx =
1461 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1462 mbmi_ext->ref_mv_stack[rf_type], ref, mbmi->ref_mv_idx);
1463 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1464 ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0];
1465 av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv, nmvc,
1466 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001467 }
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001468 } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
1469 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1470 int nmv_ctx =
1471 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1472 mbmi_ext->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
1473 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1474 av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv,
1475 &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv, nmvc,
1476 allow_hp);
1477 } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
1478 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1479 int nmv_ctx =
1480 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1481 mbmi_ext->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
1482 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1483 av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv,
1484 &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv, nmvc,
1485 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001486 }
1487
Yaowu Xuc27fc142016-08-22 16:08:15 -07001488 if (cpi->common.reference_mode != COMPOUND_REFERENCE &&
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001489 cpi->common.allow_interintra_compound && is_interintra_allowed(mbmi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001490 const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
1491 const int bsize_group = size_group_lookup[bsize];
Thomas Daviescff91712017-07-07 11:49:55 +01001492 aom_write_symbol(w, interintra, ec_ctx->interintra_cdf[bsize_group], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001493 if (interintra) {
Thomas Davies299ff042017-06-27 13:41:59 +01001494 aom_write_symbol(w, mbmi->interintra_mode,
1495 ec_ctx->interintra_mode_cdf[bsize_group],
1496 INTERINTRA_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001497 if (is_interintra_wedge_used(bsize)) {
Thomas Daviescff91712017-07-07 11:49:55 +01001498 aom_write_symbol(w, mbmi->use_wedge_interintra,
1499 ec_ctx->wedge_interintra_cdf[bsize], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001500 if (mbmi->use_wedge_interintra) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001501 aom_write_literal(w, mbmi->interintra_wedge_index,
1502 get_wedge_bits_lookup(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001503 assert(mbmi->interintra_wedge_sign == 0);
1504 }
1505 }
1506 }
1507 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001508
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001509 if (mbmi->ref_frame[1] != INTRA_FRAME) write_motion_mode(cm, xd, mi, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001510
Cheng Chen33a13d92017-11-28 16:49:59 -08001511#if CONFIG_JNT_COMP
1512 // First, write the index indicating the current compound inter prediction mode group:
1513 // Group A (0): jnt_comp, compound_average
1514 // Group B (1): interintra, compound_segment, wedge
1515 if (has_second_ref(mbmi)) {
Zoe Liu5f11e912017-12-05 23:23:56 -08001516 const int masked_compound_used =
1517 is_any_masked_compound_used(bsize) && cm->allow_masked_compound;
Cheng Chen5a881722017-11-30 17:05:10 -08001518
Zoe Liu5f11e912017-12-05 23:23:56 -08001519 if (masked_compound_used) {
Cheng Chen5a881722017-11-30 17:05:10 -08001520 const int ctx_comp_group_idx = get_comp_group_idx_context(xd);
1521 aom_write_symbol(w, mbmi->comp_group_idx,
1522 ec_ctx->comp_group_idx_cdf[ctx_comp_group_idx], 2);
Zoe Liu5f11e912017-12-05 23:23:56 -08001523 } else {
1524 assert(mbmi->comp_group_idx == 0);
Cheng Chen5a881722017-11-30 17:05:10 -08001525 }
Cheng Chen33a13d92017-11-28 16:49:59 -08001526
1527 if (mbmi->comp_group_idx == 0) {
1528 if (mbmi->compound_idx)
1529 assert(mbmi->interinter_compound_type == COMPOUND_AVERAGE);
1530
1531 const int comp_index_ctx = get_comp_index_context(cm, xd);
1532 aom_write_symbol(w, mbmi->compound_idx,
1533 ec_ctx->compound_index_cdf[comp_index_ctx], 2);
1534 } else {
Zoe Liu5f11e912017-12-05 23:23:56 -08001535 assert(cpi->common.reference_mode != SINGLE_REFERENCE &&
1536 is_inter_compound_mode(mbmi->mode) &&
1537 mbmi->motion_mode == SIMPLE_TRANSLATION);
1538 assert(masked_compound_used);
1539 // compound_segment, wedge
Cheng Chen33a13d92017-11-28 16:49:59 -08001540 assert(mbmi->interinter_compound_type == COMPOUND_WEDGE ||
1541 mbmi->interinter_compound_type == COMPOUND_SEG);
Cheng Chen33a13d92017-11-28 16:49:59 -08001542
Zoe Liu5f11e912017-12-05 23:23:56 -08001543 if (is_interinter_compound_used(COMPOUND_WEDGE, bsize))
1544 aom_write_symbol(w, mbmi->interinter_compound_type - 1,
1545 ec_ctx->compound_type_cdf[bsize],
1546 COMPOUND_TYPES - 1);
1547
1548 if (mbmi->interinter_compound_type == COMPOUND_WEDGE) {
1549 assert(is_interinter_compound_used(COMPOUND_WEDGE, bsize));
1550 aom_write_literal(w, mbmi->wedge_index, get_wedge_bits_lookup(bsize));
1551 aom_write_bit(w, mbmi->wedge_sign);
1552 } else {
1553 assert(mbmi->interinter_compound_type == COMPOUND_SEG);
1554 aom_write_literal(w, mbmi->mask_type, MAX_SEG_MASK_BITS);
Cheng Chen33a13d92017-11-28 16:49:59 -08001555 }
1556 }
1557 }
1558#else // CONFIG_JNT_COMP
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001559 if (cpi->common.reference_mode != SINGLE_REFERENCE &&
Zoe Liu85b66462017-04-20 14:28:19 -07001560 is_inter_compound_mode(mbmi->mode) &&
Zoe Liu85b66462017-04-20 14:28:19 -07001561 mbmi->motion_mode == SIMPLE_TRANSLATION &&
Zoe Liu85b66462017-04-20 14:28:19 -07001562 is_any_masked_compound_used(bsize)) {
Cheng Chen33a13d92017-11-28 16:49:59 -08001563 if (cm->allow_masked_compound) {
Sarah Parker680b9b12017-08-16 18:55:34 -07001564 if (!is_interinter_compound_used(COMPOUND_WEDGE, bsize))
1565 aom_write_bit(w, mbmi->interinter_compound_type == COMPOUND_AVERAGE);
1566 else
Cheng Chenbdd6ca82017-10-23 22:34:25 -07001567 aom_write_symbol(w, mbmi->interinter_compound_type,
1568 ec_ctx->compound_type_cdf[bsize], COMPOUND_TYPES);
Sarah Parker680b9b12017-08-16 18:55:34 -07001569 if (is_interinter_compound_used(COMPOUND_WEDGE, bsize) &&
1570 mbmi->interinter_compound_type == COMPOUND_WEDGE) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001571 aom_write_literal(w, mbmi->wedge_index, get_wedge_bits_lookup(bsize));
1572 aom_write_bit(w, mbmi->wedge_sign);
1573 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001574 if (mbmi->interinter_compound_type == COMPOUND_SEG) {
Cheng Chenbdd6ca82017-10-23 22:34:25 -07001575 aom_write_literal(w, mbmi->mask_type, MAX_SEG_MASK_BITS);
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001576 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001577 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001578 }
Cheng Chen33a13d92017-11-28 16:49:59 -08001579#endif // CONFIG_JNT_COMP
Yaowu Xuc27fc142016-08-22 16:08:15 -07001580
Debargha Mukherjee0df711f2017-05-02 16:00:20 -07001581 write_mb_interp_filter(cpi, xd, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001582 }
1583
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001584#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001585 av1_write_tx_type(cm, xd, w);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001586#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07001587}
1588
Hui Suc2232cf2017-10-11 17:32:56 -07001589#if CONFIG_INTRABC
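// Codes the use_intrabc flag and, when intra block copy is used, the
// transform size syntax, the displacement vector (as a delta from its
// reference DV) and, without TXK_SEL, the transform type.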
1590static void write_intrabc_info(AV1_COMMON *cm, MACROBLOCKD *xd,
1591 const MB_MODE_INFO_EXT *mbmi_ext,
1592 int enable_tx_size, aom_writer *w) {
1593 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
1594 int use_intrabc = is_intrabc_block(mbmi);
1595 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1596 aom_write_symbol(w, use_intrabc, ec_ctx->intrabc_cdf, 2);
1597 if (use_intrabc) {
1598 assert(mbmi->mode == DC_PRED);
1599 assert(mbmi->uv_mode == UV_DC_PRED);
Hui Su12546aa2017-10-13 16:10:01 -07001600 if ((enable_tx_size && !mbmi->skip)) {
Hui Su12546aa2017-10-13 16:10:01 -07001601 const BLOCK_SIZE bsize = mbmi->sb_type;
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001602 const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, bsize, 0);
Hui Su12546aa2017-10-13 16:10:01 -07001603 const int bh = tx_size_high_unit[max_tx_size];
1604 const int bw = tx_size_wide_unit[max_tx_size];
1605 const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
1606 const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
Hui Su12546aa2017-10-13 16:10:01 -07001607 int idx, idy;
1608 for (idy = 0; idy < height; idy += bh) {
1609 for (idx = 0; idx < width; idx += bw) {
Debargha Mukherjeeedc73462017-10-31 15:13:32 -07001610 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, 0, idy, idx, w);
Hui Su12546aa2017-10-13 16:10:01 -07001611 }
1612 }
Hui Su12546aa2017-10-13 16:10:01 -07001613 } else {
Hui Su12546aa2017-10-13 16:10:01 -07001614 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, mbmi->skip, xd);
Hui Su12546aa2017-10-13 16:10:01 -07001615 }
Hui Suc2232cf2017-10-11 17:32:56 -07001616 int_mv dv_ref = mbmi_ext->ref_mvs[INTRA_FRAME][0];
1617 av1_encode_dv(w, &mbmi->mv[0].as_mv, &dv_ref.as_mv, &ec_ctx->ndvc);
Sebastien Alaiwan3bac9922017-11-02 12:34:41 +01001618#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001619 av1_write_tx_type(cm, xd, w);
Sebastien Alaiwan3bac9922017-11-02 12:34:41 +01001620#endif // !CONFIG_TXK_SEL
Hui Suc2232cf2017-10-11 17:32:56 -07001621 }
1622}
1623#endif // CONFIG_INTRABC
1624
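// Writes the mode info of a block in an intra-only (key) frame: segment id,
// skip flag, CDEF and delta-q syntax, optional intra block copy info,
// transform size, and the luma/chroma intra modes together with their
// palette, CfL and filter-intra side information.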
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001625static void write_mb_modes_kf(AV1_COMP *cpi, MACROBLOCKD *xd,
Alex Converse44c2bad2017-05-11 09:36:10 -07001626#if CONFIG_INTRABC
1627 const MB_MODE_INFO_EXT *mbmi_ext,
1628#endif // CONFIG_INTRABC
Jingning Han36fe3202017-02-20 22:31:49 -08001629 const int mi_row, const int mi_col,
Angie Chiangc31ea682017-04-13 16:20:54 -07001630 aom_writer *w) {
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001631 AV1_COMMON *const cm = &cpi->common;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001632 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001633 const struct segmentation *const seg = &cm->seg;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001634 struct segmentation_probs *const segp = &ec_ctx->seg;
Angie Chiangc31ea682017-04-13 16:20:54 -07001635 const MODE_INFO *const mi = xd->mi[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001636 const MODE_INFO *const above_mi = xd->above_mi;
1637 const MODE_INFO *const left_mi = xd->left_mi;
1638 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1639 const BLOCK_SIZE bsize = mbmi->sb_type;
David Barker45390c12017-02-20 14:44:40 +00001640 (void)mi_row;
1641 (void)mi_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001642
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001643#if CONFIG_SPATIAL_SEGMENTATION
1644 if (cm->preskip_segid && seg->update_map)
1645 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
1646#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001647 if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id);
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001648#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001649
Alex Converse619576b2017-05-10 15:14:18 -07001650 const int skip = write_skip(cm, xd, mbmi->segment_id, mi, w);
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001651
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001652#if CONFIG_SPATIAL_SEGMENTATION
1653 if (!cm->preskip_segid && seg->update_map)
1654 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, skip);
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +01001655#endif
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001656
1657 write_cdef(cm, w, skip, mi_col, mi_row);
1658
Arild Fuldseth07441162016-08-15 15:07:52 +02001659 if (cm->delta_q_present_flag) {
Pavel Frolov1dbe92d2017-11-02 01:49:19 +03001660 int super_block_upper_left = ((mi_row & (cm->mib_size - 1)) == 0) &&
1661 ((mi_col & (cm->mib_size - 1)) == 0);
Pavel Frolovbfa2b8c2017-11-01 20:08:44 +03001662 if ((bsize != cm->sb_size || skip == 0) && super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01001663 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01001664 int reduced_delta_qindex =
1665 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00001666 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001667 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07001668#if CONFIG_EXT_DELTA_Q
Cheng Chena97394f2017-09-27 15:05:14 -07001669#if CONFIG_LOOPFILTER_LEVEL
1670 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07001671 if (cm->delta_lf_multi) {
1672 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id) {
1673 int reduced_delta_lflevel =
1674 (mbmi->curr_delta_lf[lf_id] - xd->prev_delta_lf[lf_id]) /
1675 cm->delta_lf_res;
1676 write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
1677 xd->prev_delta_lf[lf_id] = mbmi->curr_delta_lf[lf_id];
1678 }
1679 } else {
Cheng Chena97394f2017-09-27 15:05:14 -07001680 int reduced_delta_lflevel =
Cheng Chen880166a2017-10-02 17:48:48 -07001681 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
Cheng Chena97394f2017-09-27 15:05:14 -07001682 cm->delta_lf_res;
Cheng Chen880166a2017-10-02 17:48:48 -07001683 write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
1684 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
Cheng Chena97394f2017-09-27 15:05:14 -07001685 }
1686 }
1687#else
Fangwen Fu231fe422017-04-24 17:52:29 -07001688 if (cm->delta_lf_present_flag) {
1689 int reduced_delta_lflevel =
1690 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
1691 cm->delta_lf_res;
1692 write_delta_lflevel(cm, xd, reduced_delta_lflevel, w);
1693 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
1694 }
Cheng Chena97394f2017-09-27 15:05:14 -07001695#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07001696#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02001697 }
1698 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001699
Alex Conversef71808c2017-06-06 12:21:17 -07001700 int enable_tx_size = cm->tx_mode == TX_MODE_SELECT &&
Rupert Swarbrickfcff0b22017-10-05 09:26:04 +01001701 block_signals_txsize(bsize) &&
Alex Conversef71808c2017-06-06 12:21:17 -07001702 !xd->lossless[mbmi->segment_id];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001703
Alex Converse28744302017-04-13 14:46:22 -07001704#if CONFIG_INTRABC
RogerZhouca865462017-10-05 15:06:27 -07001705 if (av1_allow_intrabc(bsize, cm)) {
Hui Suc2232cf2017-10-11 17:32:56 -07001706 write_intrabc_info(cm, xd, mbmi_ext, enable_tx_size, w);
1707 if (is_intrabc_block(mbmi)) return;
Alex Converse28744302017-04-13 14:46:22 -07001708 }
1709#endif // CONFIG_INTRABC
Hui Suc2232cf2017-10-11 17:32:56 -07001710
Alex Conversef71808c2017-06-06 12:21:17 -07001711 if (enable_tx_size) write_selected_tx_size(cm, xd, w);
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001712#if CONFIG_INTRABC
Hui Su12546aa2017-10-13 16:10:01 -07001713 if (cm->allow_screen_content_tools)
1714 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, mbmi->skip, xd);
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001715#endif // CONFIG_INTRABC
Alex Converse28744302017-04-13 14:46:22 -07001716
Jingning Han3e4c6a62017-12-14 14:50:57 -08001717 write_intra_mode_kf(ec_ctx, mi, above_mi, left_mi, mbmi->mode, w);
Jingning Han0b7cbe62017-03-08 10:22:47 -08001718
Jingning Hand3a64432017-04-06 17:04:17 -07001719 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
Luc Trudeau2c317902017-04-28 11:06:50 -04001720 xd->plane[1].subsampling_y)) {
Jingning Hanf04254f2017-03-08 10:51:35 -08001721 write_intra_uv_mode(ec_ctx, mbmi->uv_mode, mbmi->mode, w);
Jingning Han0b7cbe62017-03-08 10:22:47 -08001722
Luc Trudeauf5334002017-04-25 12:21:26 -04001723#if CONFIG_CFL
Luc Trudeau6e1cd782017-06-21 13:52:36 -04001724 if (mbmi->uv_mode == UV_CFL_PRED) {
Luc Trudeaue425f472017-12-08 14:48:46 -05001725 if (!is_cfl_allowed(mbmi)) {
Luc Trudeau4d6ea542017-11-22 21:24:42 -05001726 aom_internal_error(
1727 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
1728 "Chroma from Luma (CfL) cannot be signaled for a %dx%d block.",
1729 block_size_wide[bsize], block_size_high[bsize]);
1730 }
David Michael Barr23198662017-06-19 23:19:48 +09001731 write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
Luc Trudeauf5334002017-04-25 12:21:26 -04001732 }
Luc Trudeauf5334002017-04-25 12:21:26 -04001733#endif
Luc Trudeau2c317902017-04-28 11:06:50 -04001734 }
Debargha Mukherjee6ea917e2017-10-19 09:31:29 -07001735
Yaowu Xuc27fc142016-08-22 16:08:15 -07001736#if CONFIG_EXT_INTRA
Joe Young3ca43bf2017-10-06 15:12:46 -07001737 write_intra_angle_info(xd, ec_ctx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001738#endif // CONFIG_EXT_INTRA
Hui Sue87fb232017-10-05 15:00:15 -07001739 if (av1_allow_palette(cm->allow_screen_content_tools, bsize))
Yaowu Xuc27fc142016-08-22 16:08:15 -07001740 write_palette_mode_info(cm, xd, mi, w);
hui su5db97432016-10-14 16:10:14 -07001741#if CONFIG_FILTER_INTRA
Yue Chen4eba69b2017-11-09 22:37:35 -08001742 write_filter_intra_mode_info(xd, mbmi, w);
hui su5db97432016-10-14 16:10:14 -07001743#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001744
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001745#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001746 av1_write_tx_type(cm, xd, w);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001747#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07001748}
1749
Angie Chiangd4022822016-11-02 18:30:25 -07001750#if CONFIG_RD_DEBUG
1751static void dump_mode_info(MODE_INFO *mi) {
1752 printf("\nmi->mbmi.mi_row == %d\n", mi->mbmi.mi_row);
1753 printf("&& mi->mbmi.mi_col == %d\n", mi->mbmi.mi_col);
1754 printf("&& mi->mbmi.sb_type == %d\n", mi->mbmi.sb_type);
1755 printf("&& mi->mbmi.tx_size == %d\n", mi->mbmi.tx_size);
Jingning Han2fac8a42017-12-14 16:26:00 -08001756 printf("&& mi->mbmi.mode == %d\n", mi->mbmi.mode);
Angie Chiangd4022822016-11-02 18:30:25 -07001757}
Angie Chiangd02001d2016-11-06 15:31:49 -08001758static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats,
1759 int plane) {
1760 if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) {
1761 int r, c;
1762 printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n",
1763 plane, rd_stats->txb_coeff_cost[plane], token_stats->cost);
Angie Chiangd02001d2016-11-06 15:31:49 -08001764 printf("rd txb_coeff_cost_map\n");
1765 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
1766 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
1767 printf("%d ", rd_stats->txb_coeff_cost_map[plane][r][c]);
1768 }
1769 printf("\n");
1770 }
1771
1772 printf("pack txb_coeff_cost_map\n");
1773 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
1774 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
1775 printf("%d ", token_stats->txb_coeff_cost_map[r][c]);
1776 }
1777 printf("\n");
1778 }
Angie Chiangd02001d2016-11-06 15:31:49 -08001779 return 1;
1780 }
1781 return 0;
1782}
Angie Chiangd4022822016-11-02 18:30:25 -07001783#endif
1784
Di Chen56586622017-06-09 13:49:44 -07001785#if ENC_MISMATCH_DEBUG
1786static void enc_dump_logs(AV1_COMP *cpi, int mi_row, int mi_col) {
1787 AV1_COMMON *const cm = &cpi->common;
1788 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1789 MODE_INFO *m;
1790 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
1791 m = xd->mi[0];
1792 if (is_inter_block(&m->mbmi)) {
Zoe Liuf40a9572017-10-13 12:37:19 -07001793#define FRAME_TO_CHECK 11
Zoe Liu17af2742017-10-06 10:36:42 -07001794 if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
Di Chen56586622017-06-09 13:49:44 -07001795 const MB_MODE_INFO *const mbmi = &m->mbmi;
1796 const BLOCK_SIZE bsize = mbmi->sb_type;
1797
1798 int_mv mv[2];
1799 int is_comp_ref = has_second_ref(&m->mbmi);
1800 int ref;
1801
1802 for (ref = 0; ref < 1 + is_comp_ref; ++ref)
1803 mv[ref].as_mv = m->mbmi.mv[ref].as_mv;
1804
1805 if (!is_comp_ref) {
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001806 mv[1].as_int = 0;
Di Chen56586622017-06-09 13:49:44 -07001807 }
Di Chen56586622017-06-09 13:49:44 -07001808
Di Chen56586622017-06-09 13:49:44 -07001809 MACROBLOCK *const x = &cpi->td.mb;
Di Chen56586622017-06-09 13:49:44 -07001810 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
Zoe Liuf40a9572017-10-13 12:37:19 -07001811 const int16_t mode_ctx =
1812 is_comp_ref ? mbmi_ext->compound_mode_context[mbmi->ref_frame[0]]
1813 : av1_mode_context_analyzer(mbmi_ext->mode_context,
1814 mbmi->ref_frame, bsize, -1);
1815
Di Chen56586622017-06-09 13:49:44 -07001816 const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
1817 int16_t zeromv_ctx = -1;
1818 int16_t refmv_ctx = -1;
Zoe Liuf40a9572017-10-13 12:37:19 -07001819
Di Chen56586622017-06-09 13:49:44 -07001820 if (mbmi->mode != NEWMV) {
Sarah Parker2b9ec2e2017-10-30 17:34:08 -07001821 zeromv_ctx = (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
Di Chen56586622017-06-09 13:49:44 -07001822 if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) {
Sarah Parker2b9ec2e2017-10-30 17:34:08 -07001823 assert(mbmi->mode == GLOBALMV);
Di Chen56586622017-06-09 13:49:44 -07001824 }
Sarah Parker2b9ec2e2017-10-30 17:34:08 -07001825 if (mbmi->mode != GLOBALMV) {
Di Chen56586622017-06-09 13:49:44 -07001826 refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
1827 if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
1828 if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
1829 if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;
1830 }
1831 }
1832
Zoe Liuf40a9572017-10-13 12:37:19 -07001833#if CONFIG_EXT_SKIP
1834 printf(
1835 "=== ENCODER ===: "
1836 "Frame=%d, (mi_row,mi_col)=(%d,%d), skip_mode=%d, mode=%d, bsize=%d, "
1837 "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
1838 "ref[1]=%d, motion_mode=%d, mode_ctx=%d, "
1839 "newmv_ctx=%d, zeromv_ctx=%d, refmv_ctx=%d, tx_size=%d\n",
1840 cm->current_video_frame, mi_row, mi_col, mbmi->skip_mode, mbmi->mode,
1841 bsize, cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col,
1842 mv[1].as_mv.row, mv[1].as_mv.col, mbmi->ref_frame[0],
1843 mbmi->ref_frame[1], mbmi->motion_mode, mode_ctx, newmv_ctx,
1844 zeromv_ctx, refmv_ctx, mbmi->tx_size);
1845#else
Di Chen56586622017-06-09 13:49:44 -07001846 printf(
1847 "=== ENCODER ===: "
1848 "Frame=%d, (mi_row,mi_col)=(%d,%d), mode=%d, bsize=%d, "
1849 "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
Zoe Liuf40a9572017-10-13 12:37:19 -07001850 "ref[1]=%d, motion_mode=%d, mode_ctx=%d, "
1851 "newmv_ctx=%d, zeromv_ctx=%d, refmv_ctx=%d, tx_size=%d\n",
Di Chen56586622017-06-09 13:49:44 -07001852 cm->current_video_frame, mi_row, mi_col, mbmi->mode, bsize,
1853 cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col, mv[1].as_mv.row,
1854 mv[1].as_mv.col, mbmi->ref_frame[0], mbmi->ref_frame[1],
Zoe Liuf40a9572017-10-13 12:37:19 -07001855 mbmi->motion_mode, mode_ctx, newmv_ctx, zeromv_ctx, refmv_ctx,
1856 mbmi->tx_size);
1857#endif // CONFIG_EXT_SKIP
Di Chen56586622017-06-09 13:49:44 -07001858 }
1859 }
1860}
1861#endif // ENC_MISMATCH_DEBUG
1862
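// Sets up the macroblockd context for the block at (mi_row, mi_col) and
// dispatches to write_mb_modes_kf for intra-only frames or to
// pack_inter_mode_mvs otherwise.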
Yue Chen64550b62017-01-12 12:18:22 -08001863static void write_mbmi_b(AV1_COMP *cpi, const TileInfo *const tile,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001864 aom_writer *w, int mi_row, int mi_col) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001865 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001866 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1867 MODE_INFO *m;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001868 int bh, bw;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001869 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
1870 m = xd->mi[0];
1871
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001872 assert(m->mbmi.sb_type <= cm->sb_size ||
Rupert Swarbrick72678572017-08-02 12:05:26 +01001873 (m->mbmi.sb_type >= BLOCK_SIZES && m->mbmi.sb_type < BLOCK_SIZES_ALL));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001874
Jingning Hanc709e1f2016-12-06 14:48:09 -08001875 bh = mi_size_high[m->mbmi.sb_type];
1876 bw = mi_size_wide[m->mbmi.sb_type];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001877
1878 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1879
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001880 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001881#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001882 cm->dependent_horz_tiles,
1883#endif // CONFIG_DEPENDENT_HORZTILES
1884 cm->mi_rows, cm->mi_cols);
Yushin Cho77bba8d2016-11-04 16:36:56 -07001885
Yaowu Xuc27fc142016-08-22 16:08:15 -07001886 if (frame_is_intra_only(cm)) {
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001887#if CONFIG_INTRABC
Hui Su12546aa2017-10-13 16:10:01 -07001888 if (cm->allow_screen_content_tools) {
1889 xd->above_txfm_context =
1890 cm->above_txfm_context + (mi_col << TX_UNIT_WIDE_LOG2);
1891 xd->left_txfm_context = xd->left_txfm_context_buffer +
1892 ((mi_row & MAX_MIB_MASK) << TX_UNIT_HIGH_LOG2);
1893 }
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001894#endif // CONFIG_INTRABC
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001895 write_mb_modes_kf(cpi, xd,
Alex Converse44c2bad2017-05-11 09:36:10 -07001896#if CONFIG_INTRABC
1897 cpi->td.mb.mbmi_ext,
1898#endif // CONFIG_INTRABC
1899 mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001900 } else {
Jingning Han331662e2017-05-30 17:03:32 -07001901 xd->above_txfm_context =
1902 cm->above_txfm_context + (mi_col << TX_UNIT_WIDE_LOG2);
1903 xd->left_txfm_context = xd->left_txfm_context_buffer +
1904 ((mi_row & MAX_MIB_MASK) << TX_UNIT_HIGH_LOG2);
Angie Chiang38edf682017-02-21 15:13:09 -08001905 // has_subpel_mv_component needs the ref frame buffers set up to look
1906 // up if they are scaled. has_subpel_mv_component is in turn needed by
Yaowu Xuc27fc142016-08-22 16:08:15 -07001907 // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
1908 set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
Zoe Liu85b66462017-04-20 14:28:19 -07001909
Di Chen56586622017-06-09 13:49:44 -07001910#if ENC_MISMATCH_DEBUG
Di Chen56586622017-06-09 13:49:44 -07001911 enc_dump_logs(cpi, mi_row, mi_col);
1912#endif // ENC_MISMATCH_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07001913
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001914 pack_inter_mode_mvs(cpi, mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001915 }
Yue Chen64550b62017-01-12 12:18:22 -08001916}
Yaowu Xuc27fc142016-08-22 16:08:15 -07001917
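// Packs the coefficient tokens of one plane of an inter block, stepping
// through the plane in units of its maximum transform size within the
// processing window (derived from a 64x64 luma area) that starts at
// (row, col).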
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001918static void write_inter_txb_coeff(AV1_COMMON *const cm, MACROBLOCK *const x,
1919 MB_MODE_INFO *const mbmi, aom_writer *w,
1920 const TOKENEXTRA **tok,
1921 const TOKENEXTRA *const tok_end,
1922 TOKEN_STATS *token_stats, const int row,
1923 const int col, int *block, const int plane) {
1924 MACROBLOCKD *const xd = &x->e_mbd;
1925 const struct macroblockd_plane *const pd = &xd->plane[plane];
Debargha Mukherjee19619882017-11-22 13:13:14 -08001926 const BLOCK_SIZE bsize = mbmi->sb_type;
1927 const BLOCK_SIZE bsizec =
1928 scale_chroma_bsize(bsize, pd->subsampling_x, pd->subsampling_y);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001929
Debargha Mukherjee5d149e12017-12-14 12:49:51 -08001930 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsizec, pd);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001931
Debargha Mukherjee19619882017-11-22 13:13:14 -08001932 TX_SIZE max_tx_size = get_vartx_max_txsize(
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001933 xd, plane_bsize, pd->subsampling_x || pd->subsampling_y);
Debargha Mukherjee19619882017-11-22 13:13:14 -08001934#if DISABLE_VARTX_FOR_CHROMA == 2
1935 // If the luma transform size is split at least one level, split the chroma
1936 // by one level. Otherwise use the largest possible transform size for
1937 // chroma.
1938 if (plane && (pd->subsampling_x || pd->subsampling_y)) {
1939 const TX_SIZE l_max_tx_size = get_vartx_max_txsize(xd, bsizec, 0);
1940 const int is_split =
1941 (l_max_tx_size != mbmi->inter_tx_size[0][0] && bsize == bsizec &&
1942 txsize_to_bsize[l_max_tx_size] == bsizec);
Debargha Mukherjeee4e18fc2017-12-06 23:43:24 -08001943 if (is_split) max_tx_size = sub_tx_size_map[1][max_tx_size];
Debargha Mukherjee19619882017-11-22 13:13:14 -08001944 }
1945#endif // DISABLE_VARTX_FOR_CHROMA == 2
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001946 const int step =
1947 tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
1948 const int bkw = tx_size_wide_unit[max_tx_size];
1949 const int bkh = tx_size_high_unit[max_tx_size];
1950
1951 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
1952 int mu_blocks_wide = block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
1953 int mu_blocks_high = block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
1954
1955 int blk_row, blk_col;
1956
1957 const int num_4x4_w = block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
1958 const int num_4x4_h = block_size_high[plane_bsize] >> tx_size_wide_log2[0];
1959
1960 const int unit_height = AOMMIN(mu_blocks_high + row, num_4x4_h);
1961 const int unit_width = AOMMIN(mu_blocks_wide + col, num_4x4_w);
1962 for (blk_row = row; blk_row < unit_height; blk_row += bkh) {
1963 for (blk_col = col; blk_col < unit_width; blk_col += bkw) {
1964 pack_txb_tokens(w,
1965#if CONFIG_LV_MAP
1966 cm, x,
1967#endif
1968 tok, tok_end, xd, mbmi, plane, plane_bsize, cm->bit_depth,
1969 *block, blk_row, blk_col, max_tx_size, token_stats);
1970 *block += step;
1971 }
1972 }
1973}
1974
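// Writes the coded residue of one block: palette color-index tokens for any
// plane that uses palette mode and then, unless the block is skipped, the
// transform coefficient tokens of every plane present at this chroma
// position.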
Yue Chen64550b62017-01-12 12:18:22 -08001975static void write_tokens_b(AV1_COMP *cpi, const TileInfo *const tile,
1976 aom_writer *w, const TOKENEXTRA **tok,
1977 const TOKENEXTRA *const tok_end, int mi_row,
1978 int mi_col) {
1979 AV1_COMMON *const cm = &cpi->common;
1980 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Wei-Ting Lin1d46d902017-06-26 15:57:18 -07001981 const int mi_offset = mi_row * cm->mi_stride + mi_col;
1982 MODE_INFO *const m = *(cm->mi_grid_visible + mi_offset);
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001983 MB_MODE_INFO *const mbmi = &m->mbmi;
Yue Chen64550b62017-01-12 12:18:22 -08001984 int plane;
1985 int bh, bw;
Yushin Cho258a0242017-03-06 13:53:01 -08001986 MACROBLOCK *const x = &cpi->td.mb;
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001987#if CONFIG_LV_MAP
Yue Chen64550b62017-01-12 12:18:22 -08001988 (void)tok;
1989 (void)tok_end;
1990#endif
Wei-Ting Lin1d46d902017-06-26 15:57:18 -07001991 xd->mi = cm->mi_grid_visible + mi_offset;
Yue Chen64550b62017-01-12 12:18:22 -08001992
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001993 assert(mbmi->sb_type <= cm->sb_size ||
Rupert Swarbrick72678572017-08-02 12:05:26 +01001994 (mbmi->sb_type >= BLOCK_SIZES && mbmi->sb_type < BLOCK_SIZES_ALL));
Yue Chen64550b62017-01-12 12:18:22 -08001995
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001996 bh = mi_size_high[mbmi->sb_type];
1997 bw = mi_size_wide[mbmi->sb_type];
Yue Chen64550b62017-01-12 12:18:22 -08001998 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1999
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002000 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002001#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002002 cm->dependent_horz_tiles,
2003#endif // CONFIG_DEPENDENT_HORZTILES
2004 cm->mi_rows, cm->mi_cols);
Yue Chen64550b62017-01-12 12:18:22 -08002005
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002006 const int num_planes = av1_num_planes(cm);
2007 for (plane = 0; plane < AOMMIN(2, num_planes); ++plane) {
Fangwen Fub3be9262017-03-06 15:34:28 -08002008 const uint8_t palette_size_plane =
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002009 mbmi->palette_mode_info.palette_size[plane];
Zoe Liuf40a9572017-10-13 12:37:19 -07002010#if CONFIG_EXT_SKIP
2011 assert(!mbmi->skip_mode || !palette_size_plane);
2012#endif // CONFIG_EXT_SKIP
Fangwen Fub3be9262017-03-06 15:34:28 -08002013 if (palette_size_plane > 0) {
Alex Converseed37d012017-04-24 11:15:24 -07002014#if CONFIG_INTRABC
2015 assert(mbmi->use_intrabc == 0);
2016#endif
Fangwen Fub3be9262017-03-06 15:34:28 -08002017 int rows, cols;
hui su9bc1d8d2017-03-24 12:36:03 -07002018 assert(mbmi->sb_type >= BLOCK_8X8);
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002019 av1_get_block_dimensions(mbmi->sb_type, plane, xd, NULL, NULL, &rows,
Fangwen Fub3be9262017-03-06 15:34:28 -08002020 &cols);
2021 assert(*tok < tok_end);
Sarah Parker99e7daa2017-08-29 10:30:13 -07002022 pack_map_tokens(w, tok, palette_size_plane, rows * cols);
Jingning Han13648e72017-08-17 09:21:53 -07002023#if !CONFIG_LV_MAP
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002024 assert(*tok < tok_end + mbmi->skip);
Jingning Han13648e72017-08-17 09:21:53 -07002025#endif // !CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07002026 }
Fangwen Fub3be9262017-03-06 15:34:28 -08002027 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002028
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002029 if (!mbmi->skip) {
Yushin Chod0b77ac2017-10-20 17:33:16 -07002030#if !CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07002031 assert(*tok < tok_end);
Yushin Cho258a0242017-03-06 13:53:01 -08002032#endif
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002033 for (plane = 0; plane < num_planes; ++plane) {
Debargha Mukherjee3aa28112017-11-25 07:03:31 -08002034 const struct macroblockd_plane *const pd = &xd->plane[plane];
2035 if (!is_chroma_reference(mi_row, mi_col, mbmi->sb_type, pd->subsampling_x,
2036 pd->subsampling_y)) {
Jingning Han13648e72017-08-17 09:21:53 -07002037#if !CONFIG_LV_MAP
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002038 (*tok)++;
Jingning Han13648e72017-08-17 09:21:53 -07002039#endif // !CONFIG_LV_MAP
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002040 continue;
2041 }
Debargha Mukherjee3aa28112017-11-25 07:03:31 -08002042 const BLOCK_SIZE bsize = mbmi->sb_type;
2043 const BLOCK_SIZE bsizec =
2044 scale_chroma_bsize(bsize, pd->subsampling_x, pd->subsampling_y);
Debargha Mukherjee5d149e12017-12-14 12:49:51 -08002045 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsizec, pd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002046
Jingning Han42a0fb32016-10-31 10:43:31 -07002047 const int num_4x4_w =
2048 block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
2049 const int num_4x4_h =
2050 block_size_high[plane_bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002051 int row, col;
Angie Chiangd4022822016-11-02 18:30:25 -07002052 TOKEN_STATS token_stats;
Angie Chiangd02001d2016-11-06 15:31:49 -08002053 init_token_stats(&token_stats);
Angie Chiangd4022822016-11-02 18:30:25 -07002054
Jingning Hanc2b797f2017-07-19 09:37:11 -07002055 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
2056 int mu_blocks_wide =
2057 block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
2058 int mu_blocks_high =
2059 block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
2060
2061 mu_blocks_wide = AOMMIN(num_4x4_w, mu_blocks_wide);
2062 mu_blocks_high = AOMMIN(num_4x4_h, mu_blocks_high);
2063
Jingning Hanfe45b212016-11-22 10:30:23 -08002064 if (is_inter_block(mbmi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002065 int block = 0;
Jingning Hanc2b797f2017-07-19 09:37:11 -07002066 for (row = 0; row < num_4x4_h; row += mu_blocks_high) {
2067 for (col = 0; col < num_4x4_w; col += mu_blocks_wide) {
Jingning Hane5e8f4d2017-11-20 20:11:04 -08002068 write_inter_txb_coeff(cm, x, mbmi, w, tok, tok_end, &token_stats,
2069 row, col, &block, plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002070 }
2071 }
Angie Chiangd02001d2016-11-06 15:31:49 -08002072#if CONFIG_RD_DEBUG
Angie Chiang3963d632016-11-10 18:41:40 -08002073 if (mbmi->sb_type >= BLOCK_8X8 &&
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002074 rd_token_stats_mismatch(&mbmi->rd_stats, &token_stats, plane)) {
Angie Chiangd02001d2016-11-06 15:31:49 -08002075 dump_mode_info(m);
2076 assert(0);
2077 }
Jingning Hanfe45b212016-11-22 10:30:23 -08002078#endif // CONFIG_RD_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07002079 } else {
Jingning Han5ab7ed42017-05-18 16:15:52 -07002080#if CONFIG_LV_MAP
Angie Chiang140b3332017-12-12 17:29:25 -08002081 av1_write_coeffs_mb(cm, x, w, plane, bsize);
Jingning Han5ab7ed42017-05-18 16:15:52 -07002082#else
hui su0c6244b2017-07-12 17:11:43 -07002083 const TX_SIZE tx = av1_get_tx_size(plane, xd);
Jingning Han42a0fb32016-10-31 10:43:31 -07002084 const int bkw = tx_size_wide_unit[tx];
2085 const int bkh = tx_size_high_unit[tx];
Jingning Han5b701742017-07-19 14:39:07 -07002086 int blk_row, blk_col;
2087
2088 for (row = 0; row < num_4x4_h; row += mu_blocks_high) {
2089 for (col = 0; col < num_4x4_w; col += mu_blocks_wide) {
2090 const int unit_height = AOMMIN(mu_blocks_high + row, num_4x4_h);
2091 const int unit_width = AOMMIN(mu_blocks_wide + col, num_4x4_w);
2092
2093 for (blk_row = row; blk_row < unit_height; blk_row += bkh) {
2094 for (blk_col = col; blk_col < unit_width; blk_col += bkw) {
Jingning Han5b701742017-07-19 14:39:07 -07002095 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx,
2096 &token_stats);
Jingning Han5b701742017-07-19 14:39:07 -07002097 }
2098 }
Fangwen Fu33bcd112017-02-07 16:42:41 -08002099 }
2100 }
Jingning Han5ab7ed42017-05-18 16:15:52 -07002101#endif // CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07002102 }
Angie Chiangd4022822016-11-02 18:30:25 -07002103
Yushin Chod0b77ac2017-10-20 17:33:16 -07002104#if !CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07002105 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
2106 (*tok)++;
Yushin Cho258a0242017-03-06 13:53:01 -08002107#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002108 }
2109 }
2110}
2111
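// write_modes_b() emits everything for one coded block: the mode info via
// write_mbmi_b() followed by its residual tokens via write_tokens_b().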
Yue Chen64550b62017-01-12 12:18:22 -08002112static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
2113 aom_writer *w, const TOKENEXTRA **tok,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02002114 const TOKENEXTRA *const tok_end, int mi_row,
2115 int mi_col) {
2116 write_mbmi_b(cpi, tile, w, mi_row, mi_col);
Jingning Hanf5a4d3b2017-08-27 23:01:19 -07002117
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02002118 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
Yue Chen64550b62017-01-12 12:18:22 -08002119}
2120
Yaowu Xuf883b422016-08-30 14:01:10 -07002121static void write_partition(const AV1_COMMON *const cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002122 const MACROBLOCKD *const xd, int hbs, int mi_row,
2123 int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize,
Yaowu Xuf883b422016-08-30 14:01:10 -07002124 aom_writer *w) {
Alex Converse55c6bde2017-01-12 15:55:31 -08002125 const int is_partition_point = bsize >= BLOCK_8X8;
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00002126
Jingning Hanbf9c6b72016-12-14 14:50:45 -08002127 if (!is_partition_point) return;
2128
Rupert Swarbrickeb123932017-11-22 15:20:47 +00002129 const int has_rows = (mi_row + hbs) < cm->mi_rows;
2130 const int has_cols = (mi_col + hbs) < cm->mi_cols;
2131 const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
2132 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
2133
2134 if (!has_rows && !has_cols) {
2135 assert(p == PARTITION_SPLIT);
2136 return;
2137 }
2138
Yaowu Xuc27fc142016-08-22 16:08:15 -07002139 if (has_rows && has_cols) {
Rupert Swarbrickeb123932017-11-22 15:20:47 +00002140 aom_write_symbol(w, p, ec_ctx->partition_cdf[ctx],
2141 partition_cdf_length(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002142 } else if (!has_rows && has_cols) {
2143 assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002144 assert(bsize > BLOCK_8X8);
2145 aom_cdf_prob cdf[2];
Rupert Swarbrickeb123932017-11-22 15:20:47 +00002146 partition_gather_vert_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002147 aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
Rupert Swarbrickeb123932017-11-22 15:20:47 +00002148 } else {
2149 assert(has_rows && !has_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002150 assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002151 assert(bsize > BLOCK_8X8);
2152 aom_cdf_prob cdf[2];
Rupert Swarbrickeb123932017-11-22 15:20:47 +00002153 partition_gather_horz_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07002154 aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002155 }
2156}
2157
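// write_modes_sb() walks the partition tree rooted at (mi_row, mi_col): it
// codes the partition symbol for this block size (write_partition() above
// handles the frame-boundary cases where only a binary choice, or nothing at
// all, is coded), then writes the resulting block(s) or recurses into the
// sub-blocks. With CONFIG_LOOP_RESTORATION it finally signals the parameters
// of any restoration units whose top-left corners lie in this superblock.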
Yaowu Xuf883b422016-08-30 14:01:10 -07002158static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile,
2159 aom_writer *const w, const TOKENEXTRA **tok,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02002160 const TOKENEXTRA *const tok_end, int mi_row,
2161 int mi_col, BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002162 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002163 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Jingning Hanc709e1f2016-12-06 14:48:09 -08002164 const int hbs = mi_size_wide[bsize] / 2;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002165#if CONFIG_EXT_PARTITION_TYPES
2166 const int quarter_step = mi_size_wide[bsize] / 4;
2167 int i;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002168#if CONFIG_EXT_PARTITION_TYPES_AB
2169 const int qbs = mi_size_wide[bsize] / 4;
2170#endif // CONFIG_EXT_PARTITION_TYPES_AB
2171#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002172 const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
2173 const BLOCK_SIZE subsize = get_subsize(bsize, partition);
Jingning Han52261842016-12-14 12:17:49 -08002174
Yaowu Xuc27fc142016-08-22 16:08:15 -07002175 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2176
2177 write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002178 switch (partition) {
2179 case PARTITION_NONE:
2180 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2181 break;
2182 case PARTITION_HORZ:
2183 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2184 if (mi_row + hbs < cm->mi_rows)
2185 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2186 break;
2187 case PARTITION_VERT:
2188 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2189 if (mi_col + hbs < cm->mi_cols)
2190 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2191 break;
2192 case PARTITION_SPLIT:
2193 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
2194 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs, subsize);
2195 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col, subsize);
2196 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs,
2197 subsize);
2198 break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002199#if CONFIG_EXT_PARTITION_TYPES
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002200#if CONFIG_EXT_PARTITION_TYPES_AB
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002201 case PARTITION_HORZ_A:
2202 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2203 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + qbs, mi_col);
2204 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2205 break;
2206 case PARTITION_HORZ_B:
2207 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2208 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2209 if (mi_row + 3 * qbs < cm->mi_rows)
2210 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + 3 * qbs, mi_col);
2211 break;
2212 case PARTITION_VERT_A:
2213 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2214 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + qbs);
2215 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2216 break;
2217 case PARTITION_VERT_B:
2218 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2219 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2220 if (mi_col + 3 * qbs < cm->mi_cols)
2221 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + 3 * qbs);
2222 break;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002223#else
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002224 case PARTITION_HORZ_A:
2225 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2226 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2227 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2228 break;
2229 case PARTITION_HORZ_B:
2230 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2231 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2232 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
2233 break;
2234 case PARTITION_VERT_A:
2235 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2236 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2237 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2238 break;
2239 case PARTITION_VERT_B:
2240 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2241 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2242 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
2243 break;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01002244#endif
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002245 case PARTITION_HORZ_4:
2246 for (i = 0; i < 4; ++i) {
2247 int this_mi_row = mi_row + i * quarter_step;
2248 if (i > 0 && this_mi_row >= cm->mi_rows) break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002249
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002250 write_modes_b(cpi, tile, w, tok, tok_end, this_mi_row, mi_col);
2251 }
2252 break;
2253 case PARTITION_VERT_4:
2254 for (i = 0; i < 4; ++i) {
2255 int this_mi_col = mi_col + i * quarter_step;
2256 if (i > 0 && this_mi_col >= cm->mi_cols) break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01002257
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002258 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, this_mi_col);
2259 }
2260 break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002261#endif // CONFIG_EXT_PARTITION_TYPES
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002262 default: assert(0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002263 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002264
2265// update partition context
2266#if CONFIG_EXT_PARTITION_TYPES
2267 update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
2268#else
2269 if (bsize >= BLOCK_8X8 &&
2270 (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
2271 update_partition_context(xd, mi_row, mi_col, subsize, bsize);
David Barkerf8935c92016-10-26 14:54:06 +01002272#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002273
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002274#if CONFIG_LOOP_RESTORATION
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002275 for (int plane = 0; plane < av1_num_planes(cm); ++plane) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01002276 int rcol0, rcol1, rrow0, rrow1, tile_tl_idx;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002277 if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
2278 &rcol0, &rcol1, &rrow0, &rrow1,
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01002279 &tile_tl_idx)) {
2280 const int rstride = cm->rst_info[plane].horz_units_per_tile;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002281 for (int rrow = rrow0; rrow < rrow1; ++rrow) {
2282 for (int rcol = rcol0; rcol < rcol1; ++rcol) {
Rupert Swarbrickbcb65fe2017-10-25 17:15:28 +01002283 const int rtile_idx = tile_tl_idx + rcol + rrow * rstride;
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002284 const RestorationUnitInfo *rui =
2285 &cm->rst_info[plane].unit_info[rtile_idx];
2286 loop_restoration_write_sb_coeffs(cm, xd, rui, w, plane);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002287 }
2288 }
2289 }
2290 }
2291#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002292}
2293
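// write_modes() writes one tile: it clears the above context for the tile's
// columns, resets the delta-q / delta-LF trackers when those tools are
// enabled, and then walks the tile superblock by superblock, clearing the
// left context at the start of each superblock row.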
Yaowu Xuf883b422016-08-30 14:01:10 -07002294static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile,
2295 aom_writer *const w, const TOKENEXTRA **tok,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002296 const TOKENEXTRA *const tok_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002297 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002298 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
2299 const int mi_row_start = tile->mi_row_start;
2300 const int mi_row_end = tile->mi_row_end;
2301 const int mi_col_start = tile->mi_col_start;
2302 const int mi_col_end = tile->mi_col_end;
2303 int mi_row, mi_col;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002304
2305#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08002306 if (!cm->dependent_horz_tiles || mi_row_start == 0 ||
2307 tile->tg_horz_boundary) {
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002308 av1_zero_above_context(cm, mi_col_start, mi_col_end);
2309 }
2310#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002311 av1_zero_above_context(cm, mi_col_start, mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002312#endif
Arild Fuldseth07441162016-08-15 15:07:52 +02002313 if (cpi->common.delta_q_present_flag) {
2314 xd->prev_qindex = cpi->common.base_qindex;
Fangwen Fu231fe422017-04-24 17:52:29 -07002315#if CONFIG_EXT_DELTA_Q
2316 if (cpi->common.delta_lf_present_flag) {
Cheng Chena97394f2017-09-27 15:05:14 -07002317#if CONFIG_LOOPFILTER_LEVEL
2318 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
2319 xd->prev_delta_lf[lf_id] = 0;
2320#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07002321 xd->prev_delta_lf_from_base = 0;
2322 }
2323#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02002324 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002325
2326 for (mi_row = mi_row_start; mi_row < mi_row_end; mi_row += cm->mib_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002327 av1_zero_left_context(xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002328
2329 for (mi_col = mi_col_start; mi_col < mi_col_end; mi_col += cm->mib_size) {
Sebastien Alaiwan6534ba82017-10-13 20:35:14 +02002330 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, cm->sb_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002331 }
2332 }
2333}
2334
Yaowu Xuc27fc142016-08-22 16:08:15 -07002335#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002336static void encode_restoration_mode(AV1_COMMON *cm,
2337 struct aom_write_bit_buffer *wb) {
Hui Su27df8342017-11-07 15:16:05 -08002338#if CONFIG_INTRABC
2339 if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
2340#endif // CONFIG_INTRABC
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002341 int all_none = 1, chroma_none = 1;
2342 for (int p = 0; p < av1_num_planes(cm); ++p) {
Rupert Swarbrick4596deb2017-11-07 18:06:38 +00002343 RestorationInfo *rsi = &cm->rst_info[p];
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002344 if (rsi->frame_restoration_type != RESTORE_NONE) {
2345 all_none = 0;
2346 chroma_none &= p == 0;
2347 }
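    // Two bits per plane select the frame-level restoration type:
    //   RESTORE_NONE        -> (0, 0)
    //   RESTORE_WIENER      -> (1, 0)
    //   RESTORE_SGRPROJ     -> (1, 1)
    //   RESTORE_SWITCHABLE  -> (0, 1)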
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002348 switch (rsi->frame_restoration_type) {
Debargha Mukherjeea3d4fe52017-05-19 16:22:54 -07002349 case RESTORE_NONE:
2350 aom_wb_write_bit(wb, 0);
2351 aom_wb_write_bit(wb, 0);
2352 break;
Debargha Mukherjeed23ceea2017-05-18 20:33:52 -07002353 case RESTORE_WIENER:
2354 aom_wb_write_bit(wb, 1);
2355 aom_wb_write_bit(wb, 0);
2356 break;
2357 case RESTORE_SGRPROJ:
2358 aom_wb_write_bit(wb, 1);
2359 aom_wb_write_bit(wb, 1);
2360 break;
Debargha Mukherjeea3d4fe52017-05-19 16:22:54 -07002361 case RESTORE_SWITCHABLE:
2362 aom_wb_write_bit(wb, 0);
2363 aom_wb_write_bit(wb, 1);
2364 break;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08002365 default: assert(0);
2366 }
2367 }
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002368 if (!all_none) {
Rupert Swarbrick4596deb2017-11-07 18:06:38 +00002369 RestorationInfo *rsi = &cm->rst_info[0];
2370 const int qsize = RESTORATION_TILESIZE_MAX >> 2;
2371 const int hsize = RESTORATION_TILESIZE_MAX >> 1;
2372 aom_wb_write_bit(wb, rsi->restoration_unit_size != qsize);
2373 if (rsi->restoration_unit_size != qsize) {
2374 aom_wb_write_bit(wb, rsi->restoration_unit_size != hsize);
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08002375 }
2376 }
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00002377
2378 if (av1_num_planes(cm) > 1) {
2379 int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
2380 if (s && !chroma_none) {
2381 aom_wb_write_bit(wb,
2382 cm->rst_info[1].restoration_unit_size !=
2383 cm->rst_info[0].restoration_unit_size);
2384 assert(cm->rst_info[1].restoration_unit_size ==
2385 cm->rst_info[0].restoration_unit_size ||
2386 cm->rst_info[1].restoration_unit_size ==
2387 (cm->rst_info[0].restoration_unit_size >> s));
2388 assert(cm->rst_info[2].restoration_unit_size ==
2389 cm->rst_info[1].restoration_unit_size);
2390 } else if (!s) {
2391 assert(cm->rst_info[1].restoration_unit_size ==
2392 cm->rst_info[0].restoration_unit_size);
2393 assert(cm->rst_info[2].restoration_unit_size ==
2394 cm->rst_info[1].restoration_unit_size);
2395 }
Debargha Mukherjee84f567c2017-06-21 10:53:59 -07002396 }
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002397}
2398
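// Wiener taps are coded with aom_write_primitive_refsubexpfin() as offsets
// from the previously signalled unit's taps (ref_wiener_info), which is then
// updated. Only taps 0..2 of the vertical and horizontal filters are coded;
// the remaining taps follow from the filter's symmetry. For chroma
// (wiener_win < WIENER_WIN) tap 0 is implicitly zero and is not signalled.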
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002399static void write_wiener_filter(int wiener_win, const WienerInfo *wiener_info,
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002400 WienerInfo *ref_wiener_info, aom_writer *wb) {
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002401 if (wiener_win == WIENER_WIN)
2402 aom_write_primitive_refsubexpfin(
2403 wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2404 WIENER_FILT_TAP0_SUBEXP_K,
2405 ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV,
2406 wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV);
2407 else
2408 assert(wiener_info->vfilter[0] == 0 &&
2409 wiener_info->vfilter[WIENER_WIN - 1] == 0);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002410 aom_write_primitive_refsubexpfin(
2411 wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2412 WIENER_FILT_TAP1_SUBEXP_K,
2413 ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV,
2414 wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV);
2415 aom_write_primitive_refsubexpfin(
2416 wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2417 WIENER_FILT_TAP2_SUBEXP_K,
2418 ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV,
2419 wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV);
Debargha Mukherjee1cb757c2017-08-21 02:46:31 -07002420 if (wiener_win == WIENER_WIN)
2421 aom_write_primitive_refsubexpfin(
2422 wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
2423 WIENER_FILT_TAP0_SUBEXP_K,
2424 ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV,
2425 wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV);
2426 else
2427 assert(wiener_info->hfilter[0] == 0 &&
2428 wiener_info->hfilter[WIENER_WIN - 1] == 0);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002429 aom_write_primitive_refsubexpfin(
2430 wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
2431 WIENER_FILT_TAP1_SUBEXP_K,
2432 ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV,
2433 wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV);
2434 aom_write_primitive_refsubexpfin(
2435 wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
2436 WIENER_FILT_TAP2_SUBEXP_K,
2437 ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV,
2438 wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV);
2439 memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002440}
2441
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002442static void write_sgrproj_filter(const SgrprojInfo *sgrproj_info,
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002443 SgrprojInfo *ref_sgrproj_info,
2444 aom_writer *wb) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002445 aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07002446 aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1,
2447 SGRPROJ_PRJ_SUBEXP_K,
2448 ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
2449 sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0);
2450 aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1,
2451 SGRPROJ_PRJ_SUBEXP_K,
2452 ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
2453 sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1);
2454 memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07002455}
2456
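// Per-unit signalling: for a RESTORE_SWITCHABLE frame a full restoration-type
// symbol is coded for the unit; for RESTORE_WIENER / RESTORE_SGRPROJ frames
// only an on/off flag is coded. Filter parameters, when present, are coded
// relative to the running per-plane references in xd->wiener_info and
// xd->sgrproj_info.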
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002457static void loop_restoration_write_sb_coeffs(const AV1_COMMON *const cm,
2458 MACROBLOCKD *xd,
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002459 const RestorationUnitInfo *rui,
2460 aom_writer *const w, int plane) {
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002461 const RestorationInfo *rsi = cm->rst_info + plane;
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002462 RestorationType frame_rtype = rsi->frame_restoration_type;
2463 if (frame_rtype == RESTORE_NONE) return;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002464
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002465 const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
2466 WienerInfo *wiener_info = xd->wiener_info + plane;
2467 SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002468 RestorationType unit_rtype = rui->restoration_type;
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002469
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002470 if (frame_rtype == RESTORE_SWITCHABLE) {
2471 aom_write_symbol(w, unit_rtype, xd->tile_ctx->switchable_restore_cdf,
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07002472 RESTORE_SWITCHABLE_TYPES);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002473 switch (unit_rtype) {
2474 case RESTORE_WIENER:
2475 write_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, w);
2476 break;
2477 case RESTORE_SGRPROJ:
2478 write_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, w);
2479 break;
2480 default: assert(unit_rtype == RESTORE_NONE); break;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002481 }
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002482 } else if (frame_rtype == RESTORE_WIENER) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002483 aom_write_symbol(w, unit_rtype != RESTORE_NONE,
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07002484 xd->tile_ctx->wiener_restore_cdf, 2);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002485 if (unit_rtype != RESTORE_NONE) {
2486 write_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, w);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01002487 }
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002488 } else if (frame_rtype == RESTORE_SGRPROJ) {
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002489 aom_write_symbol(w, unit_rtype != RESTORE_NONE,
Debargha Mukherjeebc732ef2017-10-12 12:40:25 -07002490 xd->tile_ctx->sgrproj_restore_cdf, 2);
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +01002491 if (unit_rtype != RESTORE_NONE) {
2492 write_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, w);
Rupert Swarbrick09b5b162017-08-31 16:32:29 +01002493 }
2494 }
2495}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002496#endif // CONFIG_LOOP_RESTORATION
2497
Yaowu Xuf883b422016-08-30 14:01:10 -07002498static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Cheng Chenf18ba022017-12-06 10:16:27 -08002499#if CONFIG_INTRABC && !CONFIG_LPF_SB
Hui Su27df8342017-11-07 15:16:05 -08002500 if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
Cheng Chenf18ba022017-12-06 10:16:27 -08002501#endif // CONFIG_INTRABC && !CONFIG_LPF_SB
Yaowu Xuc27fc142016-08-22 16:08:15 -07002502 int i;
2503 struct loopfilter *lf = &cm->lf;
2504
Cheng Chen179479f2017-08-04 10:56:39 -07002505// Encode the loop filter levels and sharpness
Cheng Chen13fc8192017-08-19 11:49:28 -07002506#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen76224b02017-12-15 12:21:01 -08002507 aom_wb_write_literal(wb, lf->filter_level[0], 6);
2508 aom_wb_write_literal(wb, lf->filter_level[1], 6);
2509 if (av1_num_planes(cm) > 1) {
2510 if (lf->filter_level[0] || lf->filter_level[1]) {
2511 aom_wb_write_literal(wb, lf->filter_level_u, 6);
2512 aom_wb_write_literal(wb, lf->filter_level_v, 6);
Cheng Chen765e34e2017-12-11 11:43:35 -08002513 }
Cheng Chene94df5c2017-07-19 17:25:33 -07002514 }
Cheng Chena7345512017-12-05 15:36:05 -08002515#else
Cheng Chen179479f2017-08-04 10:56:39 -07002516 aom_wb_write_literal(wb, lf->filter_level, 6);
Cheng Chena7345512017-12-05 15:36:05 -08002517#endif // CONFIG_LOOPFILTER_LEVEL
Yaowu Xuf883b422016-08-30 14:01:10 -07002518 aom_wb_write_literal(wb, lf->sharpness_level, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002519
2520 // Write out loop filter deltas applied at the MB level based on mode or
2521 // ref frame (if they are enabled).
Yaowu Xuf883b422016-08-30 14:01:10 -07002522 aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002523
2524 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002525 aom_wb_write_bit(wb, lf->mode_ref_delta_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002526 if (lf->mode_ref_delta_update) {
2527 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
2528 const int delta = lf->ref_deltas[i];
2529 const int changed = delta != lf->last_ref_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07002530 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002531 if (changed) {
2532 lf->last_ref_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07002533 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002534 }
2535 }
2536
2537 for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
2538 const int delta = lf->mode_deltas[i];
2539 const int changed = delta != lf->last_mode_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07002540 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002541 if (changed) {
2542 lf->last_mode_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07002543 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002544 }
2545 }
2546 }
2547 }
2548}
2549
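// CDEF header: the damping value, cdef_bits (log2 of the number of strength
// entries), and each strength entry for luma and, when chroma planes with
// matching subsampling are present, for chroma.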
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01002550static void encode_cdef(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Hui Su27df8342017-11-07 15:16:05 -08002551#if CONFIG_INTRABC
2552 if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
2553#endif // CONFIG_INTRABC
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002554 int i;
Steinar Midtskogen59782122017-07-20 08:49:43 +02002555#if CONFIG_CDEF_SINGLEPASS
2556 aom_wb_write_literal(wb, cm->cdef_pri_damping - 3, 2);
2557 assert(cm->cdef_pri_damping == cm->cdef_sec_damping);
2558#else
Steinar Midtskogen94de0aa2017-08-02 10:30:12 +02002559 aom_wb_write_literal(wb, cm->cdef_pri_damping - 5, 1);
2560 aom_wb_write_literal(wb, cm->cdef_sec_damping - 3, 2);
Steinar Midtskogen59782122017-07-20 08:49:43 +02002561#endif
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002562 aom_wb_write_literal(wb, cm->cdef_bits, 2);
2563 for (i = 0; i < cm->nb_cdef_strengths; i++) {
2564 aom_wb_write_literal(wb, cm->cdef_strengths[i], CDEF_STRENGTH_BITS);
Debargha Mukherjee18f4fb22017-12-14 14:26:27 -08002565 if (cm->subsampling_x == cm->subsampling_y && av1_num_planes(cm) > 1)
Steinar Midtskogen1c1161f2017-09-08 15:03:51 +02002566 aom_wb_write_literal(wb, cm->cdef_uv_strengths[i], CDEF_STRENGTH_BITS);
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04002567 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002568}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002569
Yaowu Xuf883b422016-08-30 14:01:10 -07002570static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002571 if (delta_q != 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002572 aom_wb_write_bit(wb, 1);
2573 aom_wb_write_inv_signed_literal(wb, delta_q, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002574 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002575 aom_wb_write_bit(wb, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002576 }
2577}
2578
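// Quantization header: base_qindex, the luma DC delta, then the chroma DC/AC
// deltas (with CONFIG_EXT_QM a diff_uv_delta flag may allow separate U and V
// deltas; otherwise one set is shared). Each delta is a presence flag plus a
// signed offset, see write_delta_q() above.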
Yaowu Xuf883b422016-08-30 14:01:10 -07002579static void encode_quantization(const AV1_COMMON *const cm,
2580 struct aom_write_bit_buffer *wb) {
2581 aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002582 write_delta_q(wb, cm->y_dc_delta_q);
Debargha Mukherjee18f4fb22017-12-14 14:26:27 -08002583 if (av1_num_planes(cm) > 1) {
2584 int diff_uv_delta = (cm->u_dc_delta_q != cm->v_dc_delta_q) ||
2585 (cm->u_ac_delta_q != cm->v_ac_delta_q);
Yaowu Xu6fc47e52017-12-04 15:07:48 -08002586#if CONFIG_EXT_QM
Debargha Mukherjee18f4fb22017-12-14 14:26:27 -08002587 if (cm->separate_uv_delta_q) aom_wb_write_bit(wb, diff_uv_delta);
Yaowu Xu6fc47e52017-12-04 15:07:48 -08002588#else
Debargha Mukherjee18f4fb22017-12-14 14:26:27 -08002589 assert(!diff_uv_delta);
Yaowu Xu6fc47e52017-12-04 15:07:48 -08002590#endif
Debargha Mukherjee18f4fb22017-12-14 14:26:27 -08002591 write_delta_q(wb, cm->u_dc_delta_q);
2592 write_delta_q(wb, cm->u_ac_delta_q);
2593 if (diff_uv_delta) {
2594 write_delta_q(wb, cm->v_dc_delta_q);
2595 write_delta_q(wb, cm->v_ac_delta_q);
2596 }
Yaowu Xu6fc47e52017-12-04 15:07:48 -08002597 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002598#if CONFIG_AOM_QM
Yaowu Xuf883b422016-08-30 14:01:10 -07002599 aom_wb_write_bit(wb, cm->using_qmatrix);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002600 if (cm->using_qmatrix) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002601 aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS);
2602 aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002603 }
2604#endif
2605}
2606
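// Segmentation header: the enabled flag; update_map / temporal_update flags
// (only coded for non-intra, non-error-resilient frames); then, when
// update_data is set, an active flag per (segment, feature) followed by the
// clamped feature value, signed or unsigned per av1_is_segfeature_signed().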
Yaowu Xuf883b422016-08-30 14:01:10 -07002607static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
2608 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002609 int i, j;
2610 const struct segmentation *seg = &cm->seg;
2611
Yaowu Xuf883b422016-08-30 14:01:10 -07002612 aom_wb_write_bit(wb, seg->enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002613 if (!seg->enabled) return;
2614
2615 // Segmentation map
2616 if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002617 aom_wb_write_bit(wb, seg->update_map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002618 } else {
2619 assert(seg->update_map == 1);
2620 }
2621 if (seg->update_map) {
2622 // Select the coding strategy (temporal or spatial)
Yaowu Xuf883b422016-08-30 14:01:10 -07002623 av1_choose_segmap_coding_method(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002624
2625 // Write out the chosen coding method.
2626 if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002627 aom_wb_write_bit(wb, seg->temporal_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002628 } else {
2629 assert(seg->temporal_update == 0);
2630 }
2631 }
2632
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00002633#if CONFIG_SPATIAL_SEGMENTATION
2634 cm->preskip_segid = 0;
2635#endif
2636
Yaowu Xuc27fc142016-08-22 16:08:15 -07002637 // Segmentation data
Yaowu Xuf883b422016-08-30 14:01:10 -07002638 aom_wb_write_bit(wb, seg->update_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002639 if (seg->update_data) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002640 for (i = 0; i < MAX_SEGMENTS; i++) {
2641 for (j = 0; j < SEG_LVL_MAX; j++) {
2642 const int active = segfeature_active(seg, i, j);
Yaowu Xuf883b422016-08-30 14:01:10 -07002643 aom_wb_write_bit(wb, active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002644 if (active) {
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00002645#if CONFIG_SPATIAL_SEGMENTATION
2646 cm->preskip_segid |= j >= SEG_LVL_REF_FRAME;
2647 cm->last_active_segid = i;
2648#endif
Sebastien Alaiwanca14b472017-12-11 11:46:00 +01002649 const int data_max = av1_seg_feature_data_max(j);
2650 const int data_min = -data_max;
2651 const int ubits = get_unsigned_bits(data_max);
2652 const int data = clamp(get_segdata(seg, i, j), data_min, data_max);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002653
Yaowu Xuf883b422016-08-30 14:01:10 -07002654 if (av1_is_segfeature_signed(j)) {
Sebastien Alaiwanca14b472017-12-11 11:46:00 +01002655 aom_wb_write_inv_signed_literal(wb, data, ubits);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002656 } else {
Sebastien Alaiwanca14b472017-12-11 11:46:00 +01002657 aom_wb_write_literal(wb, data, ubits);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002658 }
2659 }
2660 }
2661 }
2662 }
2663}
2664
Thomas Daedef636d5c2017-06-29 13:48:27 -07002665static void write_tx_mode(AV1_COMMON *cm, TX_MODE *mode,
Yue Cheneeacc4c2017-01-17 17:29:17 -08002666 struct aom_write_bit_buffer *wb) {
Thomas Daedef636d5c2017-06-29 13:48:27 -07002667 if (cm->all_lossless) {
Yue Cheneeacc4c2017-01-17 17:29:17 -08002668 *mode = ONLY_4X4;
2669 return;
2670 }
Debargha Mukherjee923b73d2017-10-31 18:11:34 -07002671#if CONFIG_SIMPLIFY_TX_MODE
2672 aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
2673#else
Debargha Mukherjee18d38f62016-11-17 20:30:16 -08002674#if CONFIG_TX64X64
Yue Cheneeacc4c2017-01-17 17:29:17 -08002675 aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
2676 if (*mode != TX_MODE_SELECT) {
2677 aom_wb_write_literal(wb, AOMMIN(*mode, ALLOW_32X32), 2);
2678 if (*mode >= ALLOW_32X32) aom_wb_write_bit(wb, *mode == ALLOW_64X64);
Debargha Mukherjee18d38f62016-11-17 20:30:16 -08002679 }
2680#else
Yue Cheneeacc4c2017-01-17 17:29:17 -08002681 aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
2682 if (*mode != TX_MODE_SELECT) aom_wb_write_literal(wb, *mode, 2);
Debargha Mukherjee18d38f62016-11-17 20:30:16 -08002683#endif // CONFIG_TX64X64
Debargha Mukherjee923b73d2017-10-31 18:11:34 -07002684#endif // CONFIG_SIMPLIFY_TX_MODE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002685}
2686
Angie Chiang5678ad92016-11-21 09:38:40 -08002687static void write_frame_interp_filter(InterpFilter filter,
2688 struct aom_write_bit_buffer *wb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002689 aom_wb_write_bit(wb, filter == SWITCHABLE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002690 if (filter != SWITCHABLE)
Angie Chiang6305abe2016-10-24 12:24:44 -07002691 aom_wb_write_literal(wb, filter, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002692}
2693
Yaowu Xuf883b422016-08-30 14:01:10 -07002694static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002695 if (cm->interp_filter == SWITCHABLE) {
2696 // Check to see if only one of the filters is actually used
2697 int count[SWITCHABLE_FILTERS];
2698 int i, j, c = 0;
2699 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2700 count[i] = 0;
2701 for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
2702 count[i] += counts->switchable_interp[j][i];
2703 c += (count[i] > 0);
2704 }
2705 if (c == 1) {
2706 // Only one filter is used. So set the filter at frame level
2707 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2708 if (count[i]) {
Sarah Parker4c10a3c2017-04-10 19:37:59 -07002709 if (i == EIGHTTAP_REGULAR || WARP_WM_NEIGHBORS_WITH_OBMC)
Debargha Mukherjee604d8462017-04-06 15:27:00 -07002710 cm->interp_filter = i;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002711 break;
2712 }
2713 }
2714 }
2715 }
2716}
2717
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002718#if CONFIG_MAX_TILE
2719
2720// Same function as write_uniform but writing to the uncompressed header wb
2721static void wb_write_uniform(struct aom_write_bit_buffer *wb, int n, int v) {
2722 const int l = get_unsigned_bits(n);
2723 const int m = (1 << l) - n;
2724 if (l == 0) return;
2725 if (v < m) {
2726 aom_wb_write_literal(wb, v, l - 1);
2727 } else {
2728 aom_wb_write_literal(wb, m + ((v - m) >> 1), l - 1);
2729 aom_wb_write_literal(wb, (v - m) & 1, 1);
2730 }
2731}
2732
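// CONFIG_MAX_TILE layout: uniform_tile_spacing_flag selects between
// (a) power-of-two uniform spacing, where log2 tile cols/rows are coded in
//     unary as (log2 - min_log2) ones followed by a terminating zero (the
//     zero is omitted at the maximum), and
// (b) explicit mode, where each tile column width / row height is coded in
//     superblock units with wb_write_uniform().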
2733static void write_tile_info_max_tile(const AV1_COMMON *const cm,
2734 struct aom_write_bit_buffer *wb) {
Dominic Symes917d6c02017-10-11 18:00:52 +02002735 int width_mi = ALIGN_POWER_OF_TWO(cm->mi_cols, cm->mib_size_log2);
2736 int height_mi = ALIGN_POWER_OF_TWO(cm->mi_rows, cm->mib_size_log2);
2737 int width_sb = width_mi >> cm->mib_size_log2;
2738 int height_sb = height_mi >> cm->mib_size_log2;
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002739 int size_sb, i;
2740
2741 aom_wb_write_bit(wb, cm->uniform_tile_spacing_flag);
2742
2743 if (cm->uniform_tile_spacing_flag) {
2744 // Uniformly spaced tiles with a power-of-two number of rows and columns
2745 // tile columns
2746 int ones = cm->log2_tile_cols - cm->min_log2_tile_cols;
2747 while (ones--) {
2748 aom_wb_write_bit(wb, 1);
2749 }
2750 if (cm->log2_tile_cols < cm->max_log2_tile_cols) {
2751 aom_wb_write_bit(wb, 0);
2752 }
2753
2754 // rows
2755 ones = cm->log2_tile_rows - cm->min_log2_tile_rows;
2756 while (ones--) {
2757 aom_wb_write_bit(wb, 1);
2758 }
2759 if (cm->log2_tile_rows < cm->max_log2_tile_rows) {
2760 aom_wb_write_bit(wb, 0);
2761 }
2762 } else {
2763 // Explicit tiles with configurable tile widths and heights
2764 // columns
2765 for (i = 0; i < cm->tile_cols; i++) {
2766 size_sb = cm->tile_col_start_sb[i + 1] - cm->tile_col_start_sb[i];
2767 wb_write_uniform(wb, AOMMIN(width_sb, MAX_TILE_WIDTH_SB), size_sb - 1);
2768 width_sb -= size_sb;
2769 }
2770 assert(width_sb == 0);
2771
2772 // rows
2773 for (i = 0; i < cm->tile_rows; i++) {
2774 size_sb = cm->tile_row_start_sb[i + 1] - cm->tile_row_start_sb[i];
2775 wb_write_uniform(wb, AOMMIN(height_sb, cm->max_tile_height_sb),
2776 size_sb - 1);
2777 height_sb -= size_sb;
2778 }
2779 assert(height_sb == 0);
2780 }
2781}
2782#endif
2783
Yaowu Xuf883b422016-08-30 14:01:10 -07002784static void write_tile_info(const AV1_COMMON *const cm,
2785 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002786#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002787 if (cm->large_scale_tile) {
2788 const int tile_width =
2789 ALIGN_POWER_OF_TWO(cm->tile_width, cm->mib_size_log2) >>
2790 cm->mib_size_log2;
2791 const int tile_height =
2792 ALIGN_POWER_OF_TWO(cm->tile_height, cm->mib_size_log2) >>
2793 cm->mib_size_log2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002794
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002795 assert(tile_width > 0);
2796 assert(tile_height > 0);
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08002797
Yaowu Xuc27fc142016-08-22 16:08:15 -07002798// Write the tile sizes
2799#if CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002800 if (cm->sb_size == BLOCK_128X128) {
2801 assert(tile_width <= 32);
2802 assert(tile_height <= 32);
2803 aom_wb_write_literal(wb, tile_width - 1, 5);
2804 aom_wb_write_literal(wb, tile_height - 1, 5);
2805 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002806#endif // CONFIG_EXT_PARTITION
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002807 assert(tile_width <= 64);
2808 assert(tile_height <= 64);
2809 aom_wb_write_literal(wb, tile_width - 1, 6);
2810 aom_wb_write_literal(wb, tile_height - 1, 6);
2811#if CONFIG_EXT_PARTITION
2812 }
2813#endif // CONFIG_EXT_PARTITION
2814 } else {
2815#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002816
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002817#if CONFIG_MAX_TILE
2818 write_tile_info_max_tile(cm, wb);
2819#else
2820 int min_log2_tile_cols, max_log2_tile_cols, ones;
2821 av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002822
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002823 // columns
2824 ones = cm->log2_tile_cols - min_log2_tile_cols;
2825 while (ones--) aom_wb_write_bit(wb, 1);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002826
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002827 if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0);
2828
2829 // rows
2830 aom_wb_write_bit(wb, cm->log2_tile_rows != 0);
2831 if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1);
2832#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002833#if CONFIG_DEPENDENT_HORZTILES
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002834 if (cm->tile_rows > 1) aom_wb_write_bit(wb, cm->dependent_horz_tiles);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002835#endif
2836#if CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002837 }
Fangwen Fu70bcb892017-05-06 17:05:19 -07002838#endif // CONFIG_EXT_TILE
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002839
Ryan Lei9b02b0e2017-01-30 15:52:20 -08002840#if CONFIG_LOOPFILTERING_ACROSS_TILES
Yunqing Wang42015d12017-10-17 15:43:49 -07002841 if (cm->tile_cols * cm->tile_rows > 1)
2842 aom_wb_write_bit(wb, cm->loop_filter_across_tiles_enabled);
Ryan Lei9b02b0e2017-01-30 15:52:20 -08002843#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002844}
2845
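// The refresh mask is a bitmask with one bit per reference slot (REF_FRAMES
// of them) telling the decoder which slots are overwritten by this frame;
// bit i corresponds to virtual slot i.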
Zoe Liu8dd1c982017-09-11 10:14:35 -07002846#if USE_GF16_MULTI_LAYER
2847static int get_refresh_mask_gf16(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002848 int refresh_mask = 0;
2849
Zoe Liu8dd1c982017-09-11 10:14:35 -07002850 if (cpi->refresh_last_frame || cpi->refresh_golden_frame ||
2851 cpi->refresh_bwd_ref_frame || cpi->refresh_alt2_ref_frame ||
2852 cpi->refresh_alt_ref_frame) {
2853 assert(cpi->refresh_fb_idx >= 0 && cpi->refresh_fb_idx < REF_FRAMES);
2854 refresh_mask |= (1 << cpi->refresh_fb_idx);
2855 }
2856
2857 return refresh_mask;
2858}
2859#endif // USE_GF16_MULTI_LAYER
Zoe Liu8dd1c982017-09-11 10:14:35 -07002860
2861static int get_refresh_mask(AV1_COMP *cpi) {
Yi Luo2e6a9ab2017-09-15 08:13:59 -07002862 int refresh_mask = 0;
Zoe Liu8dd1c982017-09-11 10:14:35 -07002863#if USE_GF16_MULTI_LAYER
2864 if (cpi->rc.baseline_gf_interval == 16) return get_refresh_mask_gf16(cpi);
2865#endif // USE_GF16_MULTI_LAYER
2866
Yaowu Xuc27fc142016-08-22 16:08:15 -07002867 // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
2868 // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
2869 // the 3 LAST reference frames will be updated accordingly, i.e.:
2870 // (1) The original virtual index for LAST3_FRAME will become the new virtual
2871 // index for LAST_FRAME; and
2872 // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
2873 // shifted and become the new virtual indexes for LAST2_FRAME and
2874 // LAST3_FRAME.
2875 refresh_mask |=
2876 (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]);
Zoe Liue9b15e22017-07-19 15:53:01 -07002877
Zoe Liue9b15e22017-07-19 15:53:01 -07002878 refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
2879 refresh_mask |= (cpi->refresh_alt2_ref_frame << cpi->alt2_fb_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002880
Yaowu Xuf883b422016-08-30 14:01:10 -07002881 if (av1_preserve_existing_gf(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002882 // We have decided to preserve the previously existing golden frame as our
2883 // new ARF frame. However, in the short term we leave it in the GF slot and,
2884 // if we're updating the GF with the current decoded frame, we save it
2885 // instead to the ARF slot.
Yaowu Xuf883b422016-08-30 14:01:10 -07002886 // Later, in the function av1_encoder.c:av1_update_reference_frames() we
Yaowu Xuc27fc142016-08-22 16:08:15 -07002887 // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
2888 // there so that it can be done outside of the recode loop.
2889 // Note: This is highly specific to the use of ARF as a forward reference,
2890 // and this needs to be generalized as other uses are implemented
2891 // (like RTC/temporal scalability).
2892 return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx);
2893 } else {
Zoe Liue9b15e22017-07-19 15:53:01 -07002894 const int arf_idx = cpi->alt_fb_idx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002895 return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
2896 (cpi->refresh_alt_ref_frame << arf_idx);
2897 }
2898}
2899
2900#if CONFIG_EXT_TILE
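// Large-scale tile mode allows a tile to be coded as a copy of an identical
// tile above it in the same column: find_identical_tile() returns the row
// offset (1..127) to such a tile, or 0 if no match is found. The offset, with
// a marker bit set, then replaces the tile size in the 4-byte tile header
// (see the tile_copy_mode handling in write_tiles()).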
2901static INLINE int find_identical_tile(
2902 const int tile_row, const int tile_col,
2903 TileBufferEnc (*const tile_buffers)[1024]) {
2904 const MV32 candidate_offset[1] = { { 1, 0 } };
2905 const uint8_t *const cur_tile_data =
2906 tile_buffers[tile_row][tile_col].data + 4;
Jingning Han99ffce62017-04-25 15:48:41 -07002907 const size_t cur_tile_size = tile_buffers[tile_row][tile_col].size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002908
2909 int i;
2910
2911 if (tile_row == 0) return 0;
2912
2913 // TODO(yunqingwang): For now, only the above tile is checked and used.
2914 // More candidates such as left tile can be added later.
2915 for (i = 0; i < 1; i++) {
2916 int row_offset = candidate_offset[0].row;
2917 int col_offset = candidate_offset[0].col;
2918 int row = tile_row - row_offset;
2919 int col = tile_col - col_offset;
2920 uint8_t tile_hdr;
2921 const uint8_t *tile_data;
2922 TileBufferEnc *candidate;
2923
2924 if (row < 0 || col < 0) continue;
2925
2926 tile_hdr = *(tile_buffers[row][col].data);
2927
2928 // Read out tcm bit
2929 if ((tile_hdr >> 7) == 1) {
2930 // The candidate is a copy tile itself
2931 row_offset += tile_hdr & 0x7f;
2932 row = tile_row - row_offset;
2933 }
2934
2935 candidate = &tile_buffers[row][col];
2936
2937 if (row_offset >= 128 || candidate->size != cur_tile_size) continue;
2938
2939 tile_data = candidate->data + 4;
2940
2941 if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;
2942
2943 // Identical tile found
2944 assert(row_offset > 0);
2945 return row_offset;
2946 }
2947
2948 // No identical tile found
2949 return 0;
2950}
2951#endif // CONFIG_EXT_TILE
2952
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04002953#if !CONFIG_OBU || CONFIG_EXT_TILE
Yaowu Xuf883b422016-08-30 14:01:10 -07002954static uint32_t write_tiles(AV1_COMP *const cpi, uint8_t *const dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002955 unsigned int *max_tile_size,
2956 unsigned int *max_tile_col_size) {
Thomas Davies4822e142017-10-10 11:30:36 +01002957 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuf883b422016-08-30 14:01:10 -07002958 aom_writer mode_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002959 int tile_row, tile_col;
2960 TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
clang-format67948d32016-09-07 22:40:40 -07002961 TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
James Zern71a37de2017-04-20 16:03:13 -07002962 uint32_t total_size = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002963 const int tile_cols = cm->tile_cols;
2964 const int tile_rows = cm->tile_rows;
Thomas Daviesaf6df172016-11-09 14:04:18 +00002965 unsigned int tile_size = 0;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002966 const int have_tiles = tile_cols * tile_rows > 1;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002967 struct aom_write_bit_buffer wb = { dst, 0 };
Thomas Davies80188d12016-10-26 16:08:35 -07002968 const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
Debargha Mukherjee2eada612017-09-22 15:37:39 -07002969 uint32_t compressed_hdr_size;
Thomas Davies80188d12016-10-26 16:08:35 -07002970 // Fixed size tile groups for the moment
2971 const int num_tg_hdrs = cm->num_tg;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07002972 const int tg_size =
2973#if CONFIG_EXT_TILE
2974 (cm->large_scale_tile)
2975 ? 1
2976 :
2977#endif // CONFIG_EXT_TILE
2978 (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
Thomas Davies80188d12016-10-26 16:08:35 -07002979 int tile_count = 0;
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00002980 int tg_count = 1;
2981 int tile_size_bytes = 4;
2982 int tile_col_size_bytes;
James Zern71a37de2017-04-20 16:03:13 -07002983 uint32_t uncompressed_hdr_size = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07002984 struct aom_write_bit_buffer tg_params_wb;
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00002985 struct aom_write_bit_buffer tile_size_bytes_wb;
James Zern71a37de2017-04-20 16:03:13 -07002986 uint32_t saved_offset;
Thomas Daviesaf6df172016-11-09 14:04:18 +00002987 int mtu_size = cpi->oxcf.mtu;
2988 int curr_tg_data_size = 0;
2989 int hdr_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002990
2991 *max_tile_size = 0;
2992 *max_tile_col_size = 0;
2993
2994// All tile size fields are written as 4-byte values here. A call to
2995// remux_tiles can later compact them if smaller size fields are adequate.
2996
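// Tiles are emitted in tile groups. A new group (with its own uncompressed
// and, when used, compressed header) is started either every tg_size tiles
// or, when an MTU is configured, as soon as the running group payload
// (curr_tg_data_size) reaches cpi->oxcf.mtu; the headers are then inserted in
// front of the offending tile by the memmove logic below.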
Thomas Davies4822e142017-10-10 11:30:36 +01002997#if CONFIG_SIMPLE_BWD_ADAPT
2998 cm->largest_tile_id = 0;
2999#endif
3000
Yaowu Xuc27fc142016-08-22 16:08:15 -07003001#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003002 if (cm->large_scale_tile) {
3003 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
3004 TileInfo tile_info;
3005 const int is_last_col = (tile_col == tile_cols - 1);
3006 const uint32_t col_offset = total_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003007
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003008 av1_tile_set_col(&tile_info, cm, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003009
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003010 // The last column does not have a column header
3011 if (!is_last_col) total_size += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003012
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003013 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
3014 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
3015 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
3016 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
3017 const int data_offset = have_tiles ? 4 : 0;
3018 const int tile_idx = tile_row * tile_cols + tile_col;
3019 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
3020 av1_tile_set_row(&tile_info, cm, tile_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003021
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003022 buf->data = dst + total_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003023
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003024 // If CONFIG_EXT_TILE = 1, every tile in the row has a header,
3025 // even for the last one, unless no tiling is used at all.
3026 total_size += data_offset;
3027 // Initialise tile context from the frame context
3028 this_tile->tctx = *cm->fc;
3029 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07003030 mode_bc.allow_update_cdf = !cm->large_scale_tile;
Rupert Swarbrick7546b302017-10-26 10:45:26 +01003031#if CONFIG_LOOP_RESTORATION
Rupert Swarbrick76405202017-11-07 16:35:55 +00003032 av1_reset_loop_restoration(&cpi->td.mb.e_mbd);
Rupert Swarbrick7546b302017-10-26 10:45:26 +01003033#endif // CONFIG_LOOP_RESTORATION
3034
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003035 aom_start_encode(&mode_bc, buf->data + data_offset);
3036 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
3037 assert(tok == tok_end);
3038 aom_stop_encode(&mode_bc);
3039 tile_size = mode_bc.pos;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003040 buf->size = tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003041
Thomas Davies4822e142017-10-10 11:30:36 +01003042#if CONFIG_SIMPLE_BWD_ADAPT
3043 if (tile_size > *max_tile_size) {
3044 cm->largest_tile_id = tile_cols * tile_row + tile_col;
3045 }
3046#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003047 // Record the maximum tile size we see, so we can compact headers later.
3048 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003049
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003050 if (have_tiles) {
3051 // tile header: size of this tile, or copy offset
3052 uint32_t tile_header = tile_size;
3053 const int tile_copy_mode =
3054 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256)
3055 ? 1
3056 : 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003057
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003058 // If tile_copy_mode = 1, check if this tile is a copy tile.
3059 // Copy tiles are very unlikely on key frames, so skip the search
3060 // there to avoid unnecessary work.
3061 if (cm->frame_type != KEY_FRAME && tile_copy_mode) {
3062 const int identical_tile_offset =
3063 find_identical_tile(tile_row, tile_col, tile_buffers);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003064
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003065 if (identical_tile_offset > 0) {
3066 tile_size = 0;
3067 tile_header = identical_tile_offset | 0x80;
3068 tile_header <<= 24;
3069 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003070 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003071
3072 mem_put_le32(buf->data, tile_header);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003073 }
3074
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003075 total_size += tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003076 }
3077
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003078 if (!is_last_col) {
3079 uint32_t col_size = total_size - col_offset - 4;
3080 mem_put_le32(dst + col_offset, col_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003081
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003082 // If this is not the final packing, record the maximum tile column size
3083 // we see; otherwise, check whether the tile size is out of range.
3084 *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
3085 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003086 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003087 } else {
3088#endif // CONFIG_EXT_TILE
Soo-Chul Han38427e82017-09-27 15:06:13 -04003089
3090#if !CONFIG_OBU
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003091 write_uncompressed_header_frame(cpi, &wb);
Soo-Chul Han38427e82017-09-27 15:06:13 -04003092#else
3093 write_uncompressed_header_obu(cpi, &wb);
3094#endif
Thomas Davies80188d12016-10-26 16:08:35 -07003095
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003096 if (cm->show_existing_frame) {
3097 total_size = aom_wb_bytes_written(&wb);
3098 return (uint32_t)total_size;
3099 }
Jingning Hand3f441c2017-03-06 09:12:54 -08003100
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003101 // Write the tile length code
3102 tile_size_bytes_wb = wb;
3103 aom_wb_write_literal(&wb, 3, 2);
Thomas Davies80188d12016-10-26 16:08:35 -07003104
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003105 /* Write a placeholder for the number of tiles in each tile group */
3106 tg_params_wb = wb;
3107 saved_offset = wb.bit_offset;
3108 if (have_tiles) {
Imdad Sardharwalla857c99b2017-11-21 15:53:31 +00003109 aom_wb_write_literal(&wb, 3, n_log2_tiles);
3110 aom_wb_write_literal(&wb, (1 << n_log2_tiles) - 1, n_log2_tiles);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003111 }
Thomas Davies80188d12016-10-26 16:08:35 -07003112
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003113 if (!use_compressed_header(cm)) {
3114 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
3115 compressed_hdr_size = 0;
3116 } else {
3117 /* Write a placeholder for the compressed header length */
3118 struct aom_write_bit_buffer comp_hdr_len_wb = wb;
3119 aom_wb_write_literal(&wb, 0, 16);
Thomas Davies80188d12016-10-26 16:08:35 -07003120
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003121 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
3122 compressed_hdr_size =
3123 write_compressed_header(cpi, dst + uncompressed_hdr_size);
3124 aom_wb_overwrite_literal(&comp_hdr_len_wb, (int)(compressed_hdr_size),
3125 16);
3126 }
3127
3128 hdr_size = uncompressed_hdr_size + compressed_hdr_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003129 total_size += hdr_size;
Thomas Davies80188d12016-10-26 16:08:35 -07003130
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003131 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
3132 TileInfo tile_info;
3133 const int is_last_row = (tile_row == tile_rows - 1);
3134 av1_tile_set_row(&tile_info, cm, tile_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003135
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003136 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
3137 const int tile_idx = tile_row * tile_cols + tile_col;
3138 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
3139 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
3140 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
3141 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
3142 const int is_last_col = (tile_col == tile_cols - 1);
3143 const int is_last_tile = is_last_col && is_last_row;
Thomas Daviesaf6df172016-11-09 14:04:18 +00003144
Thomas Daviesb25ba502017-07-18 10:18:24 +01003145 if ((!mtu_size && tile_count > tg_size) ||
3146 (mtu_size && tile_count && curr_tg_data_size >= mtu_size)) {
3147 // New tile group
3148 tg_count++;
3149 // We've exceeded the packet size
3150 if (tile_count > 1) {
3151 /* The last tile exceeded the packet size. The tile group size
3152 should therefore be tile_count-1.
3153 Move the last tile and insert headers before it
3154 */
3155 uint32_t old_total_size = total_size - tile_size - 4;
3156 memmove(dst + old_total_size + hdr_size, dst + old_total_size,
3157 (tile_size + 4) * sizeof(uint8_t));
3158 // Copy uncompressed header
3159 memmove(dst + old_total_size, dst,
3160 uncompressed_hdr_size * sizeof(uint8_t));
3161 // Write the number of tiles in the group into the last uncompressed
3162 // header before the one we've just inserted
3163 aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
3164 n_log2_tiles);
3165 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 2,
3166 n_log2_tiles);
3167 // Update the pointer to the last TG params
3168 tg_params_wb.bit_offset = saved_offset + 8 * old_total_size;
3169 // Copy compressed header
3170 memmove(dst + old_total_size + uncompressed_hdr_size,
3171 dst + uncompressed_hdr_size,
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003172 compressed_hdr_size * sizeof(uint8_t));
Thomas Daviesb25ba502017-07-18 10:18:24 +01003173 total_size += hdr_size;
3174 tile_count = 1;
3175 curr_tg_data_size = hdr_size + tile_size + 4;
3176 } else {
3177 // We exceeded the packet size in just one tile
3178 // Copy uncompressed header
3179 memmove(dst + total_size, dst,
3180 uncompressed_hdr_size * sizeof(uint8_t));
3181 // Write the number of tiles in the group into the last uncompressed
3182 // header
3183 aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
3184 n_log2_tiles);
3185 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1,
3186 n_log2_tiles);
3187 tg_params_wb.bit_offset = saved_offset + 8 * total_size;
3188 // Copy compressed header
3189 memmove(dst + total_size + uncompressed_hdr_size,
3190 dst + uncompressed_hdr_size,
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003191 compressed_hdr_size * sizeof(uint8_t));
Thomas Daviesb25ba502017-07-18 10:18:24 +01003192 total_size += hdr_size;
3193 tile_count = 0;
3194 curr_tg_data_size = hdr_size;
3195 }
Thomas Daviesaf6df172016-11-09 14:04:18 +00003196 }
Thomas Daviesb25ba502017-07-18 10:18:24 +01003197 tile_count++;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003198 av1_tile_set_col(&tile_info, cm, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003199
Thomas Daviesb25ba502017-07-18 10:18:24 +01003200#if CONFIG_DEPENDENT_HORZTILES
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003201 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
Fangwen Fu73126c02017-02-08 22:37:47 -08003202#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003203 buf->data = dst + total_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003204
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003205 // The last tile does not have a header.
3206 if (!is_last_tile) total_size += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003207
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003208 // Initialise tile context from the frame context
3209 this_tile->tctx = *cm->fc;
3210 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07003211 mode_bc.allow_update_cdf = 1;
Rupert Swarbrick6c545212017-09-01 17:17:25 +01003212#if CONFIG_LOOP_RESTORATION
Rupert Swarbrick76405202017-11-07 16:35:55 +00003213 av1_reset_loop_restoration(&cpi->td.mb.e_mbd);
Rupert Swarbrick6c545212017-09-01 17:17:25 +01003214#endif // CONFIG_LOOP_RESTORATION
3215
Alex Converse30f0e152017-03-28 10:13:27 -07003216 aom_start_encode(&mode_bc, dst + total_size);
3217 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
Jingning Han223b90e2017-04-04 09:48:37 -07003218#if !CONFIG_LV_MAP
Alex Converse30f0e152017-03-28 10:13:27 -07003219 assert(tok == tok_end);
Jingning Han223b90e2017-04-04 09:48:37 -07003220#endif // !CONFIG_LV_MAP
Alex Converse30f0e152017-03-28 10:13:27 -07003221 aom_stop_encode(&mode_bc);
3222 tile_size = mode_bc.pos;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003223 assert(tile_size > 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003224
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003225 curr_tg_data_size += tile_size + 4;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003226 buf->size = tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003227
Thomas Davies4822e142017-10-10 11:30:36 +01003228#if CONFIG_SIMPLE_BWD_ADAPT
3229 if (tile_size > *max_tile_size) {
3230 cm->largest_tile_id = tile_cols * tile_row + tile_col;
3231 }
3232#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003233 if (!is_last_tile) {
3234 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
3235 // size of this tile
3236 mem_put_le32(buf->data, tile_size);
3237 }
3238
3239 total_size += tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003240 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003241 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003242 // Write the final tile group size
3243 if (n_log2_tiles) {
Dominic Symesf58f1112017-09-25 12:47:40 +02003244 aom_wb_overwrite_literal(
3245 &tg_params_wb, (tile_cols * tile_rows) - tile_count, n_log2_tiles);
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003246 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1, n_log2_tiles);
3247 }
3248 // Remux if possible. TODO (Thomas Davies): do this for more than one tile
3249 // group
3250 if (have_tiles && tg_count == 1) {
Debargha Mukherjee2eada612017-09-22 15:37:39 -07003251 int data_size =
3252 total_size - (uncompressed_hdr_size + compressed_hdr_size);
3253 data_size =
3254 remux_tiles(cm, dst + uncompressed_hdr_size + compressed_hdr_size,
3255 data_size, *max_tile_size, *max_tile_col_size,
3256 &tile_size_bytes, &tile_col_size_bytes);
3257 total_size = data_size + uncompressed_hdr_size + compressed_hdr_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003258 aom_wb_overwrite_literal(&tile_size_bytes_wb, tile_size_bytes - 1, 2);
3259 }
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00003260
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003261#if CONFIG_EXT_TILE
3262 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003263#endif // CONFIG_EXT_TILE
3264 return (uint32_t)total_size;
3265}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003266#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003267
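// Signals whether the render size differs from the coded frame size; when it
// does, render_width and render_height are coded as 16-bit values minus one.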
static void write_render_size(const AV1_COMMON *cm,
                              struct aom_write_bit_buffer *wb) {
  const int scaling_active = !av1_resize_unscaled(cm);
  aom_wb_write_bit(wb, scaling_active);
  if (scaling_active) {
    aom_wb_write_literal(wb, cm->render_width - 1, 16);
    aom_wb_write_literal(wb, cm->render_height - 1, 16);
  }
}

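// Codes the horizontal superres denominator: a single 0 bit when no scaling
// is applied, otherwise a 1 bit followed by the denominator offset from
// SUPERRES_SCALE_DENOMINATOR_MIN in SUPERRES_SCALE_BITS bits.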
#if CONFIG_HORZONLY_FRAME_SUPERRES
static void write_superres_scale(const AV1_COMMON *const cm,
                                 struct aom_write_bit_buffer *wb) {
  // First bit is whether to scale or not
  if (cm->superres_scale_denominator == SCALE_NUMERATOR) {
    aom_wb_write_bit(wb, 0);  // no scaling
  } else {
    aom_wb_write_bit(wb, 1);  // scaling, write scale factor
    assert(cm->superres_scale_denominator >= SUPERRES_SCALE_DENOMINATOR_MIN);
    assert(cm->superres_scale_denominator <
           SUPERRES_SCALE_DENOMINATOR_MIN + (1 << SUPERRES_SCALE_BITS));
    aom_wb_write_literal(
        wb, cm->superres_scale_denominator - SUPERRES_SCALE_DENOMINATOR_MIN,
        SUPERRES_SCALE_BITS);
  }
}
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES

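// Writes the coded frame dimensions (minus one), then the superres scale and
// the render size. With CONFIG_FRAME_SIZE the dimensions are only coded when
// frame_size_override is set (the caller sets it when the size differs from
// the sequence-level maximum).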
#if CONFIG_FRAME_SIZE
static void write_frame_size(const AV1_COMMON *cm, int frame_size_override,
                             struct aom_write_bit_buffer *wb)
#else
static void write_frame_size(const AV1_COMMON *cm,
                             struct aom_write_bit_buffer *wb)
#endif
{
#if CONFIG_HORZONLY_FRAME_SUPERRES
  const int coded_width = cm->superres_upscaled_width - 1;
  const int coded_height = cm->superres_upscaled_height - 1;
#else
  const int coded_width = cm->width - 1;
  const int coded_height = cm->height - 1;
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES

#if CONFIG_FRAME_SIZE
  if (frame_size_override) {
    const SequenceHeader *seq_params = &cm->seq_params;
    int num_bits_width = seq_params->num_bits_width;
    int num_bits_height = seq_params->num_bits_height;
    aom_wb_write_literal(wb, coded_width, num_bits_width);
    aom_wb_write_literal(wb, coded_height, num_bits_height);
  }
#else
  aom_wb_write_literal(wb, coded_width, 16);
  aom_wb_write_literal(wb, coded_height, 16);
#endif

#if CONFIG_HORZONLY_FRAME_SUPERRES
  write_superres_scale(cm, wb);
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES
  write_render_size(cm, wb);
}

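// For each active reference, signals whether the current frame has the same
// size as that reference; if no reference matches, the size is coded
// explicitly via write_frame_size().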
static void write_frame_size_with_refs(AV1_COMP *cpi,
                                       struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  int found = 0;

  MV_REFERENCE_FRAME ref_frame;
  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);

    if (cfg != NULL) {
#if CONFIG_HORZONLY_FRAME_SUPERRES
      found = cm->superres_upscaled_width == cfg->y_crop_width &&
              cm->superres_upscaled_height == cfg->y_crop_height;
#else
      found =
          cm->width == cfg->y_crop_width && cm->height == cfg->y_crop_height;
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES
      found &= cm->render_width == cfg->render_width &&
               cm->render_height == cfg->render_height;
    }
    aom_wb_write_bit(wb, found);
    if (found) {
#if CONFIG_HORZONLY_FRAME_SUPERRES
      write_superres_scale(cm, wb);
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES
      break;
    }
  }

#if CONFIG_FRAME_SIZE
  if (!found) {
    int frame_size_override = 1;  // Always equal to 1 in this function
    write_frame_size(cm, frame_size_override, wb);
  }
#else
  if (!found) write_frame_size(cm, wb);
#endif
}

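// Note the non-monotonic mapping below: profiles 0-2 use two-bit codes while
// profile 3 takes a three-bit escape code.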
static void write_profile(BITSTREAM_PROFILE profile,
                          struct aom_write_bit_buffer *wb) {
  switch (profile) {
    case PROFILE_0: aom_wb_write_literal(wb, 0, 2); break;
    case PROFILE_1: aom_wb_write_literal(wb, 2, 2); break;
    case PROFILE_2: aom_wb_write_literal(wb, 1, 2); break;
    case PROFILE_3: aom_wb_write_literal(wb, 6, 3); break;
    default: assert(0);
  }
}

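// Writes bit depth (profiles >= 2), color space (and transfer function when
// CONFIG_COLORSPACE_HEADERS), color range and chroma subsampling. sRGB and,
// when enabled, monochrome streams take early paths that skip the range and
// subsampling bits.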
static void write_bitdepth_colorspace_sampling(
    AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
  if (cm->profile >= PROFILE_2) {
    assert(cm->bit_depth > AOM_BITS_8);
    aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1);
  }
#if CONFIG_COLORSPACE_HEADERS
  aom_wb_write_literal(wb, cm->color_space, 5);
  aom_wb_write_literal(wb, cm->transfer_function, 5);
#else
  aom_wb_write_literal(wb, cm->color_space, 3 + CONFIG_MONO_VIDEO);
#endif
  if (cm->color_space == AOM_CS_SRGB) {
    assert(cm->profile == PROFILE_1 || cm->profile == PROFILE_3);
    aom_wb_write_bit(wb, 0);  // unused
#if CONFIG_MONO_VIDEO
  } else if (cm->color_space == AOM_CS_MONOCHROME) {
    return;
#endif  // CONFIG_MONO_VIDEO
  } else {
    // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
    aom_wb_write_bit(wb, cm->color_range);
    if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
      assert(cm->subsampling_x != 1 || cm->subsampling_y != 1);
      aom_wb_write_bit(wb, cm->subsampling_x);
      aom_wb_write_bit(wb, cm->subsampling_y);
      aom_wb_write_bit(wb, 0);  // unused
    } else {
      assert(cm->subsampling_x == 1 && cm->subsampling_y == 1);
    }
#if CONFIG_COLORSPACE_HEADERS
    if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
      aom_wb_write_literal(wb, cm->chroma_sample_position, 2);
    }
#endif
  }

#if CONFIG_EXT_QM
  aom_wb_write_bit(wb, cm->separate_uv_delta_q);
#endif
}

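// Sequence-level parameters: maximum frame dimensions (under
// CONFIG_FRAME_SIZE) and the frame-id signalling lengths. The chosen values
// are also stored in cm->seq_params for the rest of the encoder.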
#if CONFIG_REFERENCE_BUFFER || CONFIG_OBU
void write_sequence_header(AV1_COMP *cpi, struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  SequenceHeader *seq_params = &cm->seq_params;

#if CONFIG_FRAME_SIZE
  int num_bits_width = 16;
  int num_bits_height = 16;
  int max_frame_width = cpi->oxcf.width;
  int max_frame_height = cpi->oxcf.height;

  seq_params->num_bits_width = num_bits_width;
  seq_params->num_bits_height = num_bits_height;
  seq_params->max_frame_width = max_frame_width;
  seq_params->max_frame_height = max_frame_height;

  aom_wb_write_literal(wb, num_bits_width - 1, 4);
  aom_wb_write_literal(wb, num_bits_height - 1, 4);
  aom_wb_write_literal(wb, max_frame_width - 1, num_bits_width);
  aom_wb_write_literal(wb, max_frame_height - 1, num_bits_height);
#endif

  /* Placeholder for actually writing to the bitstream */
  seq_params->frame_id_numbers_present_flag =
#if CONFIG_EXT_TILE
      cm->large_scale_tile ? 0 :
#endif  // CONFIG_EXT_TILE
                           FRAME_ID_NUMBERS_PRESENT_FLAG;
  seq_params->frame_id_length = FRAME_ID_LENGTH;
  seq_params->delta_frame_id_length = DELTA_FRAME_ID_LENGTH;

  aom_wb_write_bit(wb, seq_params->frame_id_numbers_present_flag);
  if (seq_params->frame_id_numbers_present_flag) {
    // We must always have delta_frame_id_length < frame_id_length,
    // in order for a frame to be referenced with a unique delta.
    // Avoid wasting bits by using a coding that enforces this restriction.
    aom_wb_write_literal(wb, seq_params->delta_frame_id_length - 2, 4);
    aom_wb_write_literal(
        wb, seq_params->frame_id_length - seq_params->delta_frame_id_length - 1,
        3);
  }
}
#endif  // CONFIG_REFERENCE_BUFFER || CONFIG_OBU

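// The superblock size bit is only coded when 128x128 superblocks are
// possible (CONFIG_EXT_PARTITION); otherwise 64x64 is implied.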
static void write_sb_size(const AV1_COMMON *cm,
                          struct aom_write_bit_buffer *wb) {
  (void)cm;
  (void)wb;
  assert(cm->mib_size == mi_size_wide[cm->sb_size]);
  assert(cm->mib_size == 1 << cm->mib_size_log2);
#if CONFIG_EXT_PARTITION
  assert(cm->sb_size == BLOCK_128X128 || cm->sb_size == BLOCK_64X64);
  aom_wb_write_bit(wb, cm->sb_size == BLOCK_128X128 ? 1 : 0);
#else
  assert(cm->sb_size == BLOCK_64X64);
#endif  // CONFIG_EXT_PARTITION
}

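// Frame-level switches for interintra and masked compound prediction; each
// bit is only coded when the current reference mode makes the tool usable.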
static void write_compound_tools(const AV1_COMMON *cm,
                                 struct aom_write_bit_buffer *wb) {
  if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
    aom_wb_write_bit(wb, cm->allow_interintra_compound);
  } else {
    assert(cm->allow_interintra_compound == 0);
  }
  if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
    aom_wb_write_bit(wb, cm->allow_masked_compound);
  } else {
    assert(cm->allow_masked_compound == 0);
  }
}

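// Codes one global motion model relative to a reference model: first the
// type (IDENTITY / TRANSLATION / ROTZOOM / AFFINE), then the applicable
// wmmat parameters as subexponential residuals against ref_params.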
static void write_global_motion_params(const WarpedMotionParams *params,
                                       const WarpedMotionParams *ref_params,
                                       struct aom_write_bit_buffer *wb,
                                       int allow_hp) {
  const TransformationType type = params->wmtype;

  aom_wb_write_bit(wb, type != IDENTITY);
  if (type != IDENTITY) {
#if GLOBAL_TRANS_TYPES > 4
    aom_wb_write_literal(wb, type - 1, GLOBAL_TYPE_BITS);
#else
    aom_wb_write_bit(wb, type == ROTZOOM);
    if (type != ROTZOOM) aom_wb_write_bit(wb, type == TRANSLATION);
#endif  // GLOBAL_TRANS_TYPES > 4
  }

  if (type >= ROTZOOM) {
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
            (1 << GM_ALPHA_PREC_BITS),
        (params->wmmat[2] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF),
        (params->wmmat[3] >> GM_ALPHA_PREC_DIFF));
  }

  if (type >= AFFINE) {
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF),
        (params->wmmat[4] >> GM_ALPHA_PREC_DIFF));
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
            (1 << GM_ALPHA_PREC_BITS),
        (params->wmmat[5] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
  }

  if (type >= TRANSLATION) {
    const int trans_bits = (type == TRANSLATION)
                               ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
                               : GM_ABS_TRANS_BITS;
    const int trans_prec_diff = (type == TRANSLATION)
                                    ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
                                    : GM_TRANS_PREC_DIFF;
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
        (ref_params->wmmat[0] >> trans_prec_diff),
        (params->wmmat[0] >> trans_prec_diff));
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
        (ref_params->wmmat[1] >> trans_prec_diff),
        (params->wmmat[1] >> trans_prec_diff));
  }
}

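// Writes a global motion model for every inter reference frame. In
// error-resilient mode the models are coded against default_warp_params
// rather than the previous frame's parameters.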
static void write_global_motion(AV1_COMP *cpi,
                                struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  int frame;
  for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
    const WarpedMotionParams *ref_params =
        cm->error_resilient_mode ? &default_warp_params
                                 : &cm->prev_frame->global_motion[frame];
    write_global_motion_params(&cm->global_motion[frame], ref_params, wb,
                               cm->allow_high_precision_mv);
    // TODO(sarahparker, debargha): The logic in the commented out code below
    // does not work currently and causes mismatches when resize is on.
    // Fix it before turning the optimization back on.
    /*
    YV12_BUFFER_CONFIG *ref_buf = get_ref_frame_buffer(cpi, frame);
    if (cpi->source->y_crop_width == ref_buf->y_crop_width &&
        cpi->source->y_crop_height == ref_buf->y_crop_height) {
      write_global_motion_params(&cm->global_motion[frame],
                                 &cm->prev_frame->global_motion[frame], wb,
                                 cm->allow_high_precision_mv);
    } else {
      assert(cm->global_motion[frame].wmtype == IDENTITY &&
             "Invalid warp type for frames of different resolutions");
    }
    */
    /*
    printf("Frame %d/%d: Enc Ref %d: %d %d %d %d\n",
           cm->current_video_frame, cm->show_frame, frame,
           cm->global_motion[frame].wmmat[0],
           cm->global_motion[frame].wmmat[1], cm->global_motion[frame].wmmat[2],
           cm->global_motion[frame].wmmat[3]);
    */
  }
}

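// Uncompressed frame header used when OBUs are disabled: frame marker,
// profile, frame type, reference signalling, frame size and the frame-level
// tool flags, in bitstream order.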
#if !CONFIG_OBU
static void write_uncompressed_header_frame(AV1_COMP *cpi,
                                            struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;

  aom_wb_write_literal(wb, AOM_FRAME_MARKER, 2);

  write_profile(cm->profile, wb);

#if CONFIG_EXT_TILE
  aom_wb_write_literal(wb, cm->large_scale_tile, 1);
#endif  // CONFIG_EXT_TILE

  // NOTE: By default all coded frames to be used as a reference
  cm->is_reference_frame = 1;

  if (cm->show_existing_frame) {
    RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
    const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];

    if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
      aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                         "Buffer %d does not contain a reconstructed frame",
                         frame_to_show);
    }
    ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);

    aom_wb_write_bit(wb, 1);  // show_existing_frame
    aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);

#if CONFIG_REFERENCE_BUFFER
    if (cm->seq_params.frame_id_numbers_present_flag) {
      int frame_id_len = cm->seq_params.frame_id_length;
      int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
      aom_wb_write_literal(wb, display_frame_id, frame_id_len);
      /* Add a zero byte to prevent emulation of superframe marker */
      /* Same logic as when terminating the entropy coder */
      /* Consider having this logic in only one place */
      aom_wb_write_literal(wb, 0, 8);
    }
#endif  // CONFIG_REFERENCE_BUFFER

    return;
  } else {
    aom_wb_write_bit(wb, 0);  // show_existing_frame
  }

  aom_wb_write_bit(wb, cm->frame_type);
  aom_wb_write_bit(wb, cm->show_frame);
  if (cm->frame_type != KEY_FRAME)
    if (!cm->show_frame) aom_wb_write_bit(wb, cm->intra_only);
  aom_wb_write_bit(wb, cm->error_resilient_mode);

  if (frame_is_intra_only(cm)) {
#if CONFIG_REFERENCE_BUFFER
    write_sequence_header(cpi, wb);
#endif  // CONFIG_REFERENCE_BUFFER
  }
#if CONFIG_REFERENCE_BUFFER
  cm->invalid_delta_frame_id_minus1 = 0;
  if (cm->seq_params.frame_id_numbers_present_flag) {
    int frame_id_len = cm->seq_params.frame_id_length;
    aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
  }
#endif  // CONFIG_REFERENCE_BUFFER

#if CONFIG_FRAME_SIZE
  if (cm->width > cm->seq_params.max_frame_width ||
      cm->height > cm->seq_params.max_frame_height) {
    aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                       "Frame dimensions are larger than the maximum values");
  }
#if CONFIG_HORZONLY_FRAME_SUPERRES
  const int coded_width = cm->superres_upscaled_width;
  const int coded_height = cm->superres_upscaled_height;
#else
  const int coded_width = cm->width;
  const int coded_height = cm->height;
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES
  int frame_size_override_flag =
      (coded_width != cm->seq_params.max_frame_width ||
       coded_height != cm->seq_params.max_frame_height);
  aom_wb_write_bit(wb, frame_size_override_flag);
#endif

  if (cm->frame_type == KEY_FRAME) {
    write_bitdepth_colorspace_sampling(cm, wb);
#if CONFIG_FRAME_SIZE
    write_frame_size(cm, frame_size_override_flag, wb);
#else
    write_frame_size(cm, wb);
#endif
    write_sb_size(cm, wb);

    aom_wb_write_bit(wb, cm->allow_screen_content_tools);
#if CONFIG_INTRABC
    if (cm->allow_screen_content_tools) aom_wb_write_bit(wb, cm->allow_intrabc);
#endif  // CONFIG_INTRABC
#if CONFIG_AMVR
    if (cm->allow_screen_content_tools) {
      if (cm->seq_force_integer_mv == 2) {
        aom_wb_write_bit(wb, 1);
      } else {
        aom_wb_write_bit(wb, 0);
        aom_wb_write_bit(wb, cm->seq_force_integer_mv);
      }
    }
#endif
  } else {
#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
    if (!cm->error_resilient_mode) {
      if (cm->intra_only) {
        aom_wb_write_bit(wb,
                         cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
      } else {
        aom_wb_write_bit(wb,
                         cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
        if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
          aom_wb_write_bit(wb,
                           cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
      }
    }
#endif
    cpi->refresh_frame_mask = get_refresh_mask(cpi);

    if (cm->intra_only) {
      write_bitdepth_colorspace_sampling(cm, wb);

      aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
#if CONFIG_FRAME_SIZE
      write_frame_size(cm, frame_size_override_flag, wb);
#else
      write_frame_size(cm, wb);
#endif
      write_sb_size(cm, wb);
      aom_wb_write_bit(wb, cm->allow_screen_content_tools);
#if CONFIG_INTRABC
      if (cm->allow_screen_content_tools)
        aom_wb_write_bit(wb, cm->allow_intrabc);
#endif  // CONFIG_INTRABC
    } else {
      aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);

      if (!cpi->refresh_frame_mask) {
        // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
        // will not be used as a reference
        cm->is_reference_frame = 0;
      }

      for (MV_REFERENCE_FRAME ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME;
           ++ref_frame) {
        assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
        aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
                             REF_FRAMES_LOG2);
#if !CONFIG_FRAME_SIGN_BIAS
        aom_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
#endif  // !CONFIG_FRAME_SIGN_BIAS
#if CONFIG_REFERENCE_BUFFER
        if (cm->seq_params.frame_id_numbers_present_flag) {
          int i = get_ref_frame_map_idx(cpi, ref_frame);
          int frame_id_len = cm->seq_params.frame_id_length;
          int diff_len = cm->seq_params.delta_frame_id_length;
          int delta_frame_id_minus1 =
              ((cm->current_frame_id - cm->ref_frame_id[i] +
                (1 << frame_id_len)) %
               (1 << frame_id_len)) -
              1;
          if (delta_frame_id_minus1 < 0 ||
              delta_frame_id_minus1 >= (1 << diff_len))
            cm->invalid_delta_frame_id_minus1 = 1;
          aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
        }
#endif  // CONFIG_REFERENCE_BUFFER
      }

#if CONFIG_FRAME_SIZE
      if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
        write_frame_size_with_refs(cpi, wb);
      } else {
        write_frame_size(cm, frame_size_override_flag, wb);
      }
#else
      write_frame_size_with_refs(cpi, wb);
#endif

#if CONFIG_AMVR
      if (cm->seq_force_integer_mv == 2) {
        aom_wb_write_bit(wb, cm->cur_frame_force_integer_mv);
      }
      if (cm->cur_frame_force_integer_mv) {
        cm->allow_high_precision_mv = 0;
      } else {
#if !CONFIG_EIGHTH_PEL_MV_ONLY
        aom_wb_write_bit(wb, cm->allow_high_precision_mv);
#endif  // !CONFIG_EIGHTH_PEL_MV_ONLY
      }
#else
#if !CONFIG_EIGHTH_PEL_MV_ONLY
      aom_wb_write_bit(wb, cm->allow_high_precision_mv);
#endif  // !CONFIG_EIGHTH_PEL_MV_ONLY
#endif
      fix_interp_filter(cm, cpi->td.counts);
      write_frame_interp_filter(cm->interp_filter, wb);
#if CONFIG_TEMPMV_SIGNALING
      if (frame_might_use_prev_frame_mvs(cm))
        aom_wb_write_bit(wb, cm->use_ref_frame_mvs);
#endif
    }
  }

#if CONFIG_FRAME_MARKER
  if (cm->show_frame == 0) {
    int arf_offset = AOMMIN(
        (MAX_GF_INTERVAL - 1),
        cpi->twopass.gf_group.arf_src_offset[cpi->twopass.gf_group.index]);
    int brf_offset =
        cpi->twopass.gf_group.brf_src_offset[cpi->twopass.gf_group.index];

    arf_offset = AOMMIN((MAX_GF_INTERVAL - 1), arf_offset + brf_offset);
    aom_wb_write_literal(wb, arf_offset, FRAME_OFFSET_BITS);
  }
#endif  // CONFIG_FRAME_MARKER

#if CONFIG_REFERENCE_BUFFER
  if (cm->seq_params.frame_id_numbers_present_flag) {
    cm->refresh_mask =
        cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
  }
#endif  // CONFIG_REFERENCE_BUFFER

#if CONFIG_EXT_TILE
  const int might_bwd_adapt =
      !(cm->error_resilient_mode || cm->large_scale_tile);
#else
  const int might_bwd_adapt = !cm->error_resilient_mode;
#endif  // CONFIG_EXT_TILE
  if (might_bwd_adapt) {
    aom_wb_write_bit(
        wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
  }
#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
  aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);
#endif
  encode_loopfilter(cm, wb);
  encode_quantization(cm, wb);
  encode_segmentation(cm, xd, wb);
  {
    int delta_q_allowed = 1;
#if !CONFIG_EXT_DELTA_Q
    int i;
    struct segmentation *const seg = &cm->seg;
    int segment_quantizer_active = 0;
    for (i = 0; i < MAX_SEGMENTS; i++) {
      if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
        segment_quantizer_active = 1;
      }
    }
    delta_q_allowed = !segment_quantizer_active;
#endif

    if (cm->delta_q_present_flag) assert(cm->base_qindex > 0);
    // Segment quantizer and delta_q both allowed if CONFIG_EXT_DELTA_Q
    if (delta_q_allowed == 1 && cm->base_qindex > 0) {
      aom_wb_write_bit(wb, cm->delta_q_present_flag);
      if (cm->delta_q_present_flag) {
        aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
        xd->prev_qindex = cm->base_qindex;
#if CONFIG_EXT_DELTA_Q
        aom_wb_write_bit(wb, cm->delta_lf_present_flag);
        if (cm->delta_lf_present_flag) {
          aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_lf_res) - 1, 2);
          xd->prev_delta_lf_from_base = 0;
#if CONFIG_LOOPFILTER_LEVEL
          aom_wb_write_bit(wb, cm->delta_lf_multi);
          for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
            xd->prev_delta_lf[lf_id] = 0;
#endif  // CONFIG_LOOPFILTER_LEVEL
        }
#endif  // CONFIG_EXT_DELTA_Q
      }
    }
  }
  if (!cm->all_lossless) {
    encode_cdef(cm, wb);
  }
#if CONFIG_LOOP_RESTORATION
  encode_restoration_mode(cm, wb);
#endif  // CONFIG_LOOP_RESTORATION
  write_tx_mode(cm, &cm->tx_mode, wb);

  if (cpi->allow_comp_inter_inter) {
    const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
#if !CONFIG_REF_ADAPT
    const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE;
#endif  // !CONFIG_REF_ADAPT

    aom_wb_write_bit(wb, use_hybrid_pred);
#if !CONFIG_REF_ADAPT
    if (!use_hybrid_pred) aom_wb_write_bit(wb, use_compound_pred);
#endif  // !CONFIG_REF_ADAPT
  }

#if CONFIG_EXT_SKIP
  if (cm->is_skip_mode_allowed) aom_wb_write_bit(wb, cm->skip_mode_flag);
#endif  // CONFIG_EXT_SKIP

  write_compound_tools(cm, wb);

  aom_wb_write_bit(wb, cm->reduced_tx_set_used);

#if CONFIG_ADAPT_SCAN
#if CONFIG_EXT_TILE
  if (cm->large_scale_tile)
    assert(cm->use_adapt_scan == 0);
  else
#endif  // CONFIG_EXT_TILE
    aom_wb_write_bit(wb, cm->use_adapt_scan);
#endif

  if (!frame_is_intra_only(cm)) write_global_motion(cpi, wb);

  write_tile_info(cm, wb);
}

#else
// New function based on HLS R18
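// OBU variant of the uncompressed header. Compared with the writer above it
// codes frame_type as a two-bit literal (covering INTRA_ONLY_FRAME and
// S_FRAME) and does not write the bit-depth/color-space fields here.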
3915static void write_uncompressed_header_obu(AV1_COMP *cpi,
3916 struct aom_write_bit_buffer *wb) {
3917 AV1_COMMON *const cm = &cpi->common;
3918 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
3919
3920#if CONFIG_EXT_TILE
3921 aom_wb_write_literal(wb, cm->large_scale_tile, 1);
3922#endif // CONFIG_EXT_TILE
3923
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003924 // NOTE: By default all coded frames to be used as a reference
3925 cm->is_reference_frame = 1;
3926
3927 if (cm->show_existing_frame) {
3928 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
3929 const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];
3930
3931 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
3932 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3933 "Buffer %d does not contain a reconstructed frame",
3934 frame_to_show);
3935 }
3936 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
3937
3938 aom_wb_write_bit(wb, 1); // show_existing_frame
3939 aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
3940
3941#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003942 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003943 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003944 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
3945 aom_wb_write_literal(wb, display_frame_id, frame_id_len);
3946 /* Add a zero byte to prevent emulation of superframe marker */
3947 /* Same logic as when when terminating the entropy coder */
3948 /* Consider to have this logic only one place */
3949 aom_wb_write_literal(wb, 0, 8);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003950 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003951#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003952
3953 return;
3954 } else {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003955 aom_wb_write_bit(wb, 0); // show_existing_frame
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003956 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003957
3958 cm->frame_type = cm->intra_only ? INTRA_ONLY_FRAME : cm->frame_type;
3959 aom_wb_write_literal(wb, cm->frame_type, 2);
3960
3961 if (cm->intra_only) cm->frame_type = INTRA_ONLY_FRAME;
3962
3963 aom_wb_write_bit(wb, cm->show_frame);
3964 aom_wb_write_bit(wb, cm->error_resilient_mode);
3965
3966#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003967 cm->invalid_delta_frame_id_minus1 = 0;
3968 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003969 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003970 aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003971 }
3972#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003973
3974#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003975 if (cm->width > cm->seq_params.max_frame_width ||
3976 cm->height > cm->seq_params.max_frame_height) {
3977 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3978 "Frame dimensions are larger than the maximum values");
3979 }
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003980 int frame_size_override_flag =
3981 (cm->width != cm->seq_params.max_frame_width ||
3982 cm->height != cm->seq_params.max_frame_height);
3983 aom_wb_write_bit(wb, frame_size_override_flag);
3984#endif
3985
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003986 if (cm->frame_type == KEY_FRAME) {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003987#if CONFIG_FRAME_SIZE
3988 write_frame_size(cm, frame_size_override_flag, wb);
3989#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003990 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003991#endif
Debargha Mukherjeed2630fa2017-09-22 10:32:51 -07003992 write_sb_size(cm, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003993 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
Hui Suad7536b2017-12-13 15:48:11 -08003994#if CONFIG_INTRABC
3995 if (cm->allow_screen_content_tools) aom_wb_write_bit(wb, cm->allow_intrabc);
3996#endif // CONFIG_INTRABC
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003997#if CONFIG_AMVR
3998 if (cm->allow_screen_content_tools) {
RogerZhou10a03802017-10-26 11:49:48 -07003999 if (cm->seq_force_integer_mv == 2) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004000 aom_wb_write_bit(wb, 1);
4001 } else {
4002 aom_wb_write_bit(wb, 0);
RogerZhou10a03802017-10-26 11:49:48 -07004003 aom_wb_write_bit(wb, cm->seq_force_integer_mv == 0);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004004 }
4005 }
4006#endif
4007 } else if (cm->frame_type == INTRA_ONLY_FRAME) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004008#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4009 if (!cm->error_resilient_mode) {
4010 if (cm->intra_only) {
4011 aom_wb_write_bit(wb,
4012 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4013 }
4014 }
4015#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004016 cpi->refresh_frame_mask = get_refresh_mask(cpi);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004017
4018 if (cm->intra_only) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004019 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004020#if CONFIG_FRAME_SIZE
4021 write_frame_size(cm, frame_size_override_flag, wb);
4022#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004023 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004024#endif
Hui Suad7536b2017-12-13 15:48:11 -08004025 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
4026#if CONFIG_INTRABC
4027 if (cm->allow_screen_content_tools)
4028 aom_wb_write_bit(wb, cm->allow_intrabc);
4029#endif // CONFIG_INTRABC
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004030 }
4031 } else if (cm->frame_type == INTER_FRAME) {
4032 MV_REFERENCE_FRAME ref_frame;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004033#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4034 if (!cm->error_resilient_mode) {
4035 aom_wb_write_bit(wb, cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
4036 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
4037 aom_wb_write_bit(wb,
4038 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4039 }
4040#endif
4041
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004042 cpi->refresh_frame_mask = get_refresh_mask(cpi);
4043 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004044
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004045 if (!cpi->refresh_frame_mask) {
4046 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
4047 // will not be used as a reference
4048 cm->is_reference_frame = 0;
4049 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004050
4051 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4052 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
4053 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
4054 REF_FRAMES_LOG2);
Zoe Liu17af2742017-10-06 10:36:42 -07004055#if !CONFIG_FRAME_SIGN_BIAS
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004056 aom_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
Zoe Liu17af2742017-10-06 10:36:42 -07004057#endif // !CONFIG_FRAME_SIGN_BIAS
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004058#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004059 if (cm->seq_params.frame_id_numbers_present_flag) {
4060 int i = get_ref_frame_map_idx(cpi, ref_frame);
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02004061 int frame_id_len = cm->seq_params.frame_id_length;
4062 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01004063 int delta_frame_id_minus1 =
4064 ((cm->current_frame_id - cm->ref_frame_id[i] +
4065 (1 << frame_id_len)) %
4066 (1 << frame_id_len)) -
4067 1;
4068 if (delta_frame_id_minus1 < 0 ||
4069 delta_frame_id_minus1 >= (1 << diff_len))
4070 cm->invalid_delta_frame_id_minus1 = 1;
4071 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004072 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004073#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004074 }
4075
4076#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004077 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004078 write_frame_size_with_refs(cpi, wb);
4079 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004080 write_frame_size(cm, frame_size_override_flag, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004081 }
4082#else
4083 write_frame_size_with_refs(cpi, wb);
4084#endif
4085
4086#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07004087 if (cm->seq_force_integer_mv == 2) {
4088 aom_wb_write_bit(wb, cm->cur_frame_force_integer_mv == 0);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004089 }
4090#endif
4091 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
4092
4093 fix_interp_filter(cm, cpi->td.counts);
4094 write_frame_interp_filter(cm->interp_filter, wb);
4095#if CONFIG_TEMPMV_SIGNALING
4096 if (frame_might_use_prev_frame_mvs(cm)) {
Jingning Han923f8272017-12-14 10:50:12 -08004097 aom_wb_write_bit(wb, cm->use_ref_frame_mvs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004098 }
4099#endif
4100 } else if (cm->frame_type == S_FRAME) {
4101 MV_REFERENCE_FRAME ref_frame;
4102
4103#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4104 if (!cm->error_resilient_mode) {
4105 aom_wb_write_bit(wb, cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
4106 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
4107 aom_wb_write_bit(wb,
4108 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4109 }
4110#endif
4111
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004112 if (!cpi->refresh_frame_mask) {
4113 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
4114 // will not be used as a reference
4115 cm->is_reference_frame = 0;
4116 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004117
4118 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4119 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
4120 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
4121 REF_FRAMES_LOG2);
4122 assert(cm->ref_frame_sign_bias[ref_frame] == 0);
4123#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004124 if (cm->seq_params.frame_id_numbers_present_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004125 int i = get_ref_frame_map_idx(cpi, ref_frame);
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02004126 int frame_id_len = cm->seq_params.frame_id_length;
4127 int diff_len = cm->seq_params.delta_frame_id_length;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004128 int delta_frame_id_minus1 =
4129 ((cm->current_frame_id - cm->ref_frame_id[i] +
4130 (1 << frame_id_len)) %
4131 (1 << frame_id_len)) -
4132 1;
4133 if (delta_frame_id_minus1 < 0 ||
4134 delta_frame_id_minus1 >= (1 << diff_len))
4135 cm->invalid_delta_frame_id_minus1 = 1;
4136 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
4137 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004138#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004139 }
4140
4141#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004142 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004143 write_frame_size_with_refs(cpi, wb);
4144 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004145 write_frame_size(cm, frame_size_override_flag, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004146 }
4147#else
4148 write_frame_size_with_refs(cpi, wb);
4149#endif
4150
4151 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
4152
4153 fix_interp_filter(cm, cpi->td.counts);
4154 write_frame_interp_filter(cm->interp_filter, wb);
4155#if CONFIG_TEMPMV_SIGNALING
4156 if (frame_might_use_prev_frame_mvs(cm)) {
Jingning Han923f8272017-12-14 10:50:12 -08004157 aom_wb_write_bit(wb, cm->use_ref_frame_mvs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004158 }
4159#endif
4160 }
4161
Soo-Chul Hanebdbcb42017-11-02 18:26:21 -04004162#if CONFIG_FRAME_MARKER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004163 if (cm->show_frame == 0) {
4164 int arf_offset = AOMMIN(
4165 (MAX_GF_INTERVAL - 1),
4166 cpi->twopass.gf_group.arf_src_offset[cpi->twopass.gf_group.index]);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004167 int brf_offset =
4168 cpi->twopass.gf_group.brf_src_offset[cpi->twopass.gf_group.index];
4169
4170 arf_offset = AOMMIN((MAX_GF_INTERVAL - 1), arf_offset + brf_offset);
Cheng Chend300f0e2017-12-01 10:46:23 -08004171 aom_wb_write_literal(wb, arf_offset, FRAME_OFFSET_BITS);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004172 }
Zoe Liu104d62e2017-12-07 12:44:45 -08004173#endif // CONFIG_FRAME_MARKER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004174
4175#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004176 if (cm->seq_params.frame_id_numbers_present_flag) {
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004177 cm->refresh_mask =
4178 cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
4179 }
4180#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004181
4182 if (!cm->error_resilient_mode) {
4183 aom_wb_write_bit(
4184 wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
4185 }
4186#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4187 aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);
4188#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004189 encode_loopfilter(cm, wb);
4190 encode_quantization(cm, wb);
4191 encode_segmentation(cm, xd, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004192 {
Thomas Davies28444be2017-10-13 18:12:25 +01004193 int delta_q_allowed = 1;
4194#if !CONFIG_EXT_DELTA_Q
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004195 int i;
4196 struct segmentation *const seg = &cm->seg;
4197 int segment_quantizer_active = 0;
4198 for (i = 0; i < MAX_SEGMENTS; i++) {
4199 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
4200 segment_quantizer_active = 1;
4201 }
4202 }
Thomas Davies28444be2017-10-13 18:12:25 +01004203 delta_q_allowed = !segment_quantizer_active;
4204#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004205
4206 if (cm->delta_q_present_flag)
Thomas Davies28444be2017-10-13 18:12:25 +01004207 assert(delta_q_allowed == 1 && cm->base_qindex > 0);
4208 if (delta_q_allowed == 1 && cm->base_qindex > 0) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004209 aom_wb_write_bit(wb, cm->delta_q_present_flag);
4210 if (cm->delta_q_present_flag) {
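        // OD_ILOG_NZ(res) - 1 equals log2(res) for a power-of-two resolution,
        // so the delta-q (and, below, delta-lf) step size is coded as its
        // log2 in two bits.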
4211 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
4212 xd->prev_qindex = cm->base_qindex;
4213#if CONFIG_EXT_DELTA_Q
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004214 aom_wb_write_bit(wb, cm->delta_lf_present_flag);
4215 if (cm->delta_lf_present_flag) {
4216 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_lf_res) - 1, 2);
Yaowu Xub02d0b12017-12-15 01:32:34 +00004217 xd->prev_delta_lf_from_base = 0;
Cheng Chena97394f2017-09-27 15:05:14 -07004218#if CONFIG_LOOPFILTER_LEVEL
Yaowu Xub02d0b12017-12-15 01:32:34 +00004219 aom_wb_write_bit(wb, cm->delta_lf_multi);
Cheng Chena97394f2017-09-27 15:05:14 -07004220 for (int lf_id = 0; lf_id < FRAME_LF_COUNT; ++lf_id)
4221 xd->prev_delta_lf[lf_id] = 0;
4222#endif // CONFIG_LOOPFILTER_LEVEL
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004223 }
4224#endif // CONFIG_EXT_DELTA_Q
4225 }
4226 }
4227 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004228 if (!cm->all_lossless) {
4229 encode_cdef(cm, wb);
4230 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004231#if CONFIG_LOOP_RESTORATION
4232 encode_restoration_mode(cm, wb);
4233#endif // CONFIG_LOOP_RESTORATION
4234 write_tx_mode(cm, &cm->tx_mode, wb);
4235
4236 if (cpi->allow_comp_inter_inter) {
4237 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
4238#if !CONFIG_REF_ADAPT
4239 const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE;
4240#endif // !CONFIG_REF_ADAPT
4241
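    // use_hybrid_pred means the reference mode is chosen per block
    // (REFERENCE_MODE_SELECT); otherwise, without CONFIG_REF_ADAPT, one more
    // bit selects single- versus compound-reference prediction for the frame.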
4242 aom_wb_write_bit(wb, use_hybrid_pred);
4243#if !CONFIG_REF_ADAPT
4244 if (!use_hybrid_pred) aom_wb_write_bit(wb, use_compound_pred);
4245#endif // !CONFIG_REF_ADAPT
4246 }
Zoe Liu4b847e12017-12-07 12:44:45 -08004247
4248#if CONFIG_EXT_SKIP
4249#if 0
4250 printf("\n[ENCODER] Frame=%d, is_skip_mode_allowed=%d, skip_mode_flag=%d\n\n",
4251 (int)cm->frame_offset, cm->is_skip_mode_allowed, cm->skip_mode_flag);
4252#endif // 0
4253 if (cm->is_skip_mode_allowed) aom_wb_write_bit(wb, cm->skip_mode_flag);
4254#endif // CONFIG_EXT_SKIP
4255
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004256 write_compound_tools(cm, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004257
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004258 aom_wb_write_bit(wb, cm->reduced_tx_set_used);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004259
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004260 if (!frame_is_intra_only(cm)) write_global_motion(cpi, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004261
4262 write_tile_info(cm, wb);
4263}
4264#endif // CONFIG_OBU
4265
Yaowu Xuf883b422016-08-30 14:01:10 -07004266static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data) {
4267 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004268 FRAME_CONTEXT *const fc = cm->fc;
Yaowu Xuf883b422016-08-30 14:01:10 -07004269 aom_writer *header_bc;
Ryanf0e39192017-10-09 09:45:13 -07004270
Thomas Davies80188d12016-10-26 16:08:35 -07004271 const int probwt = cm->num_tg;
Thomas Davies04e5aa72017-06-28 14:36:39 +01004272 (void)probwt;
Thomas Davies04e5aa72017-06-28 14:36:39 +01004273 (void)fc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004274
Yaowu Xuf883b422016-08-30 14:01:10 -07004275 aom_writer real_header_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004276 header_bc = &real_header_bc;
Alex Converse30f0e152017-03-28 10:13:27 -07004277 aom_start_encode(header_bc, data);
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07004278
Debargha Mukherjee801cc922017-09-22 17:22:50 -07004279 if (!frame_is_intra_only(cm)) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07004280 if (cm->reference_mode != COMPOUND_REFERENCE &&
4281 cm->allow_interintra_compound) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004282 }
Sarah Parker689b0ca2016-10-11 12:06:33 -07004283 }
Yaowu Xuf883b422016-08-30 14:01:10 -07004284 aom_stop_encode(header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004285 assert(header_bc->pos <= 0xffff);
4286 return header_bc->pos;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004287}
4288
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004289#if !CONFIG_OBU || CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004290static int choose_size_bytes(uint32_t size, int spare_msbs) {
4291 // Choose the number of bytes required to represent size, without
4292 // using the 'spare_msbs' number of most significant bits.
4293
4294  // Make sure we will fit in 4 bytes to start with.
4295 if (spare_msbs > 0 && size >> (32 - spare_msbs) != 0) return -1;
4296
4297 // Normalise to 32 bits
4298 size <<= spare_msbs;
4299
4300 if (size >> 24 != 0)
4301 return 4;
4302 else if (size >> 16 != 0)
4303 return 3;
4304 else if (size >> 8 != 0)
4305 return 2;
4306 else
4307 return 1;
4308}
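// e.g. choose_size_bytes(0x1234, 0) == 2, choose_size_bytes(0x123456, 0) == 3,
// and choose_size_bytes(0x80000000, 1) == -1 since the value no longer fits in
// 32 bits once one MSB is reserved.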
4309
4310static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) {
4311 switch (sz) {
4312 case 1: dst[0] = (uint8_t)(val & 0xff); break;
4313 case 2: mem_put_le16(dst, val); break;
4314 case 3: mem_put_le24(dst, val); break;
4315 case 4: mem_put_le32(dst, val); break;
James Zern06c372d2017-04-20 16:08:29 -07004316 default: assert(0 && "Invalid size"); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004317 }
4318}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004319
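// Tiles are initially written with provisional 4-byte size fields. Once the
// largest tile (and tile column) size is known, remux_tiles() repacks the
// buffer in place, shrinking each size field to the chosen number of bytes and
// moving the payloads down with memmove(); it returns the new total size.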
Yaowu Xuf883b422016-08-30 14:01:10 -07004320static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004321 const uint32_t data_size, const uint32_t max_tile_size,
4322 const uint32_t max_tile_col_size,
4323 int *const tile_size_bytes,
4324 int *const tile_col_size_bytes) {
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004325 // Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
4326 int tsb;
4327 int tcsb;
4328
Yaowu Xuc27fc142016-08-22 16:08:15 -07004329#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004330 if (cm->large_scale_tile) {
4331 // The top bit in the tile size field indicates tile copy mode, so we
4332 // have 1 less bit to code the tile size
4333 tsb = choose_size_bytes(max_tile_size, 1);
4334 tcsb = choose_size_bytes(max_tile_col_size, 0);
4335 } else {
4336#endif // CONFIG_EXT_TILE
4337 tsb = choose_size_bytes(max_tile_size, 0);
4338 tcsb = 4; // This is ignored
4339 (void)max_tile_col_size;
4340#if CONFIG_EXT_TILE
4341 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004342#endif // CONFIG_EXT_TILE
4343
4344 assert(tsb > 0);
4345 assert(tcsb > 0);
4346
4347 *tile_size_bytes = tsb;
4348 *tile_col_size_bytes = tcsb;
4349
4350 if (tsb == 4 && tcsb == 4) {
4351 return data_size;
4352 } else {
4353 uint32_t wpos = 0;
4354 uint32_t rpos = 0;
4355
4356#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004357 if (cm->large_scale_tile) {
4358 int tile_row;
4359 int tile_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004360
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004361 for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
4362      // Every column but the last has a column header
4363 if (tile_col < cm->tile_cols - 1) {
4364 uint32_t tile_col_size = mem_get_le32(dst + rpos);
4365 rpos += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004366
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004367 // Adjust the tile column size by the number of bytes removed
4368 // from the tile size fields.
4369 tile_col_size -= (4 - tsb) * cm->tile_rows;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004370
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004371 mem_put_varsize(dst + wpos, tcsb, tile_col_size);
4372 wpos += tcsb;
4373 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004374
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004375 for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
4376        // Every row, including the last, has a header
4377 uint32_t tile_header = mem_get_le32(dst + rpos);
4378 rpos += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004379
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004380 // If this is a copy tile, we need to shift the MSB to the
4381 // top bit of the new width, and there is no data to copy.
4382 if (tile_header >> 31 != 0) {
4383 if (tsb < 4) tile_header >>= 32 - 8 * tsb;
4384 mem_put_varsize(dst + wpos, tsb, tile_header);
4385 wpos += tsb;
4386 } else {
4387 mem_put_varsize(dst + wpos, tsb, tile_header);
4388 wpos += tsb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004389
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004390 memmove(dst + wpos, dst + rpos, tile_header);
4391 rpos += tile_header;
4392 wpos += tile_header;
4393 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004394 }
4395 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004396 } else {
4397#endif // CONFIG_EXT_TILE
4398 const int n_tiles = cm->tile_cols * cm->tile_rows;
4399 int n;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004400
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004401 for (n = 0; n < n_tiles; n++) {
4402 int tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004403
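      // The last tile was written without a size field; its size is simply
      // whatever data remains.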
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004404 if (n == n_tiles - 1) {
4405 tile_size = data_size - rpos;
4406 } else {
4407 tile_size = mem_get_le32(dst + rpos);
4408 rpos += 4;
4409 mem_put_varsize(dst + wpos, tsb, tile_size);
4410 wpos += tsb;
4411 }
4412
4413 memmove(dst + wpos, dst + rpos, tile_size);
4414
4415 rpos += tile_size;
4416 wpos += tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004417 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004418#if CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004419 }
4420#endif // CONFIG_EXT_TILE
4421
4422 assert(rpos > wpos);
4423 assert(rpos == data_size);
4424
4425 return wpos;
4426 }
4427}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004428#endif
4429
4430#if CONFIG_OBU
Soo-Chul Han38427e82017-09-27 15:06:13 -04004431
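// The OBU header below is one byte -- a zero forbidden bit, the 4-bit
// obu_type, two reserved bits and an extension flag -- plus an optional second
// byte carrying the 8-bit extension header.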
4432uint32_t write_obu_header(OBU_TYPE obu_type, int obu_extension,
4433 uint8_t *const dst) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004434 struct aom_write_bit_buffer wb = { dst, 0 };
4435 uint32_t size = 0;
4436
Soo-Chul Han38427e82017-09-27 15:06:13 -04004437  // First bit is the obu_forbidden_bit, per R19.
4438 aom_wb_write_literal(&wb, 0, 1);
4439 aom_wb_write_literal(&wb, (int)obu_type, 4);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004440 aom_wb_write_literal(&wb, 0, 2);
4441 aom_wb_write_literal(&wb, obu_extension ? 1 : 0, 1);
4442 if (obu_extension) {
4443 aom_wb_write_literal(&wb, obu_extension & 0xFF, 8);
4444 }
4445
4446 size = aom_wb_bytes_written(&wb);
4447 return size;
4448}
4449
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004450static uint32_t write_sequence_header_obu(AV1_COMP *cpi, uint8_t *const dst) {
4451 AV1_COMMON *const cm = &cpi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004452 struct aom_write_bit_buffer wb = { dst, 0 };
4453 uint32_t size = 0;
4454
4455 write_profile(cm->profile, &wb);
4456
4457 aom_wb_write_literal(&wb, 0, 4);
4458
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01004459 write_sequence_header(cpi, &wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004460
4461 // color_config
4462 write_bitdepth_colorspace_sampling(cm, &wb);
4463
4464 size = aom_wb_bytes_written(&wb);
4465 return size;
4466}
4467
4468static uint32_t write_frame_header_obu(AV1_COMP *cpi, uint8_t *const dst) {
4469 AV1_COMMON *const cm = &cpi->common;
4470 struct aom_write_bit_buffer wb = { dst, 0 };
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004471 uint32_t total_size = 0;
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004472 uint32_t compressed_hdr_size, uncompressed_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004473
4474 write_uncompressed_header_obu(cpi, &wb);
4475
4476 if (cm->show_existing_frame) {
4477 total_size = aom_wb_bytes_written(&wb);
4478 return total_size;
4479 }
4480
4481  // Write the tile length code (always 4 bytes for now)
4482 aom_wb_write_literal(&wb, 3, 2);
4483
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004484 if (!use_compressed_header(cm)) {
4485 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
4486 compressed_hdr_size = 0;
4487 } else {
4488 // placeholder for the compressed header length
4489 struct aom_write_bit_buffer compr_hdr_len_wb = wb;
4490 aom_wb_write_literal(&wb, 0, 16);
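    // compr_hdr_len_wb still points at these 16 zero bits, so they can be
    // overwritten with the real length once the compressed header is written.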
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004491
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004492 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
4493 compressed_hdr_size =
4494 write_compressed_header(cpi, dst + uncompressed_hdr_size);
4495 aom_wb_overwrite_literal(&compr_hdr_len_wb, (int)(compressed_hdr_size), 16);
4496 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004497
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004498 total_size = uncompressed_hdr_size + compressed_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004499 return total_size;
4500}
4501
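// A tile group header is simply the indices of the first and last tile in the
// group, each coded with tiles_log2 bits.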
4502static uint32_t write_tile_group_header(uint8_t *const dst, int startTile,
4503 int endTile, int tiles_log2) {
4504 struct aom_write_bit_buffer wb = { dst, 0 };
4505 uint32_t size = 0;
4506
4507 aom_wb_write_literal(&wb, startTile, tiles_log2);
4508 aom_wb_write_literal(&wb, endTile, tiles_log2);
4509
4510 size = aom_wb_bytes_written(&wb);
4511 return size;
4512}
4513
4514static uint32_t write_tiles_in_tg_obus(AV1_COMP *const cpi, uint8_t *const dst,
4515 unsigned int *max_tile_size,
4516 unsigned int *max_tile_col_size,
4517 uint8_t *const frame_header_obu_location,
4518 uint32_t frame_header_obu_size,
4519 int insert_frame_header_obu_flag) {
Thomas Davies4822e142017-10-10 11:30:36 +01004520 AV1_COMMON *const cm = &cpi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004521 aom_writer mode_bc;
4522 int tile_row, tile_col;
4523 TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
4524 TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
4525 uint32_t total_size = 0;
4526 const int tile_cols = cm->tile_cols;
4527 const int tile_rows = cm->tile_rows;
4528 unsigned int tile_size = 0;
4529 const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
4530 // Fixed size tile groups for the moment
4531 const int num_tg_hdrs = cm->num_tg;
4532 const int tg_size =
4533#if CONFIG_EXT_TILE
4534 (cm->large_scale_tile)
4535 ? 1
4536 :
4537#endif // CONFIG_EXT_TILE
4538 (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
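  // i.e. (outside the large-scale-tile case) tg_size = ceil(#tiles / num_tg),
  // so the tiles are split into num_tg groups of roughly equal size.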
4539 int tile_count = 0;
4540 int curr_tg_data_size = 0;
4541 uint8_t *data = dst;
4542 int new_tg = 1;
4543#if CONFIG_EXT_TILE
4544 const int have_tiles = tile_cols * tile_rows > 1;
4545#endif
4546
Thomas Davies4822e142017-10-10 11:30:36 +01004547#if CONFIG_SIMPLE_BWD_ADAPT
4548 cm->largest_tile_id = 0;
4549#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004550 *max_tile_size = 0;
4551 *max_tile_col_size = 0;
4552
4553#if CONFIG_EXT_TILE
4554 if (cm->large_scale_tile) {
4555 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
4556 TileInfo tile_info;
4557 const int is_last_col = (tile_col == tile_cols - 1);
4558 const uint32_t col_offset = total_size;
4559
4560 av1_tile_set_col(&tile_info, cm, tile_col);
4561
4562 // The last column does not have a column header
4563 if (!is_last_col) total_size += 4;
4564
4565 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
4566 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
4567 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
4568 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
4569 const int data_offset = have_tiles ? 4 : 0;
4570 const int tile_idx = tile_row * tile_cols + tile_col;
4571 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
4572 av1_tile_set_row(&tile_info, cm, tile_row);
4573
4574 buf->data = dst + total_size;
4575
4576        // If CONFIG_EXT_TILE = 1, every tile in the row has a header,
4577 // even for the last one, unless no tiling is used at all.
4578 total_size += data_offset;
4579 // Initialise tile context from the frame context
4580 this_tile->tctx = *cm->fc;
4581 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07004582 mode_bc.allow_update_cdf = !cm->large_scale_tile;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004583 aom_start_encode(&mode_bc, buf->data + data_offset);
4584 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
4585 assert(tok == tok_end);
4586 aom_stop_encode(&mode_bc);
4587 tile_size = mode_bc.pos;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004588 buf->size = tile_size;
4589
4590 // Record the maximum tile size we see, so we can compact headers later.
Thomas Davies4822e142017-10-10 11:30:36 +01004591 if (tile_size > *max_tile_size) {
4592 *max_tile_size = tile_size;
4593#if CONFIG_SIMPLE_BWD_ADAPT
4594 cm->largest_tile_id = tile_cols * tile_row + tile_col;
4595#endif
4596 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004597
4598 if (have_tiles) {
4599 // tile header: size of this tile, or copy offset
4600 uint32_t tile_header = tile_size;
4601 const int tile_copy_mode =
4602 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256)
4603 ? 1
4604 : 0;
4605
4606 // If tile_copy_mode = 1, check if this tile is a copy tile.
4607 // Very low chances to have copy tiles on the key frames, so don't
4608          // Copy tiles are very unlikely on key frames, so skip the search
4609          // there to avoid unnecessary work.
4610            const int identical_tile_offset =
4611 find_identical_tile(tile_row, tile_col, tile_buffers);
4612
4613            if (identical_tile_offset > 0) {
4614 tile_size = 0;
4615 tile_header = idendical_tile_offset | 0x80;
4616 tile_header <<= 24;
4617 }
4618 }
4619
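          // The header is either the tile size or, for a copy tile, bit 31 set
          // with the backward offset (in tiles) to the identical tile held in
          // bits 24..30.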
4620 mem_put_le32(buf->data, tile_header);
4621 }
4622
4623 total_size += tile_size;
4624 }
4625
4626 if (!is_last_col) {
4627 uint32_t col_size = total_size - col_offset - 4;
4628 mem_put_le32(dst + col_offset, col_size);
4629
4630        // If this is not the final packing, record the maximum tile column
4631        // size we see; otherwise, check whether the tile size is out of range.
4632 *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
4633 }
4634 }
4635 } else {
4636#endif // CONFIG_EXT_TILE
4637
4638 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
4639 TileInfo tile_info;
4640 const int is_last_row = (tile_row == tile_rows - 1);
4641 av1_tile_set_row(&tile_info, cm, tile_row);
4642
4643 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
4644 const int tile_idx = tile_row * tile_cols + tile_col;
4645 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
4646 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
4647 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
4648 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
4649 const int is_last_col = (tile_col == tile_cols - 1);
4650 const int is_last_tile = is_last_col && is_last_row;
4651 int is_last_tile_in_tg = 0;
4652
4653 if (new_tg) {
4654 if (insert_frame_header_obu_flag && tile_idx) {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004655            // Insert a copy of the frame header OBU (including its
4656            // PRE_OBU_SIZE_BYTES-byte size field) before every tile group
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004657            // except the first.
4658 data = dst + total_size;
4659 memmove(data, frame_header_obu_location, frame_header_obu_size);
4660 total_size += frame_header_obu_size;
4661 }
4662 data = dst + total_size;
4663 // A new tile group begins at this tile. Write the obu header and
4664 // tile group header
Soo-Chul Han38427e82017-09-27 15:06:13 -04004665 curr_tg_data_size =
4666 write_obu_header(OBU_TILE_GROUP, 0, data + PRE_OBU_SIZE_BYTES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004667 if (n_log2_tiles)
4668 curr_tg_data_size += write_tile_group_header(
Soo-Chul Han38427e82017-09-27 15:06:13 -04004669 data + curr_tg_data_size + PRE_OBU_SIZE_BYTES, tile_idx,
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004670 AOMMIN(tile_idx + tg_size - 1, tile_cols * tile_rows - 1),
4671 n_log2_tiles);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004672 total_size += curr_tg_data_size + PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004673 new_tg = 0;
4674 tile_count = 0;
4675 }
4676 tile_count++;
4677 av1_tile_set_col(&tile_info, cm, tile_col);
4678
4679 if (tile_count == tg_size || tile_idx == (tile_cols * tile_rows - 1)) {
4680 is_last_tile_in_tg = 1;
4681 new_tg = 1;
4682 } else {
4683 is_last_tile_in_tg = 0;
4684 }
4685
4686#if CONFIG_DEPENDENT_HORZTILES
4687 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
4688#endif
4689 buf->data = dst + total_size;
4690
Soo-Chul Han38427e82017-09-27 15:06:13 -04004691// The last tile of the tile group does not have a header.
4692#if CONFIG_ADD_4BYTES_OBUSIZE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004693 if (!is_last_tile_in_tg) total_size += 4;
Soo-Chul Han38427e82017-09-27 15:06:13 -04004694#else
4695 total_size += 4;
4696#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004697
4698 // Initialise tile context from the frame context
4699 this_tile->tctx = *cm->fc;
4700 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07004701 mode_bc.allow_update_cdf = 1;
Soo-Chul Han13f0d9c2017-10-22 21:55:52 -04004702#if CONFIG_LOOP_RESTORATION
Rupert Swarbrick76405202017-11-07 16:35:55 +00004703 av1_reset_loop_restoration(&cpi->td.mb.e_mbd);
Soo-Chul Han13f0d9c2017-10-22 21:55:52 -04004704#endif // CONFIG_LOOP_RESTORATION
4705
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004706 aom_start_encode(&mode_bc, dst + total_size);
4707 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
4708#if !CONFIG_LV_MAP
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004709 assert(tok == tok_end);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004710#endif // !CONFIG_LV_MAP
4711 aom_stop_encode(&mode_bc);
4712 tile_size = mode_bc.pos;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004713 assert(tile_size > 0);
4714
4715 curr_tg_data_size += (tile_size + (is_last_tile_in_tg ? 0 : 4));
4716 buf->size = tile_size;
Thomas Davies4822e142017-10-10 11:30:36 +01004717#if CONFIG_SIMPLE_BWD_ADAPT
4718 if (tile_size > *max_tile_size) {
4719 cm->largest_tile_id = tile_cols * tile_row + tile_col;
4720 }
4721#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004722 if (!is_last_tile) {
4723 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
4724 }
Thomas Davies4822e142017-10-10 11:30:36 +01004725
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004726 if (!is_last_tile_in_tg) {
4727 // size of this tile
4728 mem_put_le32(buf->data, tile_size);
4729 } else {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004730#if CONFIG_ADD_4BYTES_OBUSIZE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004731 // write current tile group size
4732 mem_put_le32(data, curr_tg_data_size);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004733#else
4734 mem_put_le32(buf->data, tile_size);
4735#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004736 }
4737
4738 total_size += tile_size;
4739 }
4740 }
4741#if CONFIG_EXT_TILE
4742 }
4743#endif // CONFIG_EXT_TILE
4744 return (uint32_t)total_size;
4745}
4746
4747#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004748
Yaowu Xuf883b422016-08-30 14:01:10 -07004749void av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004750 uint8_t *data = dst;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004751 uint32_t data_size;
Thomas Daviesb25ba502017-07-18 10:18:24 +01004752#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004753 AV1_COMMON *const cm = &cpi->common;
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004754 uint32_t compressed_hdr_size = 0;
4755 uint32_t uncompressed_hdr_size;
Thomas Davies80188d12016-10-26 16:08:35 -07004756 struct aom_write_bit_buffer saved_wb;
Yaowu Xuf883b422016-08-30 14:01:10 -07004757 struct aom_write_bit_buffer wb = { data, 0 };
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004758 const int have_tiles = cm->tile_cols * cm->tile_rows > 1;
4759 int tile_size_bytes;
4760 int tile_col_size_bytes;
Thomas Daviesb25ba502017-07-18 10:18:24 +01004761#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004762 unsigned int max_tile_size;
4763 unsigned int max_tile_col_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004764#if CONFIG_OBU
4765#if !CONFIG_EXT_TILE
4766 AV1_COMMON *const cm = &cpi->common;
4767#endif
4768 uint32_t obu_size;
4769 uint8_t *frame_header_location;
4770 uint32_t frame_header_size;
4771#endif
Thomas Davies80188d12016-10-26 16:08:35 -07004772
Angie Chiangb11aedf2017-03-10 17:31:46 -08004773#if CONFIG_BITSTREAM_DEBUG
4774 bitstream_queue_reset_write();
4775#endif
4776
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004777#if CONFIG_OBU
Soo-Chul Han38427e82017-09-27 15:06:13 -04004778 // The TD is now written outside the frame encode loop
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004779
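  // Each OBU (header plus payload) is written starting at
  // data + PRE_OBU_SIZE_BYTES; with CONFIG_ADD_4BYTES_OBUSIZE its total length
  // is also recorded in the bytes that precede it.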
4780 // write sequence header obu if KEY_FRAME, preceded by 4-byte size
4781 if (cm->frame_type == KEY_FRAME) {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004782 obu_size =
4783 write_obu_header(OBU_SEQUENCE_HEADER, 0, data + PRE_OBU_SIZE_BYTES);
4784 obu_size +=
4785 write_sequence_header_obu(cpi, data + PRE_OBU_SIZE_BYTES + obu_size);
4786#if CONFIG_ADD_4BYTES_OBUSIZE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004787 mem_put_le32(data, obu_size);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004788#endif
4789 data += obu_size + PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004790 }
4791
4792 // write frame header obu, preceded by 4-byte size
Soo-Chul Han38427e82017-09-27 15:06:13 -04004793 frame_header_location = data + PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004794 obu_size = write_obu_header(OBU_FRAME_HEADER, 0, frame_header_location);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004795 frame_header_size =
4796 write_frame_header_obu(cpi, data + PRE_OBU_SIZE_BYTES + obu_size);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004797 obu_size += frame_header_size;
Soo-Chul Han38427e82017-09-27 15:06:13 -04004798#if CONFIG_ADD_4BYTES_OBUSIZE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004799 mem_put_le32(data, obu_size);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004800#endif
4801 data += obu_size + PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004802
4803 if (cm->show_existing_frame) {
4804 data_size = 0;
4805 } else {
4806    // Each tile group OBU will be preceded by the 4-byte size of the tile
4807    // group OBU.
Soo-Chul Han38427e82017-09-27 15:06:13 -04004808 data_size = write_tiles_in_tg_obus(
4809 cpi, data, &max_tile_size, &max_tile_col_size,
4810 frame_header_location - PRE_OBU_SIZE_BYTES,
4811 obu_size + PRE_OBU_SIZE_BYTES, 1 /* cm->error_resilient_mode */);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004812 }
4813
4814#endif
4815
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004816#if CONFIG_EXT_TILE
4817 if (cm->large_scale_tile) {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004818#if !CONFIG_OBU
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004819 write_uncompressed_header_frame(cpi, &wb);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004820#else
4821 write_uncompressed_header_obu(cpi, &wb);
4822#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004823
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004824 if (cm->show_existing_frame) {
4825 *size = aom_wb_bytes_written(&wb);
4826 return;
4827 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004828
4829    // We do not know these in advance. Output placeholder bits.
4830 saved_wb = wb;
4831 // Write tile size magnitudes
4832 if (have_tiles) {
4833 // Note that the last item in the uncompressed header is the data
4834 // describing tile configuration.
4835 // Number of bytes in tile column size - 1
4836 aom_wb_write_literal(&wb, 0, 2);
4837
4838 // Number of bytes in tile size - 1
4839 aom_wb_write_literal(&wb, 0, 2);
4840 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004841
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004842 if (!use_compressed_header(cm)) {
4843 uncompressed_hdr_size = (uint32_t)aom_wb_bytes_written(&wb);
4844 aom_clear_system_state();
4845 compressed_hdr_size = 0;
4846 } else {
4847 // Size of compressed header
4848 aom_wb_write_literal(&wb, 0, 16);
4849 uncompressed_hdr_size = (uint32_t)aom_wb_bytes_written(&wb);
4850 aom_clear_system_state();
4851 // Write the compressed header
4852 compressed_hdr_size =
4853 write_compressed_header(cpi, data + uncompressed_hdr_size);
4854 }
4855 data += uncompressed_hdr_size + compressed_hdr_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004856
Yunqing Wangb041d8a2017-11-15 12:31:18 -08004857#define EXT_TILE_DEBUG 0
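// When EXT_TILE_DEBUG is enabled, dump the frame header just written to a file
// named ./fhNNN, where NNN is built from the current frame number.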
4858#if EXT_TILE_DEBUG
4859 {
4860 char fn[20] = "./fh";
4861 fn[4] = cm->current_video_frame / 100 + '0';
4862 fn[5] = (cm->current_video_frame % 100) / 10 + '0';
4863 fn[6] = (cm->current_video_frame % 10) + '0';
4864 fn[7] = '\0';
4865 av1_print_uncompressed_frame_header(
4866 data - uncompressed_hdr_size - compressed_hdr_size,
4867 uncompressed_hdr_size, fn);
4868 }
4869#endif // EXT_TILE_DEBUG
4870#undef EXT_TILE_DEBUG
4871
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004872 // Write the encoded tile data
4873 data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
4874 } else {
4875#endif // CONFIG_EXT_TILE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004876#if !CONFIG_OBU
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004877 data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004878#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004879#if CONFIG_EXT_TILE
4880 }
4881#endif // CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004882#if CONFIG_EXT_TILE
4883 if (cm->large_scale_tile) {
4884 if (have_tiles) {
4885 data_size =
4886 remux_tiles(cm, data, data_size, max_tile_size, max_tile_col_size,
4887 &tile_size_bytes, &tile_col_size_bytes);
4888 }
4889
4890 data += data_size;
4891
4892 // Now fill in the gaps in the uncompressed header.
4893 if (have_tiles) {
4894 assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
4895 aom_wb_write_literal(&saved_wb, tile_col_size_bytes - 1, 2);
4896
4897 assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
4898 aom_wb_write_literal(&saved_wb, tile_size_bytes - 1, 2);
4899 }
Debargha Mukherjee2eada612017-09-22 15:37:39 -07004900 // TODO(jbb): Figure out what to do if compressed_hdr_size > 16 bits.
4901 assert(compressed_hdr_size <= 0xffff);
Rupert Swarbrick53685902017-10-27 13:35:19 +01004902 // Fill in the compressed header size (but only if we're using one)
4903 if (use_compressed_header(cm)) {
4904 aom_wb_write_literal(&saved_wb, compressed_hdr_size, 16);
4905 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004906 } else {
4907#endif // CONFIG_EXT_TILE
4908 data += data_size;
4909#if CONFIG_EXT_TILE
4910 }
4911#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004912 *size = data - dst;
4913}