/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <limits.h>
#include <stdio.h>

#include "aom/aom_encoder.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/binary_codes_writer.h"
#include "aom_dsp/bitwriter_buffer.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_ports/system_state.h"
#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#include "av1/common/cdef.h"
#if CONFIG_CFL
#include "av1/common/cfl.h"
#endif
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/mvref_common.h"
#include "av1/common/odintrin.h"
#include "av1/common/pred_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"
#include "av1/common/seg_common.h"
#include "av1/common/tile_common.h"

#include "av1/encoder/bitstream.h"
#include "av1/encoder/cost.h"
#include "av1/encoder/encodemv.h"
#include "av1/encoder/encodetxb.h"
#include "av1/encoder/mcomp.h"
#include "av1/encoder/palette.h"
#include "av1/encoder/segmentation.h"
#include "av1/encoder/tokenize.h"

#define ENC_MISMATCH_DEBUG 0

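// write_uniform() below emits v in [0, n) with a quasi-uniform code: the
// first m = 2^l - n values use l - 1 bits, the rest use l bits. For
// illustration (example added, not from the original source): n = 5 gives
// l = 3, m = 3, so 0..2 take two bits while 3..4 take two bits for
// m + ((v - m) >> 1) plus one parity bit.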
static INLINE void write_uniform(aom_writer *w, int n, int v) {
  const int l = get_unsigned_bits(n);
  const int m = (1 << l) - n;
  if (l == 0) return;
  if (v < m) {
    aom_write_literal(w, v, l - 1);
  } else {
    aom_write_literal(w, m + ((v - m) >> 1), l - 1);
    aom_write_literal(w, (v - m) & 1, 1);
  }
}

#if CONFIG_LOOP_RESTORATION
static void loop_restoration_write_sb_coeffs(const AV1_COMMON *const cm,
                                             MACROBLOCKD *xd,
                                             const RestorationUnitInfo *rui,
                                             aom_writer *const w, int plane);
#endif  // CONFIG_LOOP_RESTORATION
#if CONFIG_OBU
static void write_uncompressed_header_obu(AV1_COMP *cpi,
#if CONFIG_EXT_TILE
                                          struct aom_write_bit_buffer *saved_wb,
#endif
                                          struct aom_write_bit_buffer *wb);
#else
static void write_uncompressed_header_frame(AV1_COMP *cpi,
                                            struct aom_write_bit_buffer *wb);
#endif

#if !CONFIG_OBU || CONFIG_EXT_TILE
static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
                       const uint32_t data_size, const uint32_t max_tile_size,
                       const uint32_t max_tile_col_size,
                       int *const tile_size_bytes,
                       int *const tile_col_size_bytes);
#endif

static void write_intra_mode_kf(FRAME_CONTEXT *frame_ctx, const MODE_INFO *mi,
                                const MODE_INFO *above_mi,
                                const MODE_INFO *left_mi, PREDICTION_MODE mode,
                                aom_writer *w) {
#if CONFIG_INTRABC
  assert(!is_intrabc_block(&mi->mbmi));
#endif  // CONFIG_INTRABC
  (void)mi;
  aom_write_symbol(w, mode, get_y_mode_cdf(frame_ctx, above_mi, left_mi),
                   INTRA_MODES);
}

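// Single-reference inter modes are coded as a cascade of binary symbols:
// "not NEWMV?", then "not GLOBALMV?", then "not NEARESTMV?" (i.e. NEARMV),
// each with its own context extracted from mode_ctx.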
static void write_inter_mode(aom_writer *w, PREDICTION_MODE mode,
                             FRAME_CONTEXT *ec_ctx, const int16_t mode_ctx) {
  const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;

  aom_write_symbol(w, mode != NEWMV, ec_ctx->newmv_cdf[newmv_ctx], 2);

  if (mode != NEWMV) {
    const int16_t zeromv_ctx =
        (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
    aom_write_symbol(w, mode != GLOBALMV, ec_ctx->zeromv_cdf[zeromv_ctx], 2);

    if (mode != GLOBALMV) {
      int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
      aom_write_symbol(w, mode != NEARESTMV, ec_ctx->refmv_cdf[refmv_ctx], 2);
    }
  }
}

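// Signals the dynamic reference MV (DRL) index as a series of binary
// "keep going" decisions against the reference MV stack, allowing at most
// two extra candidates beyond the default.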
static void write_drl_idx(FRAME_CONTEXT *ec_ctx, const MB_MODE_INFO *mbmi,
                          const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

  assert(mbmi->ref_mv_idx < 3);

  const int new_mv = mbmi->mode == NEWMV || mbmi->mode == NEW_NEWMV;
  if (new_mv) {
    int idx;
    for (idx = 0; idx < 2; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);

        aom_write_symbol(w, mbmi->ref_mv_idx != idx, ec_ctx->drl_cdf[drl_ctx],
                         2);
        if (mbmi->ref_mv_idx == idx) return;
      }
    }
    return;
  }

  if (have_nearmv_in_inter_mode(mbmi->mode)) {
    int idx;
    // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
    for (idx = 1; idx < 3; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_write_symbol(w, mbmi->ref_mv_idx != (idx - 1),
                         ec_ctx->drl_cdf[drl_ctx], 2);
        if (mbmi->ref_mv_idx == (idx - 1)) return;
      }
    }
    return;
  }
}

static void write_inter_compound_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
                                      aom_writer *w, PREDICTION_MODE mode,
                                      const int16_t mode_ctx) {
  assert(is_inter_compound_mode(mode));
  (void)cm;
  aom_write_symbol(w, INTER_COMPOUND_OFFSET(mode),
                   xd->tile_ctx->inter_compound_mode_cdf[mode_ctx],
                   INTER_COMPOUND_MODES);
}

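// Recursively signals the transform partitioning of an inter block: one
// binary no-split/split symbol per node, descending until MAX_VARTX_DEPTH
// or the smallest transform size is reached.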
static void write_tx_size_vartx(const AV1_COMMON *cm, MACROBLOCKD *xd,
                                const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
                                int depth, int blk_row, int blk_col,
                                aom_writer *w) {
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
  const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
  const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (depth == MAX_VARTX_DEPTH) {
    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    return;
  }

  const int ctx = txfm_partition_context(xd->above_txfm_context + blk_col,
                                         xd->left_txfm_context + blk_row,
                                         mbmi->sb_type, tx_size);
  const int txb_size_index =
      av1_get_txb_size_index(mbmi->sb_type, blk_row, blk_col);
  const int write_txfm_partition =
      tx_size == mbmi->inter_tx_size[txb_size_index];
  if (write_txfm_partition) {
    aom_write_symbol(w, 0, ec_ctx->txfm_partition_cdf[ctx], 2);

    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    // TODO(yuec): set correct txfm partition update for qttx
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[1][tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];

    aom_write_symbol(w, 1, ec_ctx->txfm_partition_cdf[ctx], 2);

    if (sub_txs == TX_4X4) {
      txfm_partition_update(xd->above_txfm_context + blk_col,
                            xd->left_txfm_context + blk_row, sub_txs, tx_size);
      return;
    }

    assert(bsw > 0 && bsh > 0);
    for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh)
      for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
        int offsetr = blk_row + row;
        int offsetc = blk_col + col;
        write_tx_size_vartx(cm, xd, mbmi, sub_txs, depth + 1, offsetr, offsetc,
                            w);
      }
  }
}

static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
  if (block_signals_txsize(bsize)) {
    const TX_SIZE tx_size = mbmi->tx_size;
    const int tx_size_ctx = get_tx_size_context(xd, 0);
    const int depth = tx_size_to_depth(tx_size, bsize, 0);
    const int max_depths = bsize_to_max_depth(bsize, 0);
    const int32_t tx_size_cat = bsize_to_tx_size_cat(bsize, 0);

    assert(depth >= 0 && depth <= max_depths);
    assert(!is_inter_block(mbmi));
    assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));

    aom_write_symbol(w, depth, ec_ctx->tx_size_cdf[tx_size_cat][tx_size_ctx],
                     max_depths + 1);
  }
}

static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                      int segment_id, const MODE_INFO *mi, aom_writer *w) {
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip = mi->mbmi.skip;
    const int ctx = av1_get_skip_context(xd);
    FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
    aom_write_symbol(w, skip, ec_ctx->skip_cdfs[ctx], 2);
    return skip;
  }
}

#if CONFIG_EXT_SKIP
static int write_skip_mode(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                           int segment_id, const MODE_INFO *mi, aom_writer *w) {
  if (!cm->skip_mode_flag) return 0;
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 0;
  }
  const int skip_mode = mi->mbmi.skip_mode;
  if (!is_comp_ref_allowed(mi->mbmi.sb_type)) {
    assert(!skip_mode);
    return 0;
  }
  const int ctx = av1_get_skip_mode_context(xd);
  aom_write_symbol(w, skip_mode, xd->tile_ctx->skip_mode_cdfs[ctx], 2);
  return skip_mode;
}
#endif  // CONFIG_EXT_SKIP

static void write_is_inter(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                           int segment_id, aom_writer *w, const int is_inter) {
  if (!segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)
#if CONFIG_SEGMENT_GLOBALMV
        || segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV)
#endif
        )
      if (!av1_is_valid_scale(&cm->frame_refs[0].sf))
        return;  // LAST_FRAME not valid for reference

    const int ctx = av1_get_intra_inter_context(xd);
    FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
    aom_write_symbol(w, is_inter, ec_ctx->intra_inter_cdf[ctx], 2);
  }
}

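// Motion mode coding depends on what the block allows: nothing is written
// when only SIMPLE_TRANSLATION is possible, a single binary symbol when
// OBMC is the only alternative, and a full MOTION_MODES symbol otherwise.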
static void write_motion_mode(const AV1_COMMON *cm, MACROBLOCKD *xd,
                              const MODE_INFO *mi, aom_writer *w) {
  const MB_MODE_INFO *mbmi = &mi->mbmi;

  MOTION_MODE last_motion_mode_allowed =
      motion_mode_allowed(cm->global_motion, xd, mi);
  switch (last_motion_mode_allowed) {
    case SIMPLE_TRANSLATION: break;
    case OBMC_CAUSAL:
      aom_write_symbol(w, mbmi->motion_mode == OBMC_CAUSAL,
                       xd->tile_ctx->obmc_cdf[mbmi->sb_type], 2);
      break;
    default:
      aom_write_symbol(w, mbmi->motion_mode,
                       xd->tile_ctx->motion_mode_cdf[mbmi->sb_type],
                       MOTION_MODES);
  }
}

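// Delta-q coding: a symbol for min(|delta|, DELTA_Q_SMALL), then, for large
// magnitudes, an escape made of a 3-bit length and the remainder bits, and
// finally a sign bit whenever the delta is nonzero.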
static void write_delta_qindex(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                               int delta_qindex, aom_writer *w) {
  int sign = delta_qindex < 0;
  int abs = sign ? -delta_qindex : delta_qindex;
  int rem_bits, thr;
  int smallval = abs < DELTA_Q_SMALL ? 1 : 0;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;

  aom_write_symbol(w, AOMMIN(abs, DELTA_Q_SMALL), ec_ctx->delta_q_cdf,
                   DELTA_Q_PROBS + 1);

  if (!smallval) {
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits - 1, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}

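// Loop filter level deltas mirror the delta-q scheme above; with
// CONFIG_LOOPFILTER_LEVEL, each filter id gets its own CDF when
// delta_lf_multi is enabled.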
#if CONFIG_EXT_DELTA_Q
static void write_delta_lflevel(const AV1_COMMON *cm, const MACROBLOCKD *xd,
#if CONFIG_LOOPFILTER_LEVEL
                                int lf_id,
#endif
                                int delta_lflevel, aom_writer *w) {
  int sign = delta_lflevel < 0;
  int abs = sign ? -delta_lflevel : delta_lflevel;
  int rem_bits, thr;
  int smallval = abs < DELTA_LF_SMALL ? 1 : 0;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;

#if CONFIG_LOOPFILTER_LEVEL
  if (cm->delta_lf_multi) {
    assert(lf_id >= 0 && lf_id < (av1_num_planes(cm) > 1 ? FRAME_LF_COUNT
                                                         : FRAME_LF_COUNT - 2));
    aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL),
                     ec_ctx->delta_lf_multi_cdf[lf_id], DELTA_LF_PROBS + 1);
  } else {
    aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
                     DELTA_LF_PROBS + 1);
  }
#else
  aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
                   DELTA_LF_PROBS + 1);
#endif  // CONFIG_LOOPFILTER_LEVEL

  if (!smallval) {
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits - 1, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}
#endif  // CONFIG_EXT_DELTA_Q

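// Packs the palette color-index map: the first index uses the quasi-uniform
// code, the remaining indices use their per-token CDFs.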
static void pack_map_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
                            int num) {
  const TOKENEXTRA *p = *tp;
  write_uniform(w, n, p->token);  // The first color index.
  ++p;
  --num;
  for (int i = 0; i < num; ++i) {
    aom_write_symbol(w, p->token, p->color_map_cdf, n);
    ++p;
  }
  *tp = p;
}

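// Writes the coefficients of a transform block, recursing over the same
// transform partitioning that write_tx_size_vartx() signals; chroma planes
// are always coded at their full plane transform size.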
static void pack_txb_tokens(aom_writer *w, AV1_COMMON *cm, MACROBLOCK *const x,
                            const TOKENEXTRA **tp,
                            const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
                            MB_MODE_INFO *mbmi, int plane,
                            BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
                            int block, int blk_row, int blk_col,
                            TX_SIZE tx_size, TOKEN_STATS *token_stats) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  const TX_SIZE plane_tx_size =
      plane ? av1_get_uv_tx_size(mbmi, pd->subsampling_x, pd->subsampling_y)
            : mbmi->inter_tx_size[av1_get_txb_size_index(plane_bsize, blk_row,
                                                         blk_col)];

  if (tx_size == plane_tx_size || plane) {
    TOKEN_STATS tmp_token_stats;
    init_token_stats(&tmp_token_stats);

    tran_low_t *tcoeff = BLOCK_OFFSET(x->mbmi_ext->tcoeff[plane], block);
    uint16_t eob = x->mbmi_ext->eobs[plane][block];
    TXB_CTX txb_ctx = { x->mbmi_ext->txb_skip_ctx[plane][block],
                        x->mbmi_ext->dc_sign_ctx[plane][block] };
    av1_write_coeffs_txb(cm, xd, w, blk_row, blk_col, plane, tx_size, tcoeff,
                         eob, &txb_ctx);
#if CONFIG_RD_DEBUG
    token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
    token_stats->cost += tmp_token_stats.cost;
#endif
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[1][tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];

    assert(bsw > 0 && bsh > 0);

    for (int r = 0; r < tx_size_high_unit[tx_size]; r += bsh) {
      for (int c = 0; c < tx_size_wide_unit[tx_size]; c += bsw) {
        const int offsetr = blk_row + r;
        const int offsetc = blk_col + c;
        const int step = bsh * bsw;

        if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

        pack_txb_tokens(w, cm, x, tp, tok_end, xd, mbmi, plane, plane_bsize,
                        bit_depth, block, offsetr, offsetc, sub_txs,
                        token_stats);
        block += step;
      }
    }
  }
}

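// Maps a segment id onto a non-negative index relative to the predicted id
// so nearby ids get small codes. For illustration (example added, not from
// the original source): ref = 2 and max = 8 map x = 2, 3, 1, 4, 0 to
// 0, 1, 2, 3, 4 and leave 5, 6, 7 unchanged.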
#if CONFIG_SPATIAL_SEGMENTATION
static int neg_interleave(int x, int ref, int max) {
  const int diff = x - ref;
  if (!ref) return x;
  if (ref >= (max - 1)) return -diff;
  if (2 * ref < max) {
    if (abs(diff) <= ref) {
      if (diff > 0)
        return (diff << 1) - 1;
      else
        return ((-diff) << 1);
    }
    return x;
  } else {
    if (abs(diff) < (max - ref)) {
      if (diff > 0)
        return (diff << 1) - 1;
      else
        return ((-diff) << 1);
    }
    return (max - x) - 1;
  }
}

static void write_segment_id(AV1_COMP *cpi, const MB_MODE_INFO *const mbmi,
                             aom_writer *w, const struct segmentation *seg,
                             struct segmentation_probs *segp, int mi_row,
                             int mi_col, int skip) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  int prev_ul = -1; /* Top left segment_id */
  int prev_l = -1;  /* Current left segment_id */
  int prev_u = -1;  /* Current top segment_id */

  if (!seg->enabled || !seg->update_map) return;

  if ((xd->up_available) && (xd->left_available))
    prev_ul = get_segment_id(cm, cm->current_frame_seg_map, BLOCK_4X4,
                             mi_row - 1, mi_col - 1);

  if (xd->up_available)
    prev_u = get_segment_id(cm, cm->current_frame_seg_map, BLOCK_4X4,
                            mi_row - 1, mi_col - 0);

  if (xd->left_available)
    prev_l = get_segment_id(cm, cm->current_frame_seg_map, BLOCK_4X4,
                            mi_row - 0, mi_col - 1);

  int cdf_num = pick_spatial_seg_cdf(prev_ul, prev_u, prev_l);
  int pred = pick_spatial_seg_pred(prev_ul, prev_u, prev_l);

  if (skip) {
    set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type, mi_row,
                           mi_col, pred);
    set_spatial_segment_id(cm, cpi->segmentation_map, mbmi->sb_type, mi_row,
                           mi_col, pred);
    /* mbmi is read only but we need to update segment_id */
    ((MB_MODE_INFO *)mbmi)->segment_id = pred;
    return;
  }

  int coded_id =
      neg_interleave(mbmi->segment_id, pred, cm->last_active_segid + 1);

  aom_cdf_prob *pred_cdf = segp->spatial_pred_seg_cdf[cdf_num];
  aom_write_symbol(w, coded_id, pred_cdf, 8);

  set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type, mi_row,
                         mi_col, mbmi->segment_id);
}
#else
static void write_segment_id(aom_writer *w, const struct segmentation *seg,
                             struct segmentation_probs *segp, int segment_id) {
  if (seg->enabled && seg->update_map) {
    aom_write_symbol(w, segment_id, segp->tree_cdf, MAX_SEGMENTS);
  }
}
#endif

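// Writes one bit of the reference-frame decision tree as a binary symbol
// using the prediction-context CDF selected by av1_get_pred_cdf_<pname>().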
#define WRITE_REF_BIT(bname, pname) \
  aom_write_symbol(w, bname, av1_get_pred_cdf_##pname(xd), 2)

// This function encodes the reference frame
static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int is_compound = has_second_ref(mbmi);
  const int segment_id = mbmi->segment_id;

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
           get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
  }
#if CONFIG_SEGMENT_GLOBALMV
  else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP) ||
           segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV))
#else
  else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP))
#endif
  {
    assert(!is_compound);
    assert(mbmi->ref_frame[0] == LAST_FRAME);
  } else {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      if (is_comp_ref_allowed(mbmi->sb_type))
        aom_write_symbol(w, is_compound, av1_get_reference_mode_cdf(cm, xd), 2);
    } else {
      assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (is_compound) {
#if CONFIG_EXT_COMP_REFS
      const COMP_REFERENCE_TYPE comp_ref_type = has_uni_comp_refs(mbmi)
                                                    ? UNIDIR_COMP_REFERENCE
                                                    : BIDIR_COMP_REFERENCE;
      aom_write_symbol(w, comp_ref_type, av1_get_comp_reference_type_cdf(xd),
                       2);

      if (comp_ref_type == UNIDIR_COMP_REFERENCE) {
        const int bit = mbmi->ref_frame[0] == BWDREF_FRAME;
        WRITE_REF_BIT(bit, uni_comp_ref_p);

        if (!bit) {
          assert(mbmi->ref_frame[0] == LAST_FRAME);
          const int bit1 = mbmi->ref_frame[1] == LAST3_FRAME ||
                           mbmi->ref_frame[1] == GOLDEN_FRAME;
          WRITE_REF_BIT(bit1, uni_comp_ref_p1);
          if (bit1) {
            const int bit2 = mbmi->ref_frame[1] == GOLDEN_FRAME;
            WRITE_REF_BIT(bit2, uni_comp_ref_p2);
          }
        } else {
          assert(mbmi->ref_frame[1] == ALTREF_FRAME);
        }

        return;
      }

      assert(comp_ref_type == BIDIR_COMP_REFERENCE);
#endif  // CONFIG_EXT_COMP_REFS

      const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
                       mbmi->ref_frame[0] == LAST3_FRAME);
      WRITE_REF_BIT(bit, comp_ref_p);

      if (!bit) {
        const int bit1 = mbmi->ref_frame[0] == LAST2_FRAME;
        WRITE_REF_BIT(bit1, comp_ref_p1);
      } else {
        const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
        WRITE_REF_BIT(bit2, comp_ref_p2);
      }

      const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
      WRITE_REF_BIT(bit_bwd, comp_bwdref_p);

      if (!bit_bwd) {
        WRITE_REF_BIT(mbmi->ref_frame[1] == ALTREF2_FRAME, comp_bwdref_p1);
      }

    } else {
      const int bit0 = (mbmi->ref_frame[0] <= ALTREF_FRAME &&
                        mbmi->ref_frame[0] >= BWDREF_FRAME);
      WRITE_REF_BIT(bit0, single_ref_p1);

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
        WRITE_REF_BIT(bit1, single_ref_p2);

        if (!bit1) {
          WRITE_REF_BIT(mbmi->ref_frame[0] == ALTREF2_FRAME, single_ref_p6);
        }
      } else {
        const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
                          mbmi->ref_frame[0] == GOLDEN_FRAME);
        WRITE_REF_BIT(bit2, single_ref_p3);

        if (!bit2) {
          const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
          WRITE_REF_BIT(bit3, single_ref_p4);
        } else {
          const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
          WRITE_REF_BIT(bit4, single_ref_p5);
        }
      }
    }
  }
}

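// Filter-intra is only signaled for DC_PRED blocks with no luma palette and
// an allowed transform size; a mode symbol follows when it is enabled.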
#if CONFIG_FILTER_INTRA
static void write_filter_intra_mode_info(const MACROBLOCKD *xd,
                                         const MB_MODE_INFO *const mbmi,
                                         aom_writer *w) {
  if (mbmi->mode == DC_PRED && mbmi->palette_mode_info.palette_size[0] == 0 &&
      av1_filter_intra_allowed_txsize(mbmi->tx_size)) {
    aom_write_symbol(w, mbmi->filter_intra_mode_info.use_filter_intra,
                     xd->tile_ctx->filter_intra_cdfs[mbmi->tx_size], 2);
    if (mbmi->filter_intra_mode_info.use_filter_intra) {
      const FILTER_INTRA_MODE mode =
          mbmi->filter_intra_mode_info.filter_intra_mode;
      aom_write_symbol(w, mode, xd->tile_ctx->filter_intra_mode_cdf,
                       FILTER_INTRA_MODES);
    }
  }
}
#endif  // CONFIG_FILTER_INTRA

static void write_angle_delta(aom_writer *w, int angle_delta,
                              aom_cdf_prob *cdf) {
#if CONFIG_EXT_INTRA_MOD
  aom_write_symbol(w, angle_delta + MAX_ANGLE_DELTA, cdf,
                   2 * MAX_ANGLE_DELTA + 1);
#else
  (void)cdf;
  write_uniform(w, 2 * MAX_ANGLE_DELTA + 1, MAX_ANGLE_DELTA + angle_delta);
#endif  // CONFIG_EXT_INTRA_MOD
}

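// The interpolation filter is only coded when the block actually needs
// sub-pel interpolation and the frame-level filter is SWITCHABLE; with
// CONFIG_DUAL_FILTER a separate filter is sent per direction.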
static void write_mb_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;

  if (!av1_is_interp_needed(xd)) {
    assert(mbmi->interp_filters ==
           av1_broadcast_interp_filter(
               av1_unswitchable_filter(cm->interp_filter)));
    return;
  }
  if (cm->interp_filter == SWITCHABLE) {
#if CONFIG_DUAL_FILTER
    int dir;
    for (dir = 0; dir < 2; ++dir) {
      if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
          (mbmi->ref_frame[1] > INTRA_FRAME &&
           has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
        const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
        InterpFilter filter =
            av1_extract_interp_filter(mbmi->interp_filters, dir);
        aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
                         SWITCHABLE_FILTERS);
        ++cpi->interp_filter_selected[0][filter];
      } else {
        assert(av1_extract_interp_filter(mbmi->interp_filters, dir) ==
               EIGHTTAP_REGULAR);
      }
    }
#else
    {
      const int ctx = av1_get_pred_context_switchable_interp(xd);
      InterpFilter filter = av1_extract_interp_filter(mbmi->interp_filters, 0);
      aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
                       SWITCHABLE_FILTERS);
      ++cpi->interp_filter_selected[0][filter];
    }
#endif  // CONFIG_DUAL_FILTER
  }
}

// Transmit color values with delta encoding. Write the first value as
// literal, and the deltas between each value and the previous one. "min_val" is
// the smallest possible value of the deltas.
static void delta_encode_palette_colors(const int *colors, int num,
                                        int bit_depth, int min_val,
                                        aom_writer *w) {
  if (num <= 0) return;
  assert(colors[0] < (1 << bit_depth));
  aom_write_literal(w, colors[0], bit_depth);
  if (num == 1) return;
  int max_delta = 0;
  int deltas[PALETTE_MAX_SIZE];
  memset(deltas, 0, sizeof(deltas));
  for (int i = 1; i < num; ++i) {
    assert(colors[i] < (1 << bit_depth));
    const int delta = colors[i] - colors[i - 1];
    deltas[i - 1] = delta;
    assert(delta >= min_val);
    if (delta > max_delta) max_delta = delta;
  }
  const int min_bits = bit_depth - 3;
  int bits = AOMMAX(av1_ceil_log2(max_delta + 1 - min_val), min_bits);
  assert(bits <= bit_depth);
  int range = (1 << bit_depth) - colors[0] - min_val;
  aom_write_literal(w, bits - min_bits, 2);
  for (int i = 0; i < num - 1; ++i) {
    aom_write_literal(w, deltas[i] - min_val, bits);
    range -= deltas[i];
    bits = AOMMIN(bits, av1_ceil_log2(range));
  }
}

// Transmit luma palette color values. First signal if each color in the color
// cache is used. Those colors that are not in the cache are transmitted with
// delta encoding.
static void write_palette_colors_y(const MACROBLOCKD *const xd,
                                   const PALETTE_MODE_INFO *const pmi,
                                   int bit_depth, aom_writer *w) {
  const int n = pmi->palette_size[0];
  uint16_t color_cache[2 * PALETTE_MAX_SIZE];
  const int n_cache = av1_get_palette_cache(xd, 0, color_cache);
  int out_cache_colors[PALETTE_MAX_SIZE];
  uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
  const int n_out_cache =
      av1_index_color_cache(color_cache, n_cache, pmi->palette_colors, n,
                            cache_color_found, out_cache_colors);
  int n_in_cache = 0;
  for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
    const int found = cache_color_found[i];
    aom_write_bit(w, found);
    n_in_cache += found;
  }
  assert(n_in_cache + n_out_cache == n);
  delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 1, w);
}

// Write chroma palette color values. U channel is handled similarly to the luma
// channel. For v channel, either use delta encoding or transmit raw values
// directly, whichever costs less.
static void write_palette_colors_uv(const MACROBLOCKD *const xd,
                                    const PALETTE_MODE_INFO *const pmi,
                                    int bit_depth, aom_writer *w) {
  const int n = pmi->palette_size[1];
  const uint16_t *colors_u = pmi->palette_colors + PALETTE_MAX_SIZE;
  const uint16_t *colors_v = pmi->palette_colors + 2 * PALETTE_MAX_SIZE;
  // U channel colors.
  uint16_t color_cache[2 * PALETTE_MAX_SIZE];
  const int n_cache = av1_get_palette_cache(xd, 1, color_cache);
  int out_cache_colors[PALETTE_MAX_SIZE];
  uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
  const int n_out_cache = av1_index_color_cache(
      color_cache, n_cache, colors_u, n, cache_color_found, out_cache_colors);
  int n_in_cache = 0;
  for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
    const int found = cache_color_found[i];
    aom_write_bit(w, found);
    n_in_cache += found;
  }
  delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 0, w);

  // V channel colors. Don't use color cache as the colors are not sorted.
  const int max_val = 1 << bit_depth;
  int zero_count = 0, min_bits_v = 0;
  int bits_v =
      av1_get_palette_delta_bits_v(pmi, bit_depth, &zero_count, &min_bits_v);
  const int rate_using_delta =
      2 + bit_depth + (bits_v + 1) * (n - 1) - zero_count;
  const int rate_using_raw = bit_depth * n;
  if (rate_using_delta < rate_using_raw) {  // delta encoding
    assert(colors_v[0] < (1 << bit_depth));
    aom_write_bit(w, 1);
    aom_write_literal(w, bits_v - min_bits_v, 2);
    aom_write_literal(w, colors_v[0], bit_depth);
    for (int i = 1; i < n; ++i) {
      assert(colors_v[i] < (1 << bit_depth));
      if (colors_v[i] == colors_v[i - 1]) {  // No need to signal sign bit.
        aom_write_literal(w, 0, bits_v);
        continue;
      }
      const int delta = abs((int)colors_v[i] - colors_v[i - 1]);
      const int sign_bit = colors_v[i] < colors_v[i - 1];
      if (delta <= max_val - delta) {
        aom_write_literal(w, delta, bits_v);
        aom_write_bit(w, sign_bit);
      } else {
        aom_write_literal(w, max_val - delta, bits_v);
        aom_write_bit(w, !sign_bit);
      }
    }
  } else {  // Transmit raw values.
    aom_write_bit(w, 0);
    for (int i = 0; i < n; ++i) {
      assert(colors_v[i] < (1 << bit_depth));
      aom_write_literal(w, colors_v[i], bit_depth);
    }
  }
}

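// Signals whether palette coding is used and, if so, the palette size for
// luma (DC_PRED only) and for chroma (UV_DC_PRED with chroma present),
// followed by the color values themselves.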
static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                    const MODE_INFO *const mi, int mi_row,
                                    int mi_col, aom_writer *w) {
  const int num_planes = av1_num_planes(cm);
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  assert(av1_allow_palette(cm->allow_screen_content_tools, bsize));
  const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
  const int bsize_ctx = av1_get_palette_bsize_ctx(bsize);

  if (mbmi->mode == DC_PRED) {
    const int n = pmi->palette_size[0];
    const int palette_y_mode_ctx = av1_get_palette_mode_ctx(xd);
    aom_write_symbol(
        w, n > 0,
        xd->tile_ctx->palette_y_mode_cdf[bsize_ctx][palette_y_mode_ctx], 2);
    if (n > 0) {
      aom_write_symbol(w, n - PALETTE_MIN_SIZE,
                       xd->tile_ctx->palette_y_size_cdf[bsize_ctx],
                       PALETTE_SIZES);
      write_palette_colors_y(xd, pmi, cm->bit_depth, w);
    }
  }

  const int uv_dc_pred =
      num_planes > 1 && mbmi->uv_mode == UV_DC_PRED &&
      is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
                          xd->plane[1].subsampling_y);
  if (uv_dc_pred) {
    const int n = pmi->palette_size[1];
    const int palette_uv_mode_ctx = (pmi->palette_size[0] > 0);
    aom_write_symbol(w, n > 0,
                     xd->tile_ctx->palette_uv_mode_cdf[palette_uv_mode_ctx], 2);
    if (n > 0) {
      aom_write_symbol(w, n - PALETTE_MIN_SIZE,
                       xd->tile_ctx->palette_uv_size_cdf[bsize_ctx],
                       PALETTE_SIZES);
      write_palette_colors_uv(xd, pmi, cm->bit_depth, w);
    }
  }
}

Angie Chiangcd9b03f2017-04-16 13:37:13 -0700869#if CONFIG_TXK_SEL
Luc Trudeau2eb9b842017-12-13 11:19:16 -0500870 int blk_row, int blk_col, int plane, TX_SIZE tx_size,
Angie Chiangc31ea682017-04-13 16:20:54 -0700871#endif
872 aom_writer *w) {
873 MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
Jingning Han2a4da942016-11-03 18:31:30 -0700874 const int is_inter = is_inter_block(mbmi);
Jingning Han243b66b2017-06-23 12:11:47 -0700875#if !CONFIG_TXK_SEL
Debargha Mukherjee5577bd12017-11-20 16:04:26 -0800876 const TX_SIZE mtx_size =
877 get_max_rect_tx_size(xd->mi[0]->mbmi.sb_type, is_inter);
Sarah Parker90024e42017-10-06 16:50:47 -0700878 const TX_SIZE tx_size =
Debargha Mukherjee3ebb0d02017-12-14 05:05:18 -0800879 is_inter ? TXSIZEMAX(sub_tx_size_map[1][mtx_size], mbmi->min_tx_size)
Sarah Parker90024e42017-10-06 16:50:47 -0700880 : mbmi->tx_size;
Jingning Han243b66b2017-06-23 12:11:47 -0700881#endif // !CONFIG_TXK_SEL
Thomas Daviescef09622017-01-11 17:27:12 +0000882 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Daviescef09622017-01-11 17:27:12 +0000883
Angie Chiangcd9b03f2017-04-16 13:37:13 -0700884#if !CONFIG_TXK_SEL
Angie Chiangc31ea682017-04-13 16:20:54 -0700885 TX_TYPE tx_type = mbmi->tx_type;
886#else
887 // Only y plane's tx_type is transmitted
Angie Chiang39b06eb2017-04-14 09:52:29 -0700888 if (plane > 0) return;
889 PLANE_TYPE plane_type = get_plane_type(plane);
Sarah Parker7c71cc02018-01-29 12:27:58 -0800890 TX_TYPE tx_type = av1_get_tx_type(plane_type, xd, blk_row, blk_col, tx_size,
891 cm->reduced_tx_set_used);
Angie Chiangc31ea682017-04-13 16:20:54 -0700892#endif
893
Hui Su99350a62018-01-11 16:41:09 -0800894 const TX_SIZE square_tx_size = txsize_sqr_map[tx_size];
895 const BLOCK_SIZE bsize = mbmi->sb_type;
896 if (get_ext_tx_types(tx_size, bsize, is_inter, cm->reduced_tx_set_used) > 1 &&
897 ((!cm->seg.enabled && cm->base_qindex > 0) ||
898 (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
899 !mbmi->skip &&
900 !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
901 const TxSetType tx_set_type =
902 get_ext_tx_set_type(tx_size, bsize, is_inter, cm->reduced_tx_set_used);
903 const int eset =
904 get_ext_tx_set(tx_size, bsize, is_inter, cm->reduced_tx_set_used);
905 // eset == 0 should correspond to a set with only DCT_DCT and there
906 // is no need to send the tx_type
907 assert(eset > 0);
908 assert(av1_ext_tx_used[tx_set_type][tx_type]);
909 if (is_inter) {
910 aom_write_symbol(w, av1_ext_tx_ind[tx_set_type][tx_type],
911 ec_ctx->inter_ext_tx_cdf[eset][square_tx_size],
912 av1_num_ext_tx_set[tx_set_type]);
913 } else {
Yue Chen57b8ff62017-10-10 23:37:31 -0700914#if CONFIG_FILTER_INTRA
Hui Su99350a62018-01-11 16:41:09 -0800915 PREDICTION_MODE intra_dir;
916 if (mbmi->filter_intra_mode_info.use_filter_intra)
917 intra_dir =
918 fimode_to_intradir[mbmi->filter_intra_mode_info.filter_intra_mode];
919 else
920 intra_dir = mbmi->mode;
921 aom_write_symbol(
922 w, av1_ext_tx_ind[tx_set_type][tx_type],
923 ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][intra_dir],
924 av1_num_ext_tx_set[tx_set_type]);
Yue Chen57b8ff62017-10-10 23:37:31 -0700925#else
Hui Su99350a62018-01-11 16:41:09 -0800926 aom_write_symbol(
927 w, av1_ext_tx_ind[tx_set_type][tx_type],
928 ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][mbmi->mode],
929 av1_num_ext_tx_set[tx_set_type]);
Yue Chen57b8ff62017-10-10 23:37:31 -0700930#endif
Lester Lu432012f2017-08-17 14:39:29 -0700931 }
Jingning Han2a4da942016-11-03 18:31:30 -0700932 }
933}
934
static void write_intra_mode(FRAME_CONTEXT *frame_ctx, BLOCK_SIZE bsize,
                             PREDICTION_MODE mode, aom_writer *w) {
  aom_write_symbol(w, mode, frame_ctx->y_mode_cdf[size_group_lookup[bsize]],
                   INTRA_MODES);
}

static void write_intra_uv_mode(FRAME_CONTEXT *frame_ctx,
                                UV_PREDICTION_MODE uv_mode,
                                PREDICTION_MODE y_mode,
#if CONFIG_CFL
                                CFL_ALLOWED_TYPE cfl_allowed,
#endif
                                aom_writer *w) {
#if CONFIG_CFL
  aom_write_symbol(w, uv_mode, frame_ctx->uv_mode_cdf[cfl_allowed][y_mode],
                   UV_INTRA_MODES - !cfl_allowed);
#else
  uv_mode = get_uv_mode(uv_mode);
  aom_write_symbol(w, uv_mode, frame_ctx->uv_mode_cdf[y_mode], UV_INTRA_MODES);
#endif
}

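// CfL alpha coding: the joint sign of the U and V scaling factors is sent
// first; an alpha magnitude follows only for planes with a nonzero sign.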
#if CONFIG_CFL
static void write_cfl_alphas(FRAME_CONTEXT *const ec_ctx, int idx,
                             int joint_sign, aom_writer *w) {
  aom_write_symbol(w, joint_sign, ec_ctx->cfl_sign_cdf, CFL_JOINT_SIGNS);
  // Magnitudes are only signaled for nonzero codes.
  if (CFL_SIGN_U(joint_sign) != CFL_SIGN_ZERO) {
    aom_cdf_prob *cdf_u = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_U(joint_sign)];
    aom_write_symbol(w, CFL_IDX_U(idx), cdf_u, CFL_ALPHABET_SIZE);
  }
  if (CFL_SIGN_V(joint_sign) != CFL_SIGN_ZERO) {
    aom_cdf_prob *cdf_v = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_V(joint_sign)];
    aom_write_symbol(w, CFL_IDX_V(idx), cdf_v, CFL_ALPHABET_SIZE);
  }
}
#endif

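// CDEF strength is signaled once per 64x64 filter unit, at the first
// non-skip coding block inside it; the presets are reset at the top-left of
// each superblock.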
static void write_cdef(AV1_COMMON *cm, aom_writer *w, int skip, int mi_col,
                       int mi_row) {
  if (cm->all_lossless) return;

  const int m = ~((1 << (6 - MI_SIZE_LOG2)) - 1);
  const MB_MODE_INFO *mbmi =
      &cm->mi_grid_visible[(mi_row & m) * cm->mi_stride + (mi_col & m)]->mbmi;
  // Initialise when at top left part of the superblock
  if (!(mi_row & (cm->seq_params.mib_size - 1)) &&
      !(mi_col & (cm->seq_params.mib_size - 1))) {  // Top left?
#if CONFIG_EXT_PARTITION
    cm->cdef_preset[0] = cm->cdef_preset[1] = cm->cdef_preset[2] =
        cm->cdef_preset[3] = -1;
#else
    cm->cdef_preset = -1;
#endif
  }

// Emit CDEF param at first non-skip coding block
#if CONFIG_EXT_PARTITION
  const int mask = 1 << (6 - MI_SIZE_LOG2);
  const int index = cm->seq_params.sb_size == BLOCK_128X128
                        ? !!(mi_col & mask) + 2 * !!(mi_row & mask)
                        : 0;
  if (cm->cdef_preset[index] == -1 && !skip) {
    aom_write_literal(w, mbmi->cdef_strength, cm->cdef_bits);
    cm->cdef_preset[index] = mbmi->cdef_strength;
  }
#else
  if (cm->cdef_preset == -1 && !skip) {
    aom_write_literal(w, mbmi->cdef_strength, cm->cdef_bits);
    cm->cdef_preset = mbmi->cdef_strength;
  }
#endif
}

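// Writes the segment id of an inter block, either spatially predicted
// (CONFIG_SPATIAL_SEGMENTATION) or via the temporal prediction flag,
// respecting whether the frame signals segment ids before or after skip.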
static void write_inter_segment_id(AV1_COMP *cpi, aom_writer *w,
                                   const struct segmentation *const seg,
                                   struct segmentation_probs *const segp,
                                   int mi_row, int mi_col, int skip,
                                   int preskip) {
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  const MODE_INFO *mi = xd->mi[0];
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
#if CONFIG_SPATIAL_SEGMENTATION
  AV1_COMMON *const cm = &cpi->common;
#else
  (void)mi_row;
  (void)mi_col;
  (void)skip;
  (void)preskip;
#endif

  if (seg->update_map) {
#if CONFIG_SPATIAL_SEGMENTATION
    if (preskip) {
      if (!cm->preskip_segid) return;
    } else {
      if (cm->preskip_segid) return;
      if (skip) {
        write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 1);
        if (seg->temporal_update) ((MB_MODE_INFO *)mbmi)->seg_id_predicted = 0;
        return;
      }
    }
#endif
    if (seg->temporal_update) {
      const int pred_flag = mbmi->seg_id_predicted;
      aom_cdf_prob *pred_cdf = av1_get_pred_cdf_seg_id(segp, xd);
      aom_write_symbol(w, pred_flag, pred_cdf, 2);
      if (!pred_flag) {
#if CONFIG_SPATIAL_SEGMENTATION
        write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
#else
        write_segment_id(w, seg, segp, mbmi->segment_id);
#endif
      }
#if CONFIG_SPATIAL_SEGMENTATION
      if (pred_flag) {
        set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type,
                               mi_row, mi_col, mbmi->segment_id);
      }
#endif
    } else {
#if CONFIG_SPATIAL_SEGMENTATION
      write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
#else
      write_segment_id(w, seg, segp, mbmi->segment_id);
#endif
    }
  }
}

Angie Chiangc31ea682017-04-13 16:20:54 -07001066static void pack_inter_mode_mvs(AV1_COMP *cpi, const int mi_row,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001067 const int mi_col, aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001068 AV1_COMMON *const cm = &cpi->common;
Arild Fuldseth07441162016-08-15 15:07:52 +02001069 MACROBLOCK *const x = &cpi->td.mb;
1070 MACROBLOCKD *const xd = &x->e_mbd;
Thomas Davies24523292017-01-11 16:56:47 +00001071 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Angie Chiangc31ea682017-04-13 16:20:54 -07001072 const MODE_INFO *mi = xd->mi[0];
Thomas Davies24523292017-01-11 16:56:47 +00001073
Yaowu Xuc27fc142016-08-22 16:08:15 -07001074 const struct segmentation *const seg = &cm->seg;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001075 struct segmentation_probs *const segp = &ec_ctx->seg;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001076 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1077 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
1078 const PREDICTION_MODE mode = mbmi->mode;
1079 const int segment_id = mbmi->segment_id;
1080 const BLOCK_SIZE bsize = mbmi->sb_type;
1081 const int allow_hp = cm->allow_high_precision_mv;
1082 const int is_inter = is_inter_block(mbmi);
1083 const int is_compound = has_second_ref(mbmi);
1084 int skip, ref;
David Barker45390c12017-02-20 14:44:40 +00001085 (void)mi_row;
1086 (void)mi_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001087
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001088 write_inter_segment_id(cpi, w, seg, segp, mi_row, mi_col, 0, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001089
Zoe Liuf40a9572017-10-13 12:37:19 -07001090#if CONFIG_EXT_SKIP
1091 write_skip_mode(cm, xd, segment_id, mi, w);
1092
1093 if (mbmi->skip_mode) {
1094 skip = mbmi->skip;
1095 assert(skip);
1096 } else {
1097#endif // CONFIG_EXT_SKIP
1098 skip = write_skip(cm, xd, segment_id, mi, w);
1099#if CONFIG_EXT_SKIP
1100 }
1101#endif // CONFIG_EXT_SKIP
1102
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001103#if CONFIG_SPATIAL_SEGMENTATION
1104 write_inter_segment_id(cpi, w, seg, segp, mi_row, mi_col, skip, 0);
1105#endif
Zoe Liuf40a9572017-10-13 12:37:19 -07001106
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001107 write_cdef(cm, w, skip, mi_col, mi_row);
1108
Arild Fuldseth07441162016-08-15 15:07:52 +02001109 if (cm->delta_q_present_flag) {
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001110 int super_block_upper_left =
1111 ((mi_row & (cm->seq_params.mib_size - 1)) == 0) &&
1112 ((mi_col & (cm->seq_params.mib_size - 1)) == 0);
1113 if ((bsize != cm->seq_params.sb_size || skip == 0) &&
1114 super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01001115 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01001116 int reduced_delta_qindex =
1117 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00001118 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001119 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07001120#if CONFIG_EXT_DELTA_Q
Cheng Chena97394f2017-09-27 15:05:14 -07001121#if CONFIG_LOOPFILTER_LEVEL
1122 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07001123 if (cm->delta_lf_multi) {
Imdad Sardharwallaf74b4ab2018-02-20 17:22:42 +00001124 const int frame_lf_count =
1125 av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
1126 for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id) {
Cheng Chen880166a2017-10-02 17:48:48 -07001127 int reduced_delta_lflevel =
1128 (mbmi->curr_delta_lf[lf_id] - xd->prev_delta_lf[lf_id]) /
1129 cm->delta_lf_res;
1130 write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
1131 xd->prev_delta_lf[lf_id] = mbmi->curr_delta_lf[lf_id];
1132 }
1133 } else {
Cheng Chena97394f2017-09-27 15:05:14 -07001134 int reduced_delta_lflevel =
Cheng Chen880166a2017-10-02 17:48:48 -07001135 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
Cheng Chena97394f2017-09-27 15:05:14 -07001136 cm->delta_lf_res;
Cheng Chen880166a2017-10-02 17:48:48 -07001137 write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
1138 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
Cheng Chena97394f2017-09-27 15:05:14 -07001139 }
1140 }
1141#else
Fangwen Fu231fe422017-04-24 17:52:29 -07001142 if (cm->delta_lf_present_flag) {
1143 int reduced_delta_lflevel =
1144 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
1145 cm->delta_lf_res;
1146 write_delta_lflevel(cm, xd, reduced_delta_lflevel, w);
1147 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
1148 }
Cheng Chena97394f2017-09-27 15:05:14 -07001149#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07001150#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02001151 }
1152 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001153
Zoe Liuf40a9572017-10-13 12:37:19 -07001154#if CONFIG_EXT_SKIP
1155 if (!mbmi->skip_mode)
1156#endif // CONFIG_EXT_SKIP
1157 write_is_inter(cm, xd, mbmi->segment_id, w, is_inter);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001158
Debargha Mukherjee4def76a2017-10-19 13:38:35 -07001159 if (cm->tx_mode == TX_MODE_SELECT && block_signals_txsize(bsize) &&
Yaowu Xuc27fc142016-08-22 16:08:15 -07001160 !(is_inter && skip) && !xd->lossless[segment_id]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001161 if (is_inter) { // This implies skip flag is 0.
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001162 const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, bsize, 0);
Jingning Hanf64062f2016-11-02 16:22:18 -07001163 const int bh = tx_size_high_unit[max_tx_size];
1164 const int bw = tx_size_wide_unit[max_tx_size];
Jingning Han9ca05b72017-01-03 14:41:36 -08001165 const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
1166 const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001167 int idx, idy;
Jingning Hanfe45b212016-11-22 10:30:23 -08001168 for (idy = 0; idy < height; idy += bh)
1169 for (idx = 0; idx < width; idx += bw)
Debargha Mukherjeeedc73462017-10-31 15:13:32 -07001170 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, 0, idy, idx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001171 } else {
Jingning Han1b1dc932016-11-09 10:55:30 -08001172 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001173 write_selected_tx_size(cm, xd, w);
1174 }
1175 } else {
Jingning Han1b1dc932016-11-09 10:55:30 -08001176 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001177 }
1178
Zoe Liuf40a9572017-10-13 12:37:19 -07001179#if CONFIG_EXT_SKIP
Zoe Liu56644192017-12-19 13:16:18 -08001180 if (mbmi->skip_mode) return;
Zoe Liuf40a9572017-10-13 12:37:19 -07001181#endif // CONFIG_EXT_SKIP
1182
Yaowu Xuc27fc142016-08-22 16:08:15 -07001183 if (!is_inter) {
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001184 write_intra_mode(ec_ctx, bsize, mode, w);
Luc Trudeau866da792018-02-12 11:13:34 -05001185 const int use_angle_delta = av1_use_angle_delta(bsize);
1186
Hui Su7fb93972018-02-20 21:18:03 -08001187 if (use_angle_delta && av1_is_directional_mode(mode)) {
Luc Trudeau866da792018-02-12 11:13:34 -05001188 write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_Y],
1189 ec_ctx->angle_delta_cdf[mode - V_PRED]);
1190 }
1191
David Barkerc2a680e2018-02-07 15:53:53 +00001192#if CONFIG_MONO_VIDEO
1193 if (!cm->seq_params.monochrome &&
1194 is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
1195 xd->plane[1].subsampling_y))
1196#else
Jingning Hand3a64432017-04-06 17:04:17 -07001197 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
David Barkerc2a680e2018-02-07 15:53:53 +00001198 xd->plane[1].subsampling_y))
1199#endif // CONFIG_MONO_VIDEO
1200 {
Luc Trudeau866da792018-02-12 11:13:34 -05001201 const UV_PREDICTION_MODE uv_mode = mbmi->uv_mode;
David Michael Barrcb3a8ef2018-01-06 15:48:49 +09001202#if !CONFIG_CFL
Luc Trudeau866da792018-02-12 11:13:34 -05001203 write_intra_uv_mode(ec_ctx, uv_mode, mode, w);
David Michael Barrcb3a8ef2018-01-06 15:48:49 +09001204#else
Luc Trudeau866da792018-02-12 11:13:34 -05001205 write_intra_uv_mode(ec_ctx, uv_mode, mode, is_cfl_allowed(mbmi), w);
1206 if (uv_mode == UV_CFL_PRED)
David Michael Barr23198662017-06-19 23:19:48 +09001207 write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001208#endif
Hui Su7fb93972018-02-20 21:18:03 -08001209 if (use_angle_delta && av1_is_directional_mode(get_uv_mode(uv_mode))) {
Luc Trudeau866da792018-02-12 11:13:34 -05001210 write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_UV],
1211 ec_ctx->angle_delta_cdf[uv_mode - V_PRED]);
1212 }
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001213 }
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001214
Hui Sue87fb232017-10-05 15:00:15 -07001215 if (av1_allow_palette(cm->allow_screen_content_tools, bsize))
Hui Su8b618f62017-12-20 12:03:35 -08001216 write_palette_mode_info(cm, xd, mi, mi_row, mi_col, w);
hui su5db97432016-10-14 16:10:14 -07001217#if CONFIG_FILTER_INTRA
Yue Chen4eba69b2017-11-09 22:37:35 -08001218 write_filter_intra_mode_info(xd, mbmi, w);
hui su5db97432016-10-14 16:10:14 -07001219#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001220 } else {
Yaowu Xub0d0d002016-11-22 09:26:43 -08001221 int16_t mode_ctx;
Zoe Liufa8bad12018-01-23 14:32:31 -08001222
1223 av1_collect_neighbors_ref_counts(xd);
1224
Yaowu Xuc27fc142016-08-22 16:08:15 -07001225 write_ref_frames(cm, xd, w);
1226
Jingning Han7ae50fd2018-02-05 16:33:40 -08001227#if CONFIG_OPT_REF_MV
1228 mode_ctx =
1229 av1_mode_context_analyzer(mbmi_ext->mode_context, mbmi->ref_frame);
1230#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001231 if (is_compound)
1232 mode_ctx = mbmi_ext->compound_mode_context[mbmi->ref_frame[0]];
1233 else
Luc Trudeau15a18e32017-12-13 14:15:25 -05001234 mode_ctx =
1235 av1_mode_context_analyzer(mbmi_ext->mode_context, mbmi->ref_frame);
Jingning Han7ae50fd2018-02-05 16:33:40 -08001236#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001237
1238 // If segment skip is not enabled code the mode.
1239 if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001240 if (is_inter_compound_mode(mode))
1241 write_inter_compound_mode(cm, xd, w, mode, mode_ctx);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001242 else if (is_inter_singleref_mode(mode))
1243 write_inter_mode(w, mode, ec_ctx, mode_ctx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001244
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001245 if (mode == NEWMV || mode == NEW_NEWMV || have_nearmv_in_inter_mode(mode))
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001246 write_drl_idx(ec_ctx, mbmi, mbmi_ext, w);
1247 else
1248 assert(mbmi->ref_mv_idx == 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001249 }
1250
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001251 if (mode == NEWMV || mode == NEW_NEWMV) {
1252 int_mv ref_mv;
1253 for (ref = 0; ref < 1 + is_compound; ++ref) {
1254 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1255 int nmv_ctx =
1256 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1257 mbmi_ext->ref_mv_stack[rf_type], ref, mbmi->ref_mv_idx);
1258 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1259 ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0];
1260 av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv, nmvc,
1261 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001262 }
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001263 } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
1264 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
Imdad Sardharwallac23ad632017-11-28 14:12:38 +00001265 int nmv_ctx = av1_nmv_ctx(
1266 mbmi_ext->ref_mv_count[rf_type], mbmi_ext->ref_mv_stack[rf_type], 1,
1267 mbmi->ref_mv_idx + (mode == NEAR_NEWMV ? 1 : 0));
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001268 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1269 av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv,
1270 &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv, nmvc,
1271 allow_hp);
1272 } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
1273 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
Imdad Sardharwallac23ad632017-11-28 14:12:38 +00001274 int nmv_ctx = av1_nmv_ctx(
1275 mbmi_ext->ref_mv_count[rf_type], mbmi_ext->ref_mv_stack[rf_type], 0,
1276 mbmi->ref_mv_idx + (mode == NEW_NEARMV ? 1 : 0));
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001277 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
1278 av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv,
1279 &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv, nmvc,
1280 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001281 }
1282
Yaowu Xuc27fc142016-08-22 16:08:15 -07001283 if (cpi->common.reference_mode != COMPOUND_REFERENCE &&
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001284 cpi->common.allow_interintra_compound && is_interintra_allowed(mbmi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001285 const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
1286 const int bsize_group = size_group_lookup[bsize];
Thomas Daviescff91712017-07-07 11:49:55 +01001287 aom_write_symbol(w, interintra, ec_ctx->interintra_cdf[bsize_group], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001288 if (interintra) {
Thomas Davies299ff042017-06-27 13:41:59 +01001289 aom_write_symbol(w, mbmi->interintra_mode,
1290 ec_ctx->interintra_mode_cdf[bsize_group],
1291 INTERINTRA_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001292 if (is_interintra_wedge_used(bsize)) {
Thomas Daviescff91712017-07-07 11:49:55 +01001293 aom_write_symbol(w, mbmi->use_wedge_interintra,
1294 ec_ctx->wedge_interintra_cdf[bsize], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001295 if (mbmi->use_wedge_interintra) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001296 aom_write_literal(w, mbmi->interintra_wedge_index,
1297 get_wedge_bits_lookup(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001298 assert(mbmi->interintra_wedge_sign == 0);
1299 }
1300 }
1301 }
1302 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001303
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001304 if (mbmi->ref_frame[1] != INTRA_FRAME) write_motion_mode(cm, xd, mi, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001305
Cheng Chen33a13d92017-11-28 16:49:59 -08001306#if CONFIG_JNT_COMP
1307 // First write idx to indicate current compound inter prediction mode group
1308 // Group A (0): jnt_comp, compound_average
1309 // Group B (1): interintra, compound_segment, wedge
1310 if (has_second_ref(mbmi)) {
Zoe Liu5f11e912017-12-05 23:23:56 -08001311 const int masked_compound_used =
1312 is_any_masked_compound_used(bsize) && cm->allow_masked_compound;
Cheng Chen5a881722017-11-30 17:05:10 -08001313
Zoe Liu5f11e912017-12-05 23:23:56 -08001314 if (masked_compound_used) {
Cheng Chen5a881722017-11-30 17:05:10 -08001315 const int ctx_comp_group_idx = get_comp_group_idx_context(xd);
1316 aom_write_symbol(w, mbmi->comp_group_idx,
1317 ec_ctx->comp_group_idx_cdf[ctx_comp_group_idx], 2);
Zoe Liu5f11e912017-12-05 23:23:56 -08001318 } else {
1319 assert(mbmi->comp_group_idx == 0);
Cheng Chen5a881722017-11-30 17:05:10 -08001320 }
Cheng Chen33a13d92017-11-28 16:49:59 -08001321
1322 if (mbmi->comp_group_idx == 0) {
1323 if (mbmi->compound_idx)
1324 assert(mbmi->interinter_compound_type == COMPOUND_AVERAGE);
1325
1326 const int comp_index_ctx = get_comp_index_context(cm, xd);
1327 aom_write_symbol(w, mbmi->compound_idx,
1328 ec_ctx->compound_index_cdf[comp_index_ctx], 2);
1329 } else {
Zoe Liu5f11e912017-12-05 23:23:56 -08001330 assert(cpi->common.reference_mode != SINGLE_REFERENCE &&
1331 is_inter_compound_mode(mbmi->mode) &&
1332 mbmi->motion_mode == SIMPLE_TRANSLATION);
1333 assert(masked_compound_used);
1334 // compound_segment, wedge
Cheng Chen33a13d92017-11-28 16:49:59 -08001335 assert(mbmi->interinter_compound_type == COMPOUND_WEDGE ||
1336 mbmi->interinter_compound_type == COMPOUND_SEG);
Cheng Chen33a13d92017-11-28 16:49:59 -08001337
Zoe Liu5f11e912017-12-05 23:23:56 -08001338 if (is_interinter_compound_used(COMPOUND_WEDGE, bsize))
1339 aom_write_symbol(w, mbmi->interinter_compound_type - 1,
1340 ec_ctx->compound_type_cdf[bsize],
1341 COMPOUND_TYPES - 1);
1342
1343 if (mbmi->interinter_compound_type == COMPOUND_WEDGE) {
1344 assert(is_interinter_compound_used(COMPOUND_WEDGE, bsize));
1345 aom_write_literal(w, mbmi->wedge_index, get_wedge_bits_lookup(bsize));
1346 aom_write_bit(w, mbmi->wedge_sign);
1347 } else {
1348 assert(mbmi->interinter_compound_type == COMPOUND_SEG);
1349 aom_write_literal(w, mbmi->mask_type, MAX_SEG_MASK_BITS);
Cheng Chen33a13d92017-11-28 16:49:59 -08001350 }
1351 }
1352 }
1353#else // CONFIG_JNT_COMP
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001354 if (cpi->common.reference_mode != SINGLE_REFERENCE &&
Zoe Liu85b66462017-04-20 14:28:19 -07001355 is_inter_compound_mode(mbmi->mode) &&
Zoe Liu85b66462017-04-20 14:28:19 -07001356 mbmi->motion_mode == SIMPLE_TRANSLATION &&
Zoe Liu85b66462017-04-20 14:28:19 -07001357 is_any_masked_compound_used(bsize)) {
Cheng Chen33a13d92017-11-28 16:49:59 -08001358 if (cm->allow_masked_compound) {
Sarah Parker680b9b12017-08-16 18:55:34 -07001359 if (!is_interinter_compound_used(COMPOUND_WEDGE, bsize))
1360 aom_write_bit(w, mbmi->interinter_compound_type == COMPOUND_AVERAGE);
1361 else
Cheng Chenbdd6ca82017-10-23 22:34:25 -07001362 aom_write_symbol(w, mbmi->interinter_compound_type,
1363 ec_ctx->compound_type_cdf[bsize], COMPOUND_TYPES);
Sarah Parker680b9b12017-08-16 18:55:34 -07001364 if (is_interinter_compound_used(COMPOUND_WEDGE, bsize) &&
1365 mbmi->interinter_compound_type == COMPOUND_WEDGE) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001366 aom_write_literal(w, mbmi->wedge_index, get_wedge_bits_lookup(bsize));
1367 aom_write_bit(w, mbmi->wedge_sign);
1368 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001369 if (mbmi->interinter_compound_type == COMPOUND_SEG) {
Cheng Chenbdd6ca82017-10-23 22:34:25 -07001370 aom_write_literal(w, mbmi->mask_type, MAX_SEG_MASK_BITS);
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001371 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001372 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001373 }
Cheng Chen33a13d92017-11-28 16:49:59 -08001374#endif // CONFIG_JNT_COMP
Yaowu Xuc27fc142016-08-22 16:08:15 -07001375
Debargha Mukherjee0df711f2017-05-02 16:00:20 -07001376 write_mb_interp_filter(cpi, xd, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001377 }
1378
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001379#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001380 av1_write_tx_type(cm, xd, w);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001381#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07001382}
1383
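// Minimal round-trip sketch of the delta-q signalling used above and in
// write_mb_modes_kf(), assuming (as the division implies) that the encoder
// only produces q indices whose difference from the previously coded value is
// a multiple of delta_q_res. The helpers are illustrative, not the codec API.
static INLINE int delta_q_to_symbol_sketch(int q_index, int prev_q_index,
                                           int delta_q_res) {
  return (q_index - prev_q_index) / delta_q_res;  // value that gets signalled
}
static INLINE int delta_q_from_symbol_sketch(int symbol, int prev_q_index,
                                             int delta_q_res) {
  return prev_q_index + symbol * delta_q_res;  // decoder-side reconstruction
}
// Example: prev_q_index = 40, q_index = 52, delta_q_res = 4 gives a symbol of
// 3, and 40 + 3 * 4 recovers 52. The loop-filter deltas follow the same
// pattern with delta_lf_res.
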
Hui Suc2232cf2017-10-11 17:32:56 -07001384#if CONFIG_INTRABC
1385static void write_intrabc_info(AV1_COMMON *cm, MACROBLOCKD *xd,
1386 const MB_MODE_INFO_EXT *mbmi_ext,
1387 int enable_tx_size, aom_writer *w) {
1388 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
1389 int use_intrabc = is_intrabc_block(mbmi);
1390 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1391 aom_write_symbol(w, use_intrabc, ec_ctx->intrabc_cdf, 2);
1392 if (use_intrabc) {
1393 assert(mbmi->mode == DC_PRED);
1394 assert(mbmi->uv_mode == UV_DC_PRED);
Hui Su12546aa2017-10-13 16:10:01 -07001395 if ((enable_tx_size && !mbmi->skip)) {
Hui Su12546aa2017-10-13 16:10:01 -07001396 const BLOCK_SIZE bsize = mbmi->sb_type;
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001397 const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, bsize, 0);
Hui Su12546aa2017-10-13 16:10:01 -07001398 const int bh = tx_size_high_unit[max_tx_size];
1399 const int bw = tx_size_wide_unit[max_tx_size];
1400 const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
1401 const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
Hui Su12546aa2017-10-13 16:10:01 -07001402 int idx, idy;
1403 for (idy = 0; idy < height; idy += bh) {
1404 for (idx = 0; idx < width; idx += bw) {
Debargha Mukherjeeedc73462017-10-31 15:13:32 -07001405 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, 0, idy, idx, w);
Hui Su12546aa2017-10-13 16:10:01 -07001406 }
1407 }
Hui Su12546aa2017-10-13 16:10:01 -07001408 } else {
Hui Su12546aa2017-10-13 16:10:01 -07001409 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, mbmi->skip, xd);
Hui Su12546aa2017-10-13 16:10:01 -07001410 }
Hui Suc2232cf2017-10-11 17:32:56 -07001411 int_mv dv_ref = mbmi_ext->ref_mvs[INTRA_FRAME][0];
1412 av1_encode_dv(w, &mbmi->mv[0].as_mv, &dv_ref.as_mv, &ec_ctx->ndvc);
Sebastien Alaiwan3bac9922017-11-02 12:34:41 +01001413#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001414 av1_write_tx_type(cm, xd, w);
Sebastien Alaiwan3bac9922017-11-02 12:34:41 +01001415#endif // !CONFIG_TXK_SEL
Hui Suc2232cf2017-10-11 17:32:56 -07001416 }
1417}
1418#endif // CONFIG_INTRABC
1419
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001420static void write_mb_modes_kf(AV1_COMP *cpi, MACROBLOCKD *xd,
Alex Converse44c2bad2017-05-11 09:36:10 -07001421#if CONFIG_INTRABC
1422 const MB_MODE_INFO_EXT *mbmi_ext,
1423#endif // CONFIG_INTRABC
Jingning Han36fe3202017-02-20 22:31:49 -08001424 const int mi_row, const int mi_col,
Angie Chiangc31ea682017-04-13 16:20:54 -07001425 aom_writer *w) {
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001426 AV1_COMMON *const cm = &cpi->common;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001427 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001428 const struct segmentation *const seg = &cm->seg;
Thomas Davies9f5cedd2017-07-10 09:20:32 +01001429 struct segmentation_probs *const segp = &ec_ctx->seg;
Angie Chiangc31ea682017-04-13 16:20:54 -07001430 const MODE_INFO *const mi = xd->mi[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001431 const MODE_INFO *const above_mi = xd->above_mi;
1432 const MODE_INFO *const left_mi = xd->left_mi;
1433 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1434 const BLOCK_SIZE bsize = mbmi->sb_type;
Luc Trudeau866da792018-02-12 11:13:34 -05001435 const PREDICTION_MODE mode = mbmi->mode;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001436
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001437#if CONFIG_SPATIAL_SEGMENTATION
1438 if (cm->preskip_segid && seg->update_map)
1439 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
1440#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001441 if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id);
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001442#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001443
Alex Converse619576b2017-05-10 15:14:18 -07001444 const int skip = write_skip(cm, xd, mbmi->segment_id, mi, w);
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001445
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001446#if CONFIG_SPATIAL_SEGMENTATION
1447 if (!cm->preskip_segid && seg->update_map)
1448 write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, skip);
Rostislav Pehlivanovf624dd52017-10-24 16:46:09 +01001449#endif
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001450
1451 write_cdef(cm, w, skip, mi_col, mi_row);
1452
Arild Fuldseth07441162016-08-15 15:07:52 +02001453 if (cm->delta_q_present_flag) {
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001454 int super_block_upper_left =
1455 ((mi_row & (cm->seq_params.mib_size - 1)) == 0) &&
1456 ((mi_col & (cm->seq_params.mib_size - 1)) == 0);
1457 if ((bsize != cm->seq_params.sb_size || skip == 0) &&
1458 super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01001459 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01001460 int reduced_delta_qindex =
1461 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00001462 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001463 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07001464#if CONFIG_EXT_DELTA_Q
Cheng Chena97394f2017-09-27 15:05:14 -07001465#if CONFIG_LOOPFILTER_LEVEL
1466 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07001467 if (cm->delta_lf_multi) {
Imdad Sardharwallaf74b4ab2018-02-20 17:22:42 +00001468 const int frame_lf_count =
1469 av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
1470 for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id) {
Cheng Chen880166a2017-10-02 17:48:48 -07001471 int reduced_delta_lflevel =
1472 (mbmi->curr_delta_lf[lf_id] - xd->prev_delta_lf[lf_id]) /
1473 cm->delta_lf_res;
1474 write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
1475 xd->prev_delta_lf[lf_id] = mbmi->curr_delta_lf[lf_id];
1476 }
1477 } else {
Cheng Chena97394f2017-09-27 15:05:14 -07001478 int reduced_delta_lflevel =
Cheng Chen880166a2017-10-02 17:48:48 -07001479 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
Cheng Chena97394f2017-09-27 15:05:14 -07001480 cm->delta_lf_res;
Cheng Chen880166a2017-10-02 17:48:48 -07001481 write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
1482 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
Cheng Chena97394f2017-09-27 15:05:14 -07001483 }
1484 }
1485#else
Fangwen Fu231fe422017-04-24 17:52:29 -07001486 if (cm->delta_lf_present_flag) {
1487 int reduced_delta_lflevel =
1488 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
1489 cm->delta_lf_res;
1490 write_delta_lflevel(cm, xd, reduced_delta_lflevel, w);
1491 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
1492 }
Cheng Chena97394f2017-09-27 15:05:14 -07001493#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07001494#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02001495 }
1496 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001497
Alex Conversef71808c2017-06-06 12:21:17 -07001498 int enable_tx_size = cm->tx_mode == TX_MODE_SELECT &&
Rupert Swarbrickfcff0b22017-10-05 09:26:04 +01001499 block_signals_txsize(bsize) &&
Alex Conversef71808c2017-06-06 12:21:17 -07001500 !xd->lossless[mbmi->segment_id];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001501
Alex Converse28744302017-04-13 14:46:22 -07001502#if CONFIG_INTRABC
Hui Sueb2fd5c2017-12-15 14:38:01 -08001503 if (av1_allow_intrabc(cm)) {
Hui Suc2232cf2017-10-11 17:32:56 -07001504 write_intrabc_info(cm, xd, mbmi_ext, enable_tx_size, w);
1505 if (is_intrabc_block(mbmi)) return;
Alex Converse28744302017-04-13 14:46:22 -07001506 }
1507#endif // CONFIG_INTRABC
Hui Suc2232cf2017-10-11 17:32:56 -07001508
Alex Conversef71808c2017-06-06 12:21:17 -07001509 if (enable_tx_size) write_selected_tx_size(cm, xd, w);
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001510#if CONFIG_INTRABC
Hui Su12546aa2017-10-13 16:10:01 -07001511 if (cm->allow_screen_content_tools)
1512 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, mbmi->skip, xd);
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001513#endif // CONFIG_INTRABC
Alex Converse28744302017-04-13 14:46:22 -07001514
Luc Trudeau866da792018-02-12 11:13:34 -05001515 write_intra_mode_kf(ec_ctx, mi, above_mi, left_mi, mode, w);
1516
1517 const int use_angle_delta = av1_use_angle_delta(bsize);
Hui Su7fb93972018-02-20 21:18:03 -08001518 if (use_angle_delta && av1_is_directional_mode(mode)) {
Luc Trudeau866da792018-02-12 11:13:34 -05001519 write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_Y],
1520 ec_ctx->angle_delta_cdf[mode - V_PRED]);
1521 }
Jingning Han0b7cbe62017-03-08 10:22:47 -08001522
David Barkerc2a680e2018-02-07 15:53:53 +00001523#if CONFIG_MONO_VIDEO
1524 if (!cm->seq_params.monochrome &&
1525 is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
1526 xd->plane[1].subsampling_y))
1527#else
Jingning Hand3a64432017-04-06 17:04:17 -07001528 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
David Barkerc2a680e2018-02-07 15:53:53 +00001529 xd->plane[1].subsampling_y))
1530#endif // CONFIG_MONO_VIDEO
1531 {
Luc Trudeau866da792018-02-12 11:13:34 -05001532 const UV_PREDICTION_MODE uv_mode = mbmi->uv_mode;
David Michael Barrcb3a8ef2018-01-06 15:48:49 +09001533#if !CONFIG_CFL
Luc Trudeau866da792018-02-12 11:13:34 -05001534 write_intra_uv_mode(ec_ctx, uv_mode, mode, w);
David Michael Barrcb3a8ef2018-01-06 15:48:49 +09001535#else
Luc Trudeau866da792018-02-12 11:13:34 -05001536 write_intra_uv_mode(ec_ctx, uv_mode, mode, is_cfl_allowed(mbmi), w);
1537 if (uv_mode == UV_CFL_PRED)
David Michael Barr23198662017-06-19 23:19:48 +09001538 write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
Luc Trudeauf5334002017-04-25 12:21:26 -04001539#endif
Hui Su7fb93972018-02-20 21:18:03 -08001540 if (use_angle_delta && av1_is_directional_mode(get_uv_mode(uv_mode))) {
Luc Trudeau866da792018-02-12 11:13:34 -05001541 write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_UV],
1542 ec_ctx->angle_delta_cdf[uv_mode - V_PRED]);
1543 }
Luc Trudeau2c317902017-04-28 11:06:50 -04001544 }
Debargha Mukherjee6ea917e2017-10-19 09:31:29 -07001545
Hui Sue87fb232017-10-05 15:00:15 -07001546 if (av1_allow_palette(cm->allow_screen_content_tools, bsize))
Hui Su8b618f62017-12-20 12:03:35 -08001547 write_palette_mode_info(cm, xd, mi, mi_row, mi_col, w);
hui su5db97432016-10-14 16:10:14 -07001548#if CONFIG_FILTER_INTRA
Yue Chen4eba69b2017-11-09 22:37:35 -08001549 write_filter_intra_mode_info(xd, mbmi, w);
hui su5db97432016-10-14 16:10:14 -07001550#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001551
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001552#if !CONFIG_TXK_SEL
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001553 av1_write_tx_type(cm, xd, w);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001554#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07001555}
1556
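// Sketch of what the angle_delta symbols written above represent, assuming
// AV1's directional-intra convention of a per-mode base angle refined in
// 3-degree steps over [-MAX_ANGLE_DELTA, MAX_ANGLE_DELTA]. The step constant
// and helper below are illustrative assumptions, not the library's
// definitions.
static INLINE int predicted_angle_sketch(int base_angle_deg, int angle_delta) {
  const int angle_step_deg = 3;  // assumed refinement granularity per step
  return base_angle_deg + angle_delta * angle_step_deg;
}
// Example: V_PRED has a 90-degree base angle, so an angle_delta of -2 selects
// an 84-degree prediction direction.
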
Angie Chiangd4022822016-11-02 18:30:25 -07001557#if CONFIG_RD_DEBUG
1558static void dump_mode_info(MODE_INFO *mi) {
1559 printf("\nmi->mbmi.mi_row == %d\n", mi->mbmi.mi_row);
1560 printf("&& mi->mbmi.mi_col == %d\n", mi->mbmi.mi_col);
1561 printf("&& mi->mbmi.sb_type == %d\n", mi->mbmi.sb_type);
1562 printf("&& mi->mbmi.tx_size == %d\n", mi->mbmi.tx_size);
Jingning Han2fac8a42017-12-14 16:26:00 -08001563 printf("&& mi->mbmi.mode == %d\n", mi->mbmi.mode);
Angie Chiangd4022822016-11-02 18:30:25 -07001564}
Angie Chiangd02001d2016-11-06 15:31:49 -08001565static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats,
1566 int plane) {
1567 if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) {
1568 int r, c;
1569 printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n",
1570 plane, rd_stats->txb_coeff_cost[plane], token_stats->cost);
Angie Chiangd02001d2016-11-06 15:31:49 -08001571 printf("rd txb_coeff_cost_map\n");
1572 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
1573 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
1574 printf("%d ", rd_stats->txb_coeff_cost_map[plane][r][c]);
1575 }
1576 printf("\n");
1577 }
1578
1579 printf("pack txb_coeff_cost_map\n");
1580 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
1581 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
1582 printf("%d ", token_stats->txb_coeff_cost_map[r][c]);
1583 }
1584 printf("\n");
1585 }
Angie Chiangd02001d2016-11-06 15:31:49 -08001586 return 1;
1587 }
1588 return 0;
1589}
Angie Chiangd4022822016-11-02 18:30:25 -07001590#endif
1591
Di Chen56586622017-06-09 13:49:44 -07001592#if ENC_MISMATCH_DEBUG
1593static void enc_dump_logs(AV1_COMP *cpi, int mi_row, int mi_col) {
1594 AV1_COMMON *const cm = &cpi->common;
1595 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1596 MODE_INFO *m;
1597 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
1598 m = xd->mi[0];
1599 if (is_inter_block(&m->mbmi)) {
Zoe Liuf40a9572017-10-13 12:37:19 -07001600#define FRAME_TO_CHECK 11
Zoe Liu17af2742017-10-06 10:36:42 -07001601 if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
Di Chen56586622017-06-09 13:49:44 -07001602 const MB_MODE_INFO *const mbmi = &m->mbmi;
1603 const BLOCK_SIZE bsize = mbmi->sb_type;
1604
1605 int_mv mv[2];
1606 int is_comp_ref = has_second_ref(&m->mbmi);
1607 int ref;
1608
1609 for (ref = 0; ref < 1 + is_comp_ref; ++ref)
1610 mv[ref].as_mv = m->mbmi.mv[ref].as_mv;
1611
1612 if (!is_comp_ref) {
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001613 mv[1].as_int = 0;
Di Chen56586622017-06-09 13:49:44 -07001614 }
Di Chen56586622017-06-09 13:49:44 -07001615
Di Chen56586622017-06-09 13:49:44 -07001616 MACROBLOCK *const x = &cpi->td.mb;
Di Chen56586622017-06-09 13:49:44 -07001617 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
Zoe Liuf40a9572017-10-13 12:37:19 -07001618 const int16_t mode_ctx =
1619 is_comp_ref ? mbmi_ext->compound_mode_context[mbmi->ref_frame[0]]
1620 : av1_mode_context_analyzer(mbmi_ext->mode_context,
Luc Trudeau15a18e32017-12-13 14:15:25 -05001621 mbmi->ref_frame);
Zoe Liuf40a9572017-10-13 12:37:19 -07001622
Di Chen56586622017-06-09 13:49:44 -07001623 const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
1624 int16_t zeromv_ctx = -1;
1625 int16_t refmv_ctx = -1;
Zoe Liuf40a9572017-10-13 12:37:19 -07001626
Di Chen56586622017-06-09 13:49:44 -07001627 if (mbmi->mode != NEWMV) {
Sarah Parker2b9ec2e2017-10-30 17:34:08 -07001628 zeromv_ctx = (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
Jingning Han59b12632018-02-12 10:44:52 -08001629 if (mbmi->mode != GLOBALMV)
Di Chen56586622017-06-09 13:49:44 -07001630 refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
Di Chen56586622017-06-09 13:49:44 -07001631 }
1632
Zoe Liuf40a9572017-10-13 12:37:19 -07001633#if CONFIG_EXT_SKIP
1634 printf(
1635 "=== ENCODER ===: "
1636 "Frame=%d, (mi_row,mi_col)=(%d,%d), skip_mode=%d, mode=%d, bsize=%d, "
1637 "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
1638 "ref[1]=%d, motion_mode=%d, mode_ctx=%d, "
1639 "newmv_ctx=%d, zeromv_ctx=%d, refmv_ctx=%d, tx_size=%d\n",
1640 cm->current_video_frame, mi_row, mi_col, mbmi->skip_mode, mbmi->mode,
1641 bsize, cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col,
1642 mv[1].as_mv.row, mv[1].as_mv.col, mbmi->ref_frame[0],
1643 mbmi->ref_frame[1], mbmi->motion_mode, mode_ctx, newmv_ctx,
1644 zeromv_ctx, refmv_ctx, mbmi->tx_size);
1645#else
Di Chen56586622017-06-09 13:49:44 -07001646 printf(
1647 "=== ENCODER ===: "
1648 "Frame=%d, (mi_row,mi_col)=(%d,%d), mode=%d, bsize=%d, "
1649 "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
Zoe Liuf40a9572017-10-13 12:37:19 -07001650 "ref[1]=%d, motion_mode=%d, mode_ctx=%d, "
1651 "newmv_ctx=%d, zeromv_ctx=%d, refmv_ctx=%d, tx_size=%d\n",
Di Chen56586622017-06-09 13:49:44 -07001652 cm->current_video_frame, mi_row, mi_col, mbmi->mode, bsize,
1653 cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col, mv[1].as_mv.row,
1654 mv[1].as_mv.col, mbmi->ref_frame[0], mbmi->ref_frame[1],
Zoe Liuf40a9572017-10-13 12:37:19 -07001655 mbmi->motion_mode, mode_ctx, newmv_ctx, zeromv_ctx, refmv_ctx,
1656 mbmi->tx_size);
1657#endif // CONFIG_EXT_SKIP
Di Chen56586622017-06-09 13:49:44 -07001658 }
1659 }
1660}
1661#endif // ENC_MISMATCH_DEBUG
1662
Yue Chen64550b62017-01-12 12:18:22 -08001663static void write_mbmi_b(AV1_COMP *cpi, const TileInfo *const tile,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001664 aom_writer *w, int mi_row, int mi_col) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001665 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001666 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1667 MODE_INFO *m;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001668 int bh, bw;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001669 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
1670 m = xd->mi[0];
1671
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001672 assert(m->mbmi.sb_type <= cm->seq_params.sb_size ||
Rupert Swarbrick72678572017-08-02 12:05:26 +01001673 (m->mbmi.sb_type >= BLOCK_SIZES && m->mbmi.sb_type < BLOCK_SIZES_ALL));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001674
Jingning Hanc709e1f2016-12-06 14:48:09 -08001675 bh = mi_size_high[m->mbmi.sb_type];
1676 bw = mi_size_wide[m->mbmi.sb_type];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001677
1678 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1679
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001680 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001681#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001682 cm->dependent_horz_tiles,
1683#endif // CONFIG_DEPENDENT_HORZTILES
1684 cm->mi_rows, cm->mi_cols);
Yushin Cho77bba8d2016-11-04 16:36:56 -07001685
Yaowu Xuc27fc142016-08-22 16:08:15 -07001686 if (frame_is_intra_only(cm)) {
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001687#if CONFIG_INTRABC
Hui Su12546aa2017-10-13 16:10:01 -07001688 if (cm->allow_screen_content_tools) {
1689 xd->above_txfm_context =
1690 cm->above_txfm_context + (mi_col << TX_UNIT_WIDE_LOG2);
1691 xd->left_txfm_context = xd->left_txfm_context_buffer +
1692 ((mi_row & MAX_MIB_MASK) << TX_UNIT_HIGH_LOG2);
1693 }
Sebastien Alaiwanfb838772017-10-24 12:02:54 +02001694#endif // CONFIG_INTRABC
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001695 write_mb_modes_kf(cpi, xd,
Alex Converse44c2bad2017-05-11 09:36:10 -07001696#if CONFIG_INTRABC
1697 cpi->td.mb.mbmi_ext,
1698#endif // CONFIG_INTRABC
1699 mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001700 } else {
Jingning Han331662e2017-05-30 17:03:32 -07001701 xd->above_txfm_context =
1702 cm->above_txfm_context + (mi_col << TX_UNIT_WIDE_LOG2);
1703 xd->left_txfm_context = xd->left_txfm_context_buffer +
1704 ((mi_row & MAX_MIB_MASK) << TX_UNIT_HIGH_LOG2);
Angie Chiang38edf682017-02-21 15:13:09 -08001705 // has_subpel_mv_component needs the ref frame buffers set up to look
1706 // up if they are scaled. has_subpel_mv_component is in turn needed by
Yaowu Xuc27fc142016-08-22 16:08:15 -07001707 // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
1708 set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
Zoe Liu85b66462017-04-20 14:28:19 -07001709
Di Chen56586622017-06-09 13:49:44 -07001710#if ENC_MISMATCH_DEBUG
Di Chen56586622017-06-09 13:49:44 -07001711 enc_dump_logs(cpi, mi_row, mi_col);
1712#endif // ENC_MISMATCH_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07001713
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001714 pack_inter_mode_mvs(cpi, mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001715 }
Yue Chen64550b62017-01-12 12:18:22 -08001716}
Yaowu Xuc27fc142016-08-22 16:08:15 -07001717
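// Sketch of the mode-info grid addressing used by write_mbmi_b() and
// write_tokens_b(): MI units are stored row-major with a stride that may be
// larger than mi_cols, so (mi_row, mi_col) maps to one linear offset into
// cm->mi_grid_visible. Illustrative helper only.
static INLINE int mi_grid_offset_sketch(int mi_row, int mi_col, int mi_stride) {
  return mi_row * mi_stride + mi_col;
}
// Example: with mi_stride = 128, the block at (mi_row, mi_col) = (2, 5) sits
// at linear index 261.
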
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001718static void write_inter_txb_coeff(AV1_COMMON *const cm, MACROBLOCK *const x,
1719 MB_MODE_INFO *const mbmi, aom_writer *w,
1720 const TOKENEXTRA **tok,
1721 const TOKENEXTRA *const tok_end,
1722 TOKEN_STATS *token_stats, const int row,
1723 const int col, int *block, const int plane) {
1724 MACROBLOCKD *const xd = &x->e_mbd;
1725 const struct macroblockd_plane *const pd = &xd->plane[plane];
Debargha Mukherjee19619882017-11-22 13:13:14 -08001726 const BLOCK_SIZE bsize = mbmi->sb_type;
1727 const BLOCK_SIZE bsizec =
1728 scale_chroma_bsize(bsize, pd->subsampling_x, pd->subsampling_y);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001729
Debargha Mukherjee5d149e12017-12-14 12:49:51 -08001730 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsizec, pd);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001731
Debargha Mukherjee19619882017-11-22 13:13:14 -08001732 TX_SIZE max_tx_size = get_vartx_max_txsize(
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001733 xd, plane_bsize, pd->subsampling_x || pd->subsampling_y);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001734 const int step =
1735 tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
1736 const int bkw = tx_size_wide_unit[max_tx_size];
1737 const int bkh = tx_size_high_unit[max_tx_size];
1738
1739 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
1740 int mu_blocks_wide = block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
1741 int mu_blocks_high = block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
1742
1743 int blk_row, blk_col;
1744
1745 const int num_4x4_w = block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
1746 const int num_4x4_h = block_size_high[plane_bsize] >> tx_size_wide_log2[0];
1747
Jingning Hancdbc47f2018-01-12 16:21:07 -08001748 const int unit_height =
1749 AOMMIN(mu_blocks_high + (row >> pd->subsampling_y), num_4x4_h);
1750 const int unit_width =
1751 AOMMIN(mu_blocks_wide + (col >> pd->subsampling_x), num_4x4_w);
1752 for (blk_row = row >> pd->subsampling_y; blk_row < unit_height;
1753 blk_row += bkh) {
1754 for (blk_col = col >> pd->subsampling_x; blk_col < unit_width;
1755 blk_col += bkw) {
Sebastien Alaiwancad5ebc2018-02-20 16:18:20 +01001756 pack_txb_tokens(w, cm, x, tok, tok_end, xd, mbmi, plane, plane_bsize,
1757 cm->bit_depth, *block, blk_row, blk_col, max_tx_size,
1758 token_stats);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001759 *block += step;
1760 }
1761 }
1762}
1763
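// Minimal sketch of the tiling arithmetic in write_inter_txb_coeff(): a plane
// block of num_4x4_w x num_4x4_h 4x4 units is covered by transform blocks of
// bkw x bkh units, and each coded transform block advances the running block
// counter by bkw * bkh (the `step` above). The helper only counts how many
// transform blocks one window emits; it is illustrative and unused.
static INLINE int count_txbs_in_window_sketch(int window_w_units,
                                              int window_h_units, int bkw,
                                              int bkh) {
  int count = 0;
  for (int r = 0; r < window_h_units; r += bkh)
    for (int c = 0; c < window_w_units; c += bkw) ++count;
  return count;
}
// Example: a 32x32 luma block (8x8 units) with a 16x16 maximum transform
// (4x4 units) yields 4 transform blocks, and *block advances by 16 per
// transform block.
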
Yue Chen64550b62017-01-12 12:18:22 -08001764static void write_tokens_b(AV1_COMP *cpi, const TileInfo *const tile,
1765 aom_writer *w, const TOKENEXTRA **tok,
1766 const TOKENEXTRA *const tok_end, int mi_row,
1767 int mi_col) {
1768 AV1_COMMON *const cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00001769 const int num_planes = av1_num_planes(cm);
Yue Chen64550b62017-01-12 12:18:22 -08001770 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Wei-Ting Lin1d46d902017-06-26 15:57:18 -07001771 const int mi_offset = mi_row * cm->mi_stride + mi_col;
1772 MODE_INFO *const m = *(cm->mi_grid_visible + mi_offset);
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001773 MB_MODE_INFO *const mbmi = &m->mbmi;
Yue Chen64550b62017-01-12 12:18:22 -08001774 int plane;
1775 int bh, bw;
Yushin Cho258a0242017-03-06 13:53:01 -08001776 MACROBLOCK *const x = &cpi->td.mb;
Yue Chen64550b62017-01-12 12:18:22 -08001777 (void)tok;
1778 (void)tok_end;
Wei-Ting Lin1d46d902017-06-26 15:57:18 -07001779 xd->mi = cm->mi_grid_visible + mi_offset;
Yue Chen64550b62017-01-12 12:18:22 -08001780
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001781 assert(mbmi->sb_type <= cm->seq_params.sb_size ||
Rupert Swarbrick72678572017-08-02 12:05:26 +01001782 (mbmi->sb_type >= BLOCK_SIZES && mbmi->sb_type < BLOCK_SIZES_ALL));
Yue Chen64550b62017-01-12 12:18:22 -08001783
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001784 bh = mi_size_high[mbmi->sb_type];
1785 bw = mi_size_wide[mbmi->sb_type];
Yue Chen64550b62017-01-12 12:18:22 -08001786 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1787
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001788 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08001789#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07001790 cm->dependent_horz_tiles,
1791#endif // CONFIG_DEPENDENT_HORZTILES
1792 cm->mi_rows, cm->mi_cols);
Yue Chen64550b62017-01-12 12:18:22 -08001793
Rupert Swarbrickdcb3cff2017-11-09 15:58:33 +00001794 for (plane = 0; plane < AOMMIN(2, num_planes); ++plane) {
Fangwen Fub3be9262017-03-06 15:34:28 -08001795 const uint8_t palette_size_plane =
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001796 mbmi->palette_mode_info.palette_size[plane];
Zoe Liuf40a9572017-10-13 12:37:19 -07001797#if CONFIG_EXT_SKIP
1798 assert(!mbmi->skip_mode || !palette_size_plane);
1799#endif // CONFIG_EXT_SKIP
Fangwen Fub3be9262017-03-06 15:34:28 -08001800 if (palette_size_plane > 0) {
Alex Converseed37d012017-04-24 11:15:24 -07001801#if CONFIG_INTRABC
1802 assert(mbmi->use_intrabc == 0);
1803#endif
Hui Su8b618f62017-12-20 12:03:35 -08001804 assert(av1_allow_palette(cm->allow_screen_content_tools, mbmi->sb_type));
Fangwen Fub3be9262017-03-06 15:34:28 -08001805 int rows, cols;
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001806 av1_get_block_dimensions(mbmi->sb_type, plane, xd, NULL, NULL, &rows,
Fangwen Fub3be9262017-03-06 15:34:28 -08001807 &cols);
1808 assert(*tok < tok_end);
Sarah Parker99e7daa2017-08-29 10:30:13 -07001809 pack_map_tokens(w, tok, palette_size_plane, rows * cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001810 }
Fangwen Fub3be9262017-03-06 15:34:28 -08001811 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001812
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001813 if (!mbmi->skip) {
Jingning Hanad54a982018-01-12 14:40:29 -08001814 if (!is_inter_block(mbmi))
1815 av1_write_coeffs_mb(cm, x, mi_row, mi_col, w, mbmi->sb_type);
1816
Jingning Hancdbc47f2018-01-12 16:21:07 -08001817 if (is_inter_block(mbmi)) {
1818 int block[MAX_MB_PLANE] = { 0 };
1819 const struct macroblockd_plane *const y_pd = &xd->plane[0];
1820 const BLOCK_SIZE plane_bsize = get_plane_block_size(mbmi->sb_type, y_pd);
Jingning Han42a0fb32016-10-31 10:43:31 -07001821 const int num_4x4_w =
1822 block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
1823 const int num_4x4_h =
1824 block_size_high[plane_bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001825 int row, col;
Angie Chiangd4022822016-11-02 18:30:25 -07001826 TOKEN_STATS token_stats;
Angie Chiangd02001d2016-11-06 15:31:49 -08001827 init_token_stats(&token_stats);
Angie Chiangd4022822016-11-02 18:30:25 -07001828
Jingning Hancdbc47f2018-01-12 16:21:07 -08001829 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, y_pd);
Jingning Hanc2b797f2017-07-19 09:37:11 -07001830 int mu_blocks_wide =
1831 block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
1832 int mu_blocks_high =
1833 block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
1834
1835 mu_blocks_wide = AOMMIN(num_4x4_w, mu_blocks_wide);
1836 mu_blocks_high = AOMMIN(num_4x4_h, mu_blocks_high);
1837
Jingning Hancdbc47f2018-01-12 16:21:07 -08001838 for (row = 0; row < num_4x4_h; row += mu_blocks_high) {
1839 for (col = 0; col < num_4x4_w; col += mu_blocks_wide) {
1840 for (plane = 0; plane < num_planes && is_inter_block(mbmi); ++plane) {
1841 const struct macroblockd_plane *const pd = &xd->plane[plane];
1842 if (!is_chroma_reference(mi_row, mi_col, mbmi->sb_type,
1843 pd->subsampling_x, pd->subsampling_y)) {
Jingning Hancdbc47f2018-01-12 16:21:07 -08001844 continue;
1845 }
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001846 write_inter_txb_coeff(cm, x, mbmi, w, tok, tok_end, &token_stats,
Jingning Hancdbc47f2018-01-12 16:21:07 -08001847 row, col, &block[plane], plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001848 }
1849 }
Angie Chiangd02001d2016-11-06 15:31:49 -08001850#if CONFIG_RD_DEBUG
Angie Chiang3963d632016-11-10 18:41:40 -08001851 if (mbmi->sb_type >= BLOCK_8X8 &&
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001852 rd_token_stats_mismatch(&mbmi->rd_stats, &token_stats, plane)) {
Angie Chiangd02001d2016-11-06 15:31:49 -08001853 dump_mode_info(m);
1854 assert(0);
1855 }
Jingning Hanfe45b212016-11-22 10:30:23 -08001856#endif // CONFIG_RD_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07001857 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001858 }
1859 }
1860}
1861
Yue Chen64550b62017-01-12 12:18:22 -08001862static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
1863 aom_writer *w, const TOKENEXTRA **tok,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001864 const TOKENEXTRA *const tok_end, int mi_row,
1865 int mi_col) {
1866 write_mbmi_b(cpi, tile, w, mi_row, mi_col);
Jingning Hanf5a4d3b2017-08-27 23:01:19 -07001867
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001868 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
Yue Chen64550b62017-01-12 12:18:22 -08001869}
1870
Yaowu Xuf883b422016-08-30 14:01:10 -07001871static void write_partition(const AV1_COMMON *const cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001872 const MACROBLOCKD *const xd, int hbs, int mi_row,
1873 int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize,
Yaowu Xuf883b422016-08-30 14:01:10 -07001874 aom_writer *w) {
Alex Converse55c6bde2017-01-12 15:55:31 -08001875 const int is_partition_point = bsize >= BLOCK_8X8;
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00001876
Jingning Hanbf9c6b72016-12-14 14:50:45 -08001877 if (!is_partition_point) return;
1878
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001879 const int has_rows = (mi_row + hbs) < cm->mi_rows;
1880 const int has_cols = (mi_col + hbs) < cm->mi_cols;
1881 const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
1882 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1883
1884 if (!has_rows && !has_cols) {
1885 assert(p == PARTITION_SPLIT);
1886 return;
1887 }
1888
Yaowu Xuc27fc142016-08-22 16:08:15 -07001889 if (has_rows && has_cols) {
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001890 aom_write_symbol(w, p, ec_ctx->partition_cdf[ctx],
1891 partition_cdf_length(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001892 } else if (!has_rows && has_cols) {
1893 assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001894 assert(bsize > BLOCK_8X8);
1895 aom_cdf_prob cdf[2];
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001896 partition_gather_vert_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001897 aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001898 } else {
1899 assert(has_rows && !has_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001900 assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001901 assert(bsize > BLOCK_8X8);
1902 aom_cdf_prob cdf[2];
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001903 partition_gather_horz_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001904 aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001905 }
1906}
1907
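// Sketch of the "gather alike" step used by write_partition() when a block
// straddles the right or bottom frame edge: only PARTITION_SPLIT or the one
// rectangular partition that still fits is legal, so the multi-symbol
// partition CDF is collapsed to a single binary probability by summing the
// mass of every partition type that behaves like a split in the constrained
// direction. Which extended partition types belong to that subset is left to
// the caller here; the helper only illustrates the probability gathering.
static INLINE double gather_alike_probability_sketch(const double *pmf,
                                                     const int *is_alike,
                                                     int num_types) {
  double p = 0.0;
  for (int i = 0; i < num_types; ++i)
    if (is_alike[i]) p += pmf[i];
  return p;  // probability of the "acts like a split" branch
}
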
Yaowu Xuf883b422016-08-30 14:01:10 -07001908static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile,
1909 aom_writer *const w, const TOKENEXTRA **tok,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001910 const TOKENEXTRA *const tok_end, int mi_row,
1911 int mi_col, BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001912 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001913 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Jingning Hanc709e1f2016-12-06 14:48:09 -08001914 const int hbs = mi_size_wide[bsize] / 2;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001915#if CONFIG_EXT_PARTITION_TYPES
1916 const int quarter_step = mi_size_wide[bsize] / 4;
1917 int i;
Rupert Swarbrick3dd33912017-09-12 14:24:11 +01001918#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07001919 const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
1920 const BLOCK_SIZE subsize = get_subsize(bsize, partition);
Jingning Han52261842016-12-14 12:17:49 -08001921
Yaowu Xuc27fc142016-08-22 16:08:15 -07001922 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
1923
Imdad Sardharwalla7d2e5c92018-01-05 18:41:00 +00001924#if CONFIG_LOOP_RESTORATION
Debargha Mukherjeea78c8f52018-01-31 11:14:38 -08001925 const int num_planes = av1_num_planes(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00001926 for (int plane = 0; plane < num_planes; ++plane) {
Imdad Sardharwalla7d2e5c92018-01-05 18:41:00 +00001927 int rcol0, rcol1, rrow0, rrow1, tile_tl_idx;
1928 if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
1929 &rcol0, &rcol1, &rrow0, &rrow1,
1930 &tile_tl_idx)) {
1931 const int rstride = cm->rst_info[plane].horz_units_per_tile;
1932 for (int rrow = rrow0; rrow < rrow1; ++rrow) {
1933 for (int rcol = rcol0; rcol < rcol1; ++rcol) {
1934 const int rtile_idx = tile_tl_idx + rcol + rrow * rstride;
1935 const RestorationUnitInfo *rui =
1936 &cm->rst_info[plane].unit_info[rtile_idx];
1937 loop_restoration_write_sb_coeffs(cm, xd, rui, w, plane);
1938 }
1939 }
1940 }
1941 }
1942#endif
1943
Yaowu Xuc27fc142016-08-22 16:08:15 -07001944 write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001945 switch (partition) {
1946 case PARTITION_NONE:
1947 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1948 break;
1949 case PARTITION_HORZ:
1950 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1951 if (mi_row + hbs < cm->mi_rows)
1952 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
1953 break;
1954 case PARTITION_VERT:
1955 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1956 if (mi_col + hbs < cm->mi_cols)
1957 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
1958 break;
1959 case PARTITION_SPLIT:
1960 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
1961 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs, subsize);
1962 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col, subsize);
1963 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs,
1964 subsize);
1965 break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001966#if CONFIG_EXT_PARTITION_TYPES
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001967 case PARTITION_HORZ_A:
1968 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1969 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
1970 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
1971 break;
1972 case PARTITION_HORZ_B:
1973 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1974 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
1975 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
1976 break;
1977 case PARTITION_VERT_A:
1978 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1979 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
1980 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
1981 break;
1982 case PARTITION_VERT_B:
1983 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
1984 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
1985 write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
1986 break;
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001987 case PARTITION_HORZ_4:
1988 for (i = 0; i < 4; ++i) {
1989 int this_mi_row = mi_row + i * quarter_step;
1990 if (i > 0 && this_mi_row >= cm->mi_rows) break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001991
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001992 write_modes_b(cpi, tile, w, tok, tok_end, this_mi_row, mi_col);
1993 }
1994 break;
1995 case PARTITION_VERT_4:
1996 for (i = 0; i < 4; ++i) {
1997 int this_mi_col = mi_col + i * quarter_step;
1998 if (i > 0 && this_mi_col >= cm->mi_cols) break;
Rupert Swarbrick93c39e92017-07-12 11:11:02 +01001999
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002000 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, this_mi_col);
2001 }
2002 break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002003#endif // CONFIG_EXT_PARTITION_TYPES
Debargha Mukherjeeedced252017-10-20 00:02:00 -07002004 default: assert(0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002005 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002006
2007// update partition context
2008#if CONFIG_EXT_PARTITION_TYPES
2009 update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
2010#else
2011 if (bsize >= BLOCK_8X8 &&
2012 (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
2013 update_partition_context(xd, mi_row, mi_col, subsize, bsize);
David Barkerf8935c92016-10-26 14:54:06 +01002014#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002015}
2016
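// Toy sketch of the PARTITION_SPLIT recursion in write_modes_sb(): each split
// halves the block in both directions and visits the four quadrants at MI
// offsets (0, 0), (0, hbs), (hbs, 0) and (hbs, hbs), in that order. Quadrants
// that start outside the frame are skipped by the early return at the top of
// write_modes_sb(). The helper exists only to show the traversal order.
static INLINE void visit_split_quadrants_sketch(int mi_row, int mi_col, int hbs,
                                                void (*visit)(int mi_row,
                                                              int mi_col)) {
  visit(mi_row, mi_col);              // top-left
  visit(mi_row, mi_col + hbs);        // top-right
  visit(mi_row + hbs, mi_col);        // bottom-left
  visit(mi_row + hbs, mi_col + hbs);  // bottom-right
}
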
static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile,
                        aom_writer *const w, const TOKENEXTRA **tok,
                        const TOKENEXTRA *const tok_end) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  const int mi_row_start = tile->mi_row_start;
  const int mi_row_end = tile->mi_row_end;
  const int mi_col_start = tile->mi_col_start;
  const int mi_col_end = tile->mi_col_end;
  int mi_row, mi_col;

#if CONFIG_DEPENDENT_HORZTILES
  if (!cm->dependent_horz_tiles || mi_row_start == 0 ||
      tile->tg_horz_boundary) {
    av1_zero_above_context(cm, mi_col_start, mi_col_end);
  }
#else
  av1_zero_above_context(cm, mi_col_start, mi_col_end);
#endif
  if (cpi->common.delta_q_present_flag) {
    xd->prev_qindex = cpi->common.base_qindex;
#if CONFIG_EXT_DELTA_Q
    if (cpi->common.delta_lf_present_flag) {
#if CONFIG_LOOPFILTER_LEVEL
      const int frame_lf_count =
          av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
      for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id)
        xd->prev_delta_lf[lf_id] = 0;
#endif  // CONFIG_LOOPFILTER_LEVEL
      xd->prev_delta_lf_from_base = 0;
    }
#endif  // CONFIG_EXT_DELTA_Q
  }

  for (mi_row = mi_row_start; mi_row < mi_row_end;
       mi_row += cm->seq_params.mib_size) {
    av1_zero_left_context(xd);

    for (mi_col = mi_col_start; mi_col < mi_col_end;
         mi_col += cm->seq_params.mib_size) {
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col,
                     cm->seq_params.sb_size);
    }
  }
}

#if CONFIG_LOOP_RESTORATION
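/* Informal summary of the frame-level restoration signalling written by
 * encode_restoration_mode() below: each plane's frame_restoration_type is
 * coded as two flag bits, which (reading off the switch statement) map as
 *   RESTORE_NONE        -> 0, 0
 *   RESTORE_WIENER      -> 1, 0
 *   RESTORE_SGRPROJ     -> 1, 1
 *   RESTORE_SWITCHABLE  -> 0, 1
 * followed, whenever any plane enables restoration, by the luma restoration
 * unit size relative to the superblock size and an optional flag for a
 * smaller chroma unit size on subsampled streams.
 */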
static void encode_restoration_mode(AV1_COMMON *cm,
                                    struct aom_write_bit_buffer *wb) {
  const int num_planes = av1_num_planes(cm);
#if CONFIG_INTRABC

  if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
#endif  // CONFIG_INTRABC
  int all_none = 1, chroma_none = 1;
  for (int p = 0; p < num_planes; ++p) {
    RestorationInfo *rsi = &cm->rst_info[p];
    if (rsi->frame_restoration_type != RESTORE_NONE) {
      all_none = 0;
      chroma_none &= p == 0;
    }
    switch (rsi->frame_restoration_type) {
      case RESTORE_NONE:
        aom_wb_write_bit(wb, 0);
        aom_wb_write_bit(wb, 0);
        break;
      case RESTORE_WIENER:
        aom_wb_write_bit(wb, 1);
        aom_wb_write_bit(wb, 0);
        break;
      case RESTORE_SGRPROJ:
        aom_wb_write_bit(wb, 1);
        aom_wb_write_bit(wb, 1);
        break;
      case RESTORE_SWITCHABLE:
        aom_wb_write_bit(wb, 0);
        aom_wb_write_bit(wb, 1);
        break;
      default: assert(0);
    }
  }
  if (!all_none) {
#if CONFIG_EXT_PARTITION
    assert(cm->seq_params.sb_size == BLOCK_64X64 ||
           cm->seq_params.sb_size == BLOCK_128X128);
    const int sb_size = cm->seq_params.sb_size == BLOCK_128X128 ? 128 : 64;
#else
    assert(cm->seq_params.sb_size == BLOCK_64X64);
    const int sb_size = 64;
#endif

    RestorationInfo *rsi = &cm->rst_info[0];

    assert(rsi->restoration_unit_size >= sb_size);
    assert(RESTORATION_TILESIZE_MAX == 256);

    if (sb_size == 64) {
      aom_wb_write_bit(wb, rsi->restoration_unit_size > 64);
    }
    if (rsi->restoration_unit_size > 64) {
      aom_wb_write_bit(wb, rsi->restoration_unit_size > 128);
    }
  }

  if (num_planes > 1) {
    int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
    if (s && !chroma_none) {
      aom_wb_write_bit(wb, cm->rst_info[1].restoration_unit_size !=
                               cm->rst_info[0].restoration_unit_size);
      assert(cm->rst_info[1].restoration_unit_size ==
                 cm->rst_info[0].restoration_unit_size ||
             cm->rst_info[1].restoration_unit_size ==
                 (cm->rst_info[0].restoration_unit_size >> s));
      assert(cm->rst_info[2].restoration_unit_size ==
             cm->rst_info[1].restoration_unit_size);
    } else if (!s) {
      assert(cm->rst_info[1].restoration_unit_size ==
             cm->rst_info[0].restoration_unit_size);
      assert(cm->rst_info[2].restoration_unit_size ==
             cm->rst_info[1].restoration_unit_size);
    }
  }
}

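/* Note on the Wiener coefficient coding below: each transmitted tap uses
 * aom_write_primitive_refsubexpfin(), a finite subexponential code taken
 * relative to the previously signalled filter for the same plane
 * (ref_wiener_info), so filters that change little from unit to unit are
 * cheap to code. Only taps 0..2 of each half-filter are transmitted (the
 * remaining taps follow from the filter's symmetry and normalisation), and
 * tap 0 is skipped for the smaller chroma window, where it is asserted to be
 * zero. This is a descriptive summary, not additional normative behaviour.
 */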
static void write_wiener_filter(int wiener_win, const WienerInfo *wiener_info,
                                WienerInfo *ref_wiener_info, aom_writer *wb) {
  if (wiener_win == WIENER_WIN)
    aom_write_primitive_refsubexpfin(
        wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
        WIENER_FILT_TAP0_SUBEXP_K,
        ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV,
        wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV);
  else
    assert(wiener_info->vfilter[0] == 0 &&
           wiener_info->vfilter[WIENER_WIN - 1] == 0);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
      WIENER_FILT_TAP1_SUBEXP_K,
      ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV,
      wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
      WIENER_FILT_TAP2_SUBEXP_K,
      ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV,
      wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV);
  if (wiener_win == WIENER_WIN)
    aom_write_primitive_refsubexpfin(
        wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
        WIENER_FILT_TAP0_SUBEXP_K,
        ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV,
        wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV);
  else
    assert(wiener_info->hfilter[0] == 0 &&
           wiener_info->hfilter[WIENER_WIN - 1] == 0);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
      WIENER_FILT_TAP1_SUBEXP_K,
      ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV,
      wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
      WIENER_FILT_TAP2_SUBEXP_K,
      ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV,
      wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV);
  memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
}

static void write_sgrproj_filter(const SgrprojInfo *sgrproj_info,
                                 SgrprojInfo *ref_sgrproj_info,
                                 aom_writer *wb) {
  aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS);
  aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1,
                                   SGRPROJ_PRJ_SUBEXP_K,
                                   ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
                                   sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0);
  aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1,
                                   SGRPROJ_PRJ_SUBEXP_K,
                                   ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
                                   sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1);
  memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
}

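/* Informal note on the per-unit syntax emitted by
 * loop_restoration_write_sb_coeffs() below: when the frame-level type is
 * RESTORE_SWITCHABLE, the unit's restoration type is coded as a symbol over
 * RESTORE_SWITCHABLE_TYPES using switchable_restore_cdf; when the frame-level
 * type is RESTORE_WIENER or RESTORE_SGRPROJ, only a binary "this unit uses
 * it or not" flag is coded (wiener_restore_cdf / sgrproj_restore_cdf). The
 * filter parameters follow whenever the unit is not RESTORE_NONE.
 */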
static void loop_restoration_write_sb_coeffs(const AV1_COMMON *const cm,
                                             MACROBLOCKD *xd,
                                             const RestorationUnitInfo *rui,
                                             aom_writer *const w, int plane) {
  const RestorationInfo *rsi = cm->rst_info + plane;
  RestorationType frame_rtype = rsi->frame_restoration_type;
  if (frame_rtype == RESTORE_NONE) return;

  const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
  WienerInfo *wiener_info = xd->wiener_info + plane;
  SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;
  RestorationType unit_rtype = rui->restoration_type;

  if (frame_rtype == RESTORE_SWITCHABLE) {
    aom_write_symbol(w, unit_rtype, xd->tile_ctx->switchable_restore_cdf,
                     RESTORE_SWITCHABLE_TYPES);
    switch (unit_rtype) {
      case RESTORE_WIENER:
        write_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, w);
        break;
      case RESTORE_SGRPROJ:
        write_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, w);
        break;
      default: assert(unit_rtype == RESTORE_NONE); break;
    }
  } else if (frame_rtype == RESTORE_WIENER) {
    aom_write_symbol(w, unit_rtype != RESTORE_NONE,
                     xd->tile_ctx->wiener_restore_cdf, 2);
    if (unit_rtype != RESTORE_NONE) {
      write_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, w);
    }
  } else if (frame_rtype == RESTORE_SGRPROJ) {
    aom_write_symbol(w, unit_rtype != RESTORE_NONE,
                     xd->tile_ctx->sgrproj_restore_cdf, 2);
    if (unit_rtype != RESTORE_NONE) {
      write_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, w);
    }
  }
}
#endif  // CONFIG_LOOP_RESTORATION

static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
  const int num_planes = av1_num_planes(cm);
#if CONFIG_INTRABC
  if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
#endif  // CONFIG_INTRABC
  int i;
  struct loopfilter *lf = &cm->lf;

// Encode the loop filter level and type
#if CONFIG_LOOPFILTER_LEVEL
  aom_wb_write_literal(wb, lf->filter_level[0], 6);
  aom_wb_write_literal(wb, lf->filter_level[1], 6);
  if (num_planes > 1) {
    if (lf->filter_level[0] || lf->filter_level[1]) {
      aom_wb_write_literal(wb, lf->filter_level_u, 6);
      aom_wb_write_literal(wb, lf->filter_level_v, 6);
    }
  }
#else
  aom_wb_write_literal(wb, lf->filter_level, 6);
#endif  // CONFIG_LOOPFILTER_LEVEL
  aom_wb_write_literal(wb, lf->sharpness_level, 3);

  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
  aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);

  if (lf->mode_ref_delta_enabled) {
    aom_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
      for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
        const int delta = lf->ref_deltas[i];
        const int changed = delta != lf->last_ref_deltas[i];
        aom_wb_write_bit(wb, changed);
        if (changed) {
          lf->last_ref_deltas[i] = delta;
          aom_wb_write_inv_signed_literal(wb, delta, 6);
        }
      }

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
        const int delta = lf->mode_deltas[i];
        const int changed = delta != lf->last_mode_deltas[i];
        aom_wb_write_bit(wb, changed);
        if (changed) {
          lf->last_mode_deltas[i] = delta;
          aom_wb_write_inv_signed_literal(wb, delta, 6);
        }
      }
    }
  }
}

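/* Informal summary of encode_cdef() below: the CDEF header codes the primary
 * damping minus 3 in 2 bits (the secondary damping is asserted to be equal),
 * then cdef_bits in 2 bits, and then one luma strength of CDEF_STRENGTH_BITS
 * for each of the nb_cdef_strengths entries (nominally 1 << cdef_bits), with
 * a matching chroma strength when the stream has more than one plane.
 */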
static void encode_cdef(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
  const int num_planes = av1_num_planes(cm);
#if CONFIG_INTRABC
  if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
#endif  // CONFIG_INTRABC
  int i;
  aom_wb_write_literal(wb, cm->cdef_pri_damping - 3, 2);
  assert(cm->cdef_pri_damping == cm->cdef_sec_damping);
  aom_wb_write_literal(wb, cm->cdef_bits, 2);
  for (i = 0; i < cm->nb_cdef_strengths; i++) {
    aom_wb_write_literal(wb, cm->cdef_strengths[i], CDEF_STRENGTH_BITS);
    if (num_planes > 1)
      aom_wb_write_literal(wb, cm->cdef_uv_strengths[i], CDEF_STRENGTH_BITS);
  }
}

static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) {
  if (delta_q != 0) {
    aom_wb_write_bit(wb, 1);
    aom_wb_write_inv_signed_literal(wb, delta_q, 6);
  } else {
    aom_wb_write_bit(wb, 0);
  }
}

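/* Informal layout of the quantization header written by
 * encode_quantization() below: base_qindex (QINDEX_BITS), then the luma DC
 * delta, then for multi-plane streams the chroma DC/AC deltas (with an extra
 * flag and a second pair of values when separate U/V deltas are enabled),
 * and finally the optional quantization-matrix fields under CONFIG_AOM_QM.
 * Each delta is coded by write_delta_q() above as a presence bit plus a
 * signed literal when non-zero.
 */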
static void encode_quantization(const AV1_COMMON *const cm,
                                struct aom_write_bit_buffer *wb) {
  const int num_planes = av1_num_planes(cm);

  aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  if (num_planes > 1) {
    int diff_uv_delta = (cm->u_dc_delta_q != cm->v_dc_delta_q) ||
                        (cm->u_ac_delta_q != cm->v_ac_delta_q);
    if (cm->separate_uv_delta_q) aom_wb_write_bit(wb, diff_uv_delta);
    write_delta_q(wb, cm->u_dc_delta_q);
    write_delta_q(wb, cm->u_ac_delta_q);
    if (diff_uv_delta) {
      write_delta_q(wb, cm->v_dc_delta_q);
      write_delta_q(wb, cm->v_ac_delta_q);
    }
  }
#if CONFIG_AOM_QM
  aom_wb_write_bit(wb, cm->using_qmatrix);
  if (cm->using_qmatrix) {
#if CONFIG_AOM_QM_EXT
    aom_wb_write_literal(wb, cm->qm_y, QM_LEVEL_BITS);
    aom_wb_write_literal(wb, cm->qm_u, QM_LEVEL_BITS);
    if (!cm->separate_uv_delta_q)
      assert(cm->qm_u == cm->qm_v);
    else
      aom_wb_write_literal(wb, cm->qm_v, QM_LEVEL_BITS);
#else
    aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS);
    aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS);
#endif
  }
#endif
}

static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
                                struct aom_write_bit_buffer *wb) {
  int i, j;
  const struct segmentation *seg = &cm->seg;

  aom_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled) return;

  // Segmentation map
  if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
    aom_wb_write_bit(wb, seg->update_map);
  } else {
    assert(seg->update_map == 1);
  }
  if (seg->update_map) {
    // Select the coding strategy (temporal or spatial)
    if (!cm->error_resilient_mode) av1_choose_segmap_coding_method(cm, xd);

    // Write out the chosen coding method.
    if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
      aom_wb_write_bit(wb, seg->temporal_update);
    } else {
      assert(seg->temporal_update == 0);
    }
  }

#if CONFIG_SPATIAL_SEGMENTATION
  cm->preskip_segid = 0;
#endif

  // Segmentation data
  aom_wb_write_bit(wb, seg->update_data);
  if (seg->update_data) {
    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        const int active = segfeature_active(seg, i, j);
        aom_wb_write_bit(wb, active);
        if (active) {
#if CONFIG_SPATIAL_SEGMENTATION
          cm->preskip_segid |= j >= SEG_LVL_REF_FRAME;
          cm->last_active_segid = i;
#endif
          const int data_max = av1_seg_feature_data_max(j);
          const int data_min = -data_max;
          const int ubits = get_unsigned_bits(data_max);
          const int data = clamp(get_segdata(seg, i, j), data_min, data_max);

          if (av1_is_segfeature_signed(j)) {
            aom_wb_write_inv_signed_literal(wb, data, ubits);
          } else {
            aom_wb_write_literal(wb, data, ubits);
          }
        }
      }
    }
  }
}

static void write_tx_mode(AV1_COMMON *cm, TX_MODE *mode,
                          struct aom_write_bit_buffer *wb) {
  if (cm->all_lossless) {
    *mode = ONLY_4X4;
    return;
  }
  aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
}

static void write_frame_interp_filter(InterpFilter filter,
                                      struct aom_write_bit_buffer *wb) {
  aom_wb_write_bit(wb, filter == SWITCHABLE);
  if (filter != SWITCHABLE)
    aom_wb_write_literal(wb, filter, LOG_SWITCHABLE_FILTERS);
}

static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) {
  if (cm->interp_filter == SWITCHABLE) {
    // Check to see if only one of the filters is actually used
    int count[SWITCHABLE_FILTERS];
    int i, j, c = 0;
    for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
      count[i] = 0;
      for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
        count[i] += counts->switchable_interp[j][i];
      c += (count[i] > 0);
    }
    if (c == 1) {
      // Only one filter is used. So set the filter at frame level
      for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
        if (count[i]) {
          if (i == EIGHTTAP_REGULAR || WARP_WM_NEIGHBORS_WITH_OBMC)
            cm->interp_filter = i;
          break;
        }
      }
    }
  }
}

#if CONFIG_MAX_TILE

// Same function as write_uniform but writing to the uncompressed header wb.
static void wb_write_uniform(struct aom_write_bit_buffer *wb, int n, int v) {
  const int l = get_unsigned_bits(n);
  const int m = (1 << l) - n;
  if (l == 0) return;
  if (v < m) {
    aom_wb_write_literal(wb, v, l - 1);
  } else {
    aom_wb_write_literal(wb, m + ((v - m) >> 1), l - 1);
    aom_wb_write_literal(wb, (v - m) & 1, 1);
  }
}

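/* Illustrative example of the quasi-uniform code in wb_write_uniform() above
 * (worked by hand, not produced by any particular encode): for n = 5,
 * get_unsigned_bits(5) = 3 and m = (1 << 3) - 5 = 3, so
 *   v = 0, 1, 2  -> written as a 2-bit literal (00, 01, 10)
 *   v = 3, 4     -> written as the 2-bit literal 11 plus one extra bit
 * i.e. the first m symbols get l - 1 bits and the rest get l bits, the usual
 * near-uniform code for an alphabet whose size is not a power of two.
 */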
static void write_tile_info_max_tile(const AV1_COMMON *const cm,
                                     struct aom_write_bit_buffer *wb) {
  int width_mi = ALIGN_POWER_OF_TWO(cm->mi_cols, cm->seq_params.mib_size_log2);
  int height_mi = ALIGN_POWER_OF_TWO(cm->mi_rows, cm->seq_params.mib_size_log2);
  int width_sb = width_mi >> cm->seq_params.mib_size_log2;
  int height_sb = height_mi >> cm->seq_params.mib_size_log2;
  int size_sb, i;

  aom_wb_write_bit(wb, cm->uniform_tile_spacing_flag);

  if (cm->uniform_tile_spacing_flag) {
    // Uniform spaced tiles with power-of-two number of rows and columns
    // tile columns
    int ones = cm->log2_tile_cols - cm->min_log2_tile_cols;
    while (ones--) {
      aom_wb_write_bit(wb, 1);
    }
    if (cm->log2_tile_cols < cm->max_log2_tile_cols) {
      aom_wb_write_bit(wb, 0);
    }

    // rows
    ones = cm->log2_tile_rows - cm->min_log2_tile_rows;
    while (ones--) {
      aom_wb_write_bit(wb, 1);
    }
    if (cm->log2_tile_rows < cm->max_log2_tile_rows) {
      aom_wb_write_bit(wb, 0);
    }
  } else {
    // Explicit tiles with configurable tile widths and heights
    // columns
    for (i = 0; i < cm->tile_cols; i++) {
      size_sb = cm->tile_col_start_sb[i + 1] - cm->tile_col_start_sb[i];
      wb_write_uniform(wb, AOMMIN(width_sb, MAX_TILE_WIDTH_SB), size_sb - 1);
      width_sb -= size_sb;
    }
    assert(width_sb == 0);

    // rows
    for (i = 0; i < cm->tile_rows; i++) {
      size_sb = cm->tile_row_start_sb[i + 1] - cm->tile_row_start_sb[i];
      wb_write_uniform(wb, AOMMIN(height_sb, cm->max_tile_height_sb),
                       size_sb - 1);
      height_sb -= size_sb;
    }
    assert(height_sb == 0);
  }
}
#endif

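/* Informal example of the unary coding used for the uniform tile spacing
 * above (and for the tile-column count in the legacy path of
 * write_tile_info() below): the tile count is signalled as
 * (log2_tile_cols - min_log2_tile_cols) one-bits followed by a terminating
 * zero-bit unless the maximum has been reached. With min_log2_tile_cols = 0
 * and log2_tile_cols = 2 (values assumed purely for illustration), the bits
 * 1, 1, 0 would be written.
 */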
static void write_tile_info(const AV1_COMMON *const cm,
                            struct aom_write_bit_buffer *wb) {
#if CONFIG_EXT_TILE
  if (cm->large_scale_tile) {
    const int tile_width =
        ALIGN_POWER_OF_TWO(cm->tile_width, cm->seq_params.mib_size_log2) >>
        cm->seq_params.mib_size_log2;
    const int tile_height =
        ALIGN_POWER_OF_TWO(cm->tile_height, cm->seq_params.mib_size_log2) >>
        cm->seq_params.mib_size_log2;

    assert(tile_width > 0);
    assert(tile_height > 0);

// Write the tile sizes
#if CONFIG_EXT_PARTITION
    if (cm->seq_params.sb_size == BLOCK_128X128) {
      assert(tile_width <= 32);
      assert(tile_height <= 32);
      aom_wb_write_literal(wb, tile_width - 1, 5);
      aom_wb_write_literal(wb, tile_height - 1, 5);
    } else {
#endif  // CONFIG_EXT_PARTITION
      assert(tile_width <= 64);
      assert(tile_height <= 64);
      aom_wb_write_literal(wb, tile_width - 1, 6);
      aom_wb_write_literal(wb, tile_height - 1, 6);
#if CONFIG_EXT_PARTITION
    }
#endif  // CONFIG_EXT_PARTITION
  } else {
#endif  // CONFIG_EXT_TILE

#if CONFIG_MAX_TILE
    write_tile_info_max_tile(cm, wb);
#else
    int min_log2_tile_cols, max_log2_tile_cols, ones;
    av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

    // columns
    ones = cm->log2_tile_cols - min_log2_tile_cols;
    while (ones--) aom_wb_write_bit(wb, 1);

    if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0);

    // rows
    aom_wb_write_bit(wb, cm->log2_tile_rows != 0);
    if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1);
#endif
#if CONFIG_DEPENDENT_HORZTILES
    if (cm->tile_rows > 1) aom_wb_write_bit(wb, cm->dependent_horz_tiles);
#endif
#if CONFIG_EXT_TILE
  }
#endif  // CONFIG_EXT_TILE

#if CONFIG_LOOPFILTERING_ACROSS_TILES
#if CONFIG_LOOPFILTERING_ACROSS_TILES_EXT
  if (cm->tile_cols > 1) {
    aom_wb_write_bit(wb, cm->loop_filter_across_tiles_v_enabled);
  }
  if (cm->tile_rows > 1) {
    aom_wb_write_bit(wb, cm->loop_filter_across_tiles_h_enabled);
  }
#else
  if (cm->tile_cols * cm->tile_rows > 1)
    aom_wb_write_bit(wb, cm->loop_filter_across_tiles_enabled);
#endif  // CONFIG_LOOPFILTERING_ACROSS_TILES_EXT
#endif  // CONFIG_LOOPFILTERING_ACROSS_TILES

#if CONFIG_TILE_INFO_FIRST
  // write the tile length code (Always 4 bytes for now)
  aom_wb_write_literal(wb, 3, 2);
#endif
}

#if USE_GF16_MULTI_LAYER
static int get_refresh_mask_gf16(AV1_COMP *cpi) {
  int refresh_mask = 0;

  if (cpi->refresh_last_frame || cpi->refresh_golden_frame ||
      cpi->refresh_bwd_ref_frame || cpi->refresh_alt2_ref_frame ||
      cpi->refresh_alt_ref_frame) {
    assert(cpi->refresh_fb_idx >= 0 && cpi->refresh_fb_idx < REF_FRAMES);
    refresh_mask |= (1 << cpi->refresh_fb_idx);
  }

  return refresh_mask;
}
#endif  // USE_GF16_MULTI_LAYER

static int get_refresh_mask(AV1_COMP *cpi) {
  int refresh_mask = 0;
#if USE_GF16_MULTI_LAYER
  if (cpi->rc.baseline_gf_interval == 16) return get_refresh_mask_gf16(cpi);
#endif  // USE_GF16_MULTI_LAYER

  // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
  // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
  // the 3 LAST reference frames will be updated accordingly, i.e.:
  // (1) The original virtual index for LAST3_FRAME will become the new virtual
  //     index for LAST_FRAME; and
  // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
  //     shifted and become the new virtual indexes for LAST2_FRAME and
  //     LAST3_FRAME.
  refresh_mask |=
      (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]);

  refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
  refresh_mask |= (cpi->refresh_alt2_ref_frame << cpi->alt2_fb_idx);

  if (av1_preserve_existing_gf(cpi)) {
    // We have decided to preserve the previously existing golden frame as our
    // new ARF frame. However, in the short term we leave it in the GF slot and,
    // if we're updating the GF with the current decoded frame, we save it
    // instead to the ARF slot.
    // Later, in the function av1_encoder.c:av1_update_reference_frames() we
    // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
    // there so that it can be done outside of the recode loop.
    // Note: This is highly specific to the use of ARF as a forward reference,
    // and this needs to be generalized as other uses are implemented
    // (like RTC/temporal scalability).
    return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx);
  } else {
    const int arf_idx = cpi->alt_fb_idx;
    return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
           (cpi->refresh_alt_ref_frame << arf_idx);
  }
}

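/* Small illustrative sketch of the bitmask built by get_refresh_mask() above
 * (slot numbers assumed purely for illustration, not taken from a real
 * encode): each physical reference buffer contributes one bit, so with
 * cpi->lst_fb_idxes[LAST_REF_FRAMES - 1] == 0 and cpi->gld_fb_idx == 3,
 * refreshing LAST and GOLDEN would set bits 0 and 3 and yield a mask of
 * 0x09. The decoder uses this mask to decide which slots of the reference
 * pool are overwritten by the current frame.
 */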
#if CONFIG_EXT_TILE
static INLINE int find_identical_tile(
    const int tile_row, const int tile_col,
    TileBufferEnc (*const tile_buffers)[1024]) {
  const MV32 candidate_offset[1] = { { 1, 0 } };
  const uint8_t *const cur_tile_data =
      tile_buffers[tile_row][tile_col].data + 4;
  const size_t cur_tile_size = tile_buffers[tile_row][tile_col].size;

  int i;

  if (tile_row == 0) return 0;

  // (TODO: yunqingwang) For now, only above tile is checked and used.
  // More candidates such as left tile can be added later.
  for (i = 0; i < 1; i++) {
    int row_offset = candidate_offset[0].row;
    int col_offset = candidate_offset[0].col;
    int row = tile_row - row_offset;
    int col = tile_col - col_offset;
    uint8_t tile_hdr;
    const uint8_t *tile_data;
    TileBufferEnc *candidate;

    if (row < 0 || col < 0) continue;

    tile_hdr = *(tile_buffers[row][col].data);

    // Read out tcm bit
    if ((tile_hdr >> 7) == 1) {
      // The candidate is a copy tile itself
      row_offset += tile_hdr & 0x7f;
      row = tile_row - row_offset;
    }

    candidate = &tile_buffers[row][col];

    if (row_offset >= 128 || candidate->size != cur_tile_size) continue;

    tile_data = candidate->data + 4;

    if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;

    // Identical tile found
    assert(row_offset > 0);
    return row_offset;
  }

  // No identical tile found
  return 0;
}
#endif  // CONFIG_EXT_TILE

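/* Informal note on the copy-tile mechanism used by find_identical_tile()
 * above and by write_tiles() below: in the large-scale-tile path every tile
 * is prefixed with a 4-byte header. Normally it carries the tile size, but
 * when an identical tile is found directly above, the top byte instead holds
 * a set MSB plus the row offset of the copied tile
 * (tile_header = (0x80 | offset) << 24), which is why find_identical_tile()
 * rejects offsets of 128 or more.
 */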
#if !CONFIG_OBU
static uint32_t write_tiles(AV1_COMP *const cpi, uint8_t *const dst,
                            unsigned int *max_tile_size,
                            unsigned int *max_tile_col_size) {
  AV1_COMMON *const cm = &cpi->common;
  aom_writer mode_bc;
  int tile_row, tile_col;
  TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
  TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
  uint32_t total_size = 0;
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;
  unsigned int tile_size = 0;
  const int have_tiles = tile_cols * tile_rows > 1;
  struct aom_write_bit_buffer wb = { dst, 0 };
  const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
  // Fixed size tile groups for the moment
  const int num_tg_hdrs = cm->num_tg;
  const int tg_size =
#if CONFIG_EXT_TILE
      (cm->large_scale_tile)
          ? 1
          :
#endif  // CONFIG_EXT_TILE
          (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
  int tile_count = 0;
  int tg_count = 1;
  int tile_size_bytes = 4;
  int tile_col_size_bytes;
  uint32_t uncompressed_hdr_size = 0;
  struct aom_write_bit_buffer tg_params_wb;
  struct aom_write_bit_buffer tile_size_bytes_wb;
  uint32_t saved_offset;
  int mtu_size = cpi->oxcf.mtu;
  int curr_tg_data_size = 0;
  int hdr_size;
  const int num_planes = av1_num_planes(cm);

  *max_tile_size = 0;
  *max_tile_col_size = 0;

  // All tile size fields are output on 4 bytes. A call to remux_tiles will
  // later compact the data if smaller headers are adequate.

  cm->largest_tile_id = 0;

#if CONFIG_EXT_TILE
  if (cm->large_scale_tile) {
    for (tile_col = 0; tile_col < tile_cols; tile_col++) {
      TileInfo tile_info;
      const int is_last_col = (tile_col == tile_cols - 1);
      const uint32_t col_offset = total_size;

      av1_tile_set_col(&tile_info, cm, tile_col);

      // The last column does not have a column header
      if (!is_last_col) total_size += 4;

      for (tile_row = 0; tile_row < tile_rows; tile_row++) {
        TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
        const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
        const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
        const int data_offset = have_tiles ? 4 : 0;
        const int tile_idx = tile_row * tile_cols + tile_col;
        TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
        av1_tile_set_row(&tile_info, cm, tile_row);

        buf->data = dst + total_size;

        // If CONFIG_EXT_TILE = 1, every tile in the row has a header,
        // even for the last one, unless no tiling is used at all.
        total_size += data_offset;
        // Initialise tile context from the frame context
        this_tile->tctx = *cm->fc;
        cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
        mode_bc.allow_update_cdf = !cm->large_scale_tile;
#if CONFIG_LOOP_RESTORATION
        av1_reset_loop_restoration(&cpi->td.mb.e_mbd, num_planes);
#endif  // CONFIG_LOOP_RESTORATION

        aom_start_encode(&mode_bc, buf->data + data_offset);
        write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
        assert(tok == tok_end);
        aom_stop_encode(&mode_bc);
        tile_size = mode_bc.pos;
        buf->size = tile_size;

        if (tile_size > *max_tile_size) {
          cm->largest_tile_id = tile_cols * tile_row + tile_col;
        }
        // Record the maximum tile size we see, so we can compact headers later.
        *max_tile_size = AOMMAX(*max_tile_size, tile_size);

        if (have_tiles) {
          // tile header: size of this tile, or copy offset
          uint32_t tile_header = tile_size;
          const int tile_copy_mode =
              ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256)
                  ? 1
                  : 0;

          // If tile_copy_mode = 1, check if this tile is a copy tile.
          // Very low chances to have copy tiles on the key frames, so don't
          // search on key frames to reduce unnecessary search.
          if (cm->frame_type != KEY_FRAME && tile_copy_mode) {
            const int identical_tile_offset =
                find_identical_tile(tile_row, tile_col, tile_buffers);

            if (identical_tile_offset > 0) {
              tile_size = 0;
              tile_header = identical_tile_offset | 0x80;
              tile_header <<= 24;
            }
          }

          mem_put_le32(buf->data, tile_header);
        }

        total_size += tile_size;
      }

      if (!is_last_col) {
        uint32_t col_size = total_size - col_offset - 4;
        mem_put_le32(dst + col_offset, col_size);

        // If it is not final packing, record the maximum tile column size we
        // see, otherwise, check if the tile size is out of the range.
        *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
      }
    }
  } else {
#endif  // CONFIG_EXT_TILE

#if !CONFIG_OBU
    write_uncompressed_header_frame(cpi, &wb);
#else
    write_uncompressed_header_obu(cpi, &wb);
#endif

    if (cm->show_existing_frame) {
      total_size = aom_wb_bytes_written(&wb);
      return (uint32_t)total_size;
    }

    // Write the tile length code
    tile_size_bytes_wb = wb;
    aom_wb_write_literal(&wb, 3, 2);

    /* Write a placeholder for the number of tiles in each tile group */
    tg_params_wb = wb;
    saved_offset = wb.bit_offset;
    if (have_tiles) {
      aom_wb_write_literal(&wb, 3, n_log2_tiles);
      aom_wb_write_literal(&wb, (1 << n_log2_tiles) - 1, n_log2_tiles);
    }

    uncompressed_hdr_size = aom_wb_bytes_written(&wb);
    hdr_size = uncompressed_hdr_size;
    total_size += hdr_size;

    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
      TileInfo tile_info;
      const int is_last_row = (tile_row == tile_rows - 1);
      av1_tile_set_row(&tile_info, cm, tile_row);

      for (tile_col = 0; tile_col < tile_cols; tile_col++) {
        const int tile_idx = tile_row * tile_cols + tile_col;
        TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
        TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
        const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
        const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
        const int is_last_col = (tile_col == tile_cols - 1);
        const int is_last_tile = is_last_col && is_last_row;

        if ((!mtu_size && tile_count > tg_size) ||
            (mtu_size && tile_count && curr_tg_data_size >= mtu_size)) {
          // New tile group
          tg_count++;
          // We've exceeded the packet size
          if (tile_count > 1) {
            /* The last tile exceeded the packet size. The tile group size
               should therefore be tile_count-1.
               Move the last tile and insert headers before it
             */
            uint32_t old_total_size = total_size - tile_size - 4;
            memmove(dst + old_total_size + hdr_size, dst + old_total_size,
                    (tile_size + 4) * sizeof(uint8_t));
            // Copy uncompressed header
            memmove(dst + old_total_size, dst,
                    uncompressed_hdr_size * sizeof(uint8_t));
            // Write the number of tiles in the group into the last uncompressed
            // header before the one we've just inserted
            aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
                                     n_log2_tiles);
            aom_wb_overwrite_literal(&tg_params_wb, tile_count - 2,
                                     n_log2_tiles);
            // Update the pointer to the last TG params
            tg_params_wb.bit_offset = saved_offset + 8 * old_total_size;
            total_size += hdr_size;
            tile_count = 1;
            curr_tg_data_size = hdr_size + tile_size + 4;
          } else {
            // We exceeded the packet size in just one tile
            // Copy uncompressed header
            memmove(dst + total_size, dst,
                    uncompressed_hdr_size * sizeof(uint8_t));
            // Write the number of tiles in the group into the last uncompressed
            // header
            aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
                                     n_log2_tiles);
            aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1,
                                     n_log2_tiles);
            tg_params_wb.bit_offset = saved_offset + 8 * total_size;
            total_size += hdr_size;
            tile_count = 0;
            curr_tg_data_size = hdr_size;
          }
        }
        tile_count++;
        av1_tile_set_col(&tile_info, cm, tile_col);

#if CONFIG_DEPENDENT_HORZTILES
        av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
#endif
        buf->data = dst + total_size;

        // The last tile does not have a header.
        if (!is_last_tile) total_size += 4;

        // Initialise tile context from the frame context
        this_tile->tctx = *cm->fc;
        cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
        mode_bc.allow_update_cdf = 1;
#if CONFIG_LOOP_RESTORATION
        av1_reset_loop_restoration(&cpi->td.mb.e_mbd, num_planes);
#endif  // CONFIG_LOOP_RESTORATION

        aom_start_encode(&mode_bc, dst + total_size);
        write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
        aom_stop_encode(&mode_bc);
        tile_size = mode_bc.pos;
        assert(tile_size > 0);

        curr_tg_data_size += tile_size + 4;
        buf->size = tile_size;

        if (tile_size > *max_tile_size) {
          cm->largest_tile_id = tile_cols * tile_row + tile_col;
        }
        if (!is_last_tile) {
          *max_tile_size = AOMMAX(*max_tile_size, tile_size);
          // size of this tile
          mem_put_le32(buf->data, tile_size);
        }

        total_size += tile_size;
      }
    }
    // Write the final tile group size
    if (n_log2_tiles) {
      aom_wb_overwrite_literal(
          &tg_params_wb, (tile_cols * tile_rows) - tile_count, n_log2_tiles);
      aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1, n_log2_tiles);
    }
    // Remux if possible. TODO (Thomas Davies): do this for more than one tile
    // group
    if (have_tiles && tg_count == 1) {
      int data_size = total_size - uncompressed_hdr_size;
      data_size = remux_tiles(cm, dst + uncompressed_hdr_size, data_size,
                              *max_tile_size, *max_tile_col_size,
                              &tile_size_bytes, &tile_col_size_bytes);
      total_size = data_size + uncompressed_hdr_size;
      aom_wb_overwrite_literal(&tile_size_bytes_wb, tile_size_bytes - 1, 2);
    }

#if CONFIG_EXT_TILE
  }
#endif  // CONFIG_EXT_TILE
  return (uint32_t)total_size;
}
#endif

static void write_render_size(const AV1_COMMON *cm,
                              struct aom_write_bit_buffer *wb) {
  const int scaling_active = !av1_resize_unscaled(cm);
  aom_wb_write_bit(wb, scaling_active);
  if (scaling_active) {
    aom_wb_write_literal(wb, cm->render_width - 1, 16);
    aom_wb_write_literal(wb, cm->render_height - 1, 16);
  }
}

#if CONFIG_HORZONLY_FRAME_SUPERRES
static void write_superres_scale(const AV1_COMMON *const cm,
                                 struct aom_write_bit_buffer *wb) {
  // First bit is whether to scale or not
  if (cm->superres_scale_denominator == SCALE_NUMERATOR) {
    aom_wb_write_bit(wb, 0);  // no scaling
  } else {
    aom_wb_write_bit(wb, 1);  // scaling, write scale factor
    assert(cm->superres_scale_denominator >= SUPERRES_SCALE_DENOMINATOR_MIN);
    assert(cm->superres_scale_denominator <
           SUPERRES_SCALE_DENOMINATOR_MIN + (1 << SUPERRES_SCALE_BITS));
    aom_wb_write_literal(
        wb, cm->superres_scale_denominator - SUPERRES_SCALE_DENOMINATOR_MIN,
        SUPERRES_SCALE_BITS);
  }
}
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES

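/* Illustrative bit pattern for write_superres_scale() above (a descriptive
 * example, not additional syntax): an unscaled frame, i.e. a denominator
 * equal to SCALE_NUMERATOR, costs a single 0 bit; any other denominator costs
 * a 1 bit followed by a SUPERRES_SCALE_BITS literal holding the denominator
 * minus SUPERRES_SCALE_DENOMINATOR_MIN.
 */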
#if CONFIG_FRAME_SIZE
static void write_frame_size(const AV1_COMMON *cm, int frame_size_override,
                             struct aom_write_bit_buffer *wb)
#else
static void write_frame_size(const AV1_COMMON *cm,
                             struct aom_write_bit_buffer *wb)
#endif
{
#if CONFIG_HORZONLY_FRAME_SUPERRES
  const int coded_width = cm->superres_upscaled_width - 1;
  const int coded_height = cm->superres_upscaled_height - 1;
#else
  const int coded_width = cm->width - 1;
  const int coded_height = cm->height - 1;
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES

#if CONFIG_FRAME_SIZE
  if (frame_size_override) {
    const SequenceHeader *seq_params = &cm->seq_params;
    int num_bits_width = seq_params->num_bits_width;
    int num_bits_height = seq_params->num_bits_height;
    aom_wb_write_literal(wb, coded_width, num_bits_width);
    aom_wb_write_literal(wb, coded_height, num_bits_height);
  }
#else
  aom_wb_write_literal(wb, coded_width, 16);
  aom_wb_write_literal(wb, coded_height, 16);
#endif

#if CONFIG_HORZONLY_FRAME_SUPERRES
  write_superres_scale(cm, wb);
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES
  write_render_size(cm, wb);
}

static void write_frame_size_with_refs(AV1_COMP *cpi,
                                       struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  int found = 0;

  MV_REFERENCE_FRAME ref_frame;
  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);

    if (cfg != NULL) {
#if CONFIG_HORZONLY_FRAME_SUPERRES
      found = cm->superres_upscaled_width == cfg->y_crop_width &&
              cm->superres_upscaled_height == cfg->y_crop_height;
#else
      found =
          cm->width == cfg->y_crop_width && cm->height == cfg->y_crop_height;
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES
      found &= cm->render_width == cfg->render_width &&
               cm->render_height == cfg->render_height;
    }
    aom_wb_write_bit(wb, found);
    if (found) {
#if CONFIG_HORZONLY_FRAME_SUPERRES
      write_superres_scale(cm, wb);
#endif  // CONFIG_HORZONLY_FRAME_SUPERRES
      break;
    }
  }

#if CONFIG_FRAME_SIZE
  if (!found) {
    int frame_size_override = 1;  // Always equal to 1 in this function
    write_frame_size(cm, frame_size_override, wb);
  }
#else
  if (!found) write_frame_size(cm, wb);
#endif
}

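/* Informal example of the profile and bit-depth signalling below (values
 * chosen purely for illustration): PROFILE_2 is written as the two-bit
 * literal 10 by write_profile(); a 12-bit stream in that profile then writes
 * 1 followed by 1 in write_bitdepth(), while an 8-bit stream in any profile
 * writes a single 0.
 */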
Yaowu Xuc27fc142016-08-22 16:08:15 -07003084static void write_profile(BITSTREAM_PROFILE profile,
Yaowu Xuf883b422016-08-30 14:01:10 -07003085 struct aom_write_bit_buffer *wb) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003086 assert(profile >= PROFILE_0 && profile < MAX_PROFILES);
3087 aom_wb_write_literal(wb, profile, 2);
3088}
3089
3090static void write_bitdepth(AV1_COMMON *const cm,
3091 struct aom_write_bit_buffer *wb) {
3092 // Profile 0/1: [0] for 8 bit, [1] 10-bit
3093 // Profile 2: [0] for 8 bit, [10] 10-bit, [11] - 12-bit
3094 aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_8 ? 0 : 1);
3095 if (cm->profile == PROFILE_2 && cm->bit_depth != AOM_BITS_8) {
3096 aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003097 }
3098}
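// Illustrative reading of the bit-depth coding above (comment added for
// clarity, not part of the original source): an 8-bit stream is coded as
// the single bit 0 in any profile; in Profile 2, 10-bit is coded as the
// bits 1,0 and 12-bit as the bits 1,1.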
3099
3100static void write_bitdepth_colorspace_sampling(
Yaowu Xuf883b422016-08-30 14:01:10 -07003101 AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003102 write_bitdepth(cm, wb);
3103#if CONFIG_MONO_VIDEO
Debargha Mukherjeef340fec2018-01-10 18:12:22 -08003104 const int is_monochrome = cm->seq_params.monochrome;
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003105 // monochrome bit
3106 if (cm->profile != PROFILE_1)
3107 aom_wb_write_bit(wb, is_monochrome);
3108 else
3109 assert(!is_monochrome);
3110#elif !CONFIG_CICP
3111 const int is_monochrome = 0;
3112#endif // CONFIG_MONO_VIDEO
Andrey Norkin9e694632017-12-21 18:50:57 -08003113#if CONFIG_CICP
3114 if (cm->color_primaries == AOM_CICP_CP_UNSPECIFIED &&
3115 cm->transfer_characteristics == AOM_CICP_TC_UNSPECIFIED &&
3116 cm->matrix_coefficients == AOM_CICP_MC_UNSPECIFIED) {
3117 aom_wb_write_bit(wb, 0); // No color description present
3118 } else {
3119 aom_wb_write_bit(wb, 1); // Color description present
3120 aom_wb_write_literal(wb, cm->color_primaries, 8);
3121 aom_wb_write_literal(wb, cm->transfer_characteristics, 8);
3122 aom_wb_write_literal(wb, cm->matrix_coefficients, 8);
3123 }
3124#else
anorkin76fb1262017-03-22 15:12:12 -07003125#if CONFIG_COLORSPACE_HEADERS
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003126 if (!is_monochrome) aom_wb_write_literal(wb, cm->color_space, 5);
anorkin76fb1262017-03-22 15:12:12 -07003127 aom_wb_write_literal(wb, cm->transfer_function, 5);
3128#else
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003129 if (!is_monochrome) aom_wb_write_literal(wb, cm->color_space, 4);
3130#endif // CONFIG_COLORSPACE_HEADERS
3131#endif // CONFIG_CICP
Debargha Mukherjeee5267692018-01-16 09:41:15 -08003132#if CONFIG_MONO_VIDEO
3133 if (is_monochrome) return;
3134#endif // CONFIG_MONO_VIDEO
Andrey Norkin9e694632017-12-21 18:50:57 -08003135#if CONFIG_CICP
3136 if (cm->color_primaries == AOM_CICP_CP_BT_709 &&
3137 cm->transfer_characteristics == AOM_CICP_TC_SRGB &&
3138 cm->matrix_coefficients ==
3139 AOM_CICP_MC_IDENTITY) { // it would be better to remove this
3140 // dependency too
3141#else
Imdad Sardharwalla317002f2017-12-05 16:24:56 +00003142 if (cm->color_space == AOM_CS_SRGB) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003143#endif // CONFIG_CICP
3144 assert(cm->subsampling_x == 0 && cm->subsampling_y == 0);
3145 assert(cm->profile == PROFILE_1 ||
3146 (cm->profile == PROFILE_2 && cm->bit_depth == AOM_BITS_12));
Imdad Sardharwalla317002f2017-12-05 16:24:56 +00003147 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003148 // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
Yaowu Xuf883b422016-08-30 14:01:10 -07003149 aom_wb_write_bit(wb, cm->color_range);
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003150 if (cm->profile == PROFILE_0) {
3151 // 420 only
Yaowu Xuc27fc142016-08-22 16:08:15 -07003152 assert(cm->subsampling_x == 1 && cm->subsampling_y == 1);
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003153 } else if (cm->profile == PROFILE_1) {
3154 // 444 only
3155 assert(cm->subsampling_x == 0 && cm->subsampling_y == 0);
3156 } else if (cm->profile == PROFILE_2) {
3157 if (cm->bit_depth == AOM_BITS_12) {
3158 // 420, 444 or 422
3159 aom_wb_write_bit(wb, cm->subsampling_x);
David Barker0c3545b2018-01-16 17:32:23 +00003160 if (cm->subsampling_x == 0) {
3161 assert(cm->subsampling_y == 0 &&
3162 "4:4:0 subsampling not allowed in AV1");
3163 } else {
3164 aom_wb_write_bit(wb, cm->subsampling_y);
3165 }
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08003166 } else {
3167 // 422 only
3168 assert(cm->subsampling_x == 1 && cm->subsampling_y == 0);
3169 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003170 }
anorkin76fb1262017-03-22 15:12:12 -07003171#if CONFIG_COLORSPACE_HEADERS
3172 if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
3173 aom_wb_write_literal(wb, cm->chroma_sample_position, 2);
3174 }
3175#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003176 }
Yaowu Xu6fc47e52017-12-04 15:07:48 -08003177 aom_wb_write_bit(wb, cm->separate_uv_delta_q);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003178}
3179
Andrey Norkin28e9ce22018-01-08 10:11:21 -08003180#if CONFIG_TIMING_INFO_IN_SEQ_HEADERS
3181static void write_timing_info_header(AV1_COMMON *const cm,
3182 struct aom_write_bit_buffer *wb) {
3183 aom_wb_write_bit(wb, cm->timing_info_present); // timing info present flag
3184
3185 if (cm->timing_info_present) {
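    // Illustrative example (not in the original source): with, say,
    // num_units_in_tick == 1001 and time_scale == 30000 this header
    // describes a 30000/1001 (~29.97) fps stream, and when
    // equal_picture_interval is set, num_ticks_per_picture - 1 == 0
    // signals exactly one tick per displayed picture.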
3186 aom_wb_write_unsigned_literal(wb, cm->num_units_in_tick,
3187 32); // Number of units in tick
3188 aom_wb_write_unsigned_literal(wb, cm->time_scale, 32); // Time scale
3189 aom_wb_write_bit(wb,
3190 cm->equal_picture_interval); // Equal picture interval bit
3191 if (cm->equal_picture_interval) {
3192 aom_wb_write_uvlc(wb,
3193 cm->num_ticks_per_picture - 1); // ticks per picture
3194 }
3195 }
3196}
3197#endif // CONFIG_TIMING_INFO_IN_SEQ_HEADERS
3198
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003199#if CONFIG_FILM_GRAIN
3200static void write_film_grain_params(AV1_COMMON *const cm,
3201 struct aom_write_bit_buffer *wb) {
3202 aom_film_grain_t *pars = &cm->film_grain_params;
3203
3204 aom_wb_write_bit(wb, pars->apply_grain);
3205 if (!pars->apply_grain) return;
3206
3207 aom_wb_write_literal(wb, pars->random_seed, 16);
3208
3209 pars->random_seed += 3245; // For film grain test vector purposes
3210 if (!pars->random_seed) // Random seed should not be zero
3211 pars->random_seed += 1735;
3212
3213 aom_wb_write_bit(wb, pars->update_parameters);
3214 if (!pars->update_parameters) return;
3215
3216 // Scaling functions parameters
3217
3218 aom_wb_write_literal(wb, pars->num_y_points, 4); // max 14
3219 for (int i = 0; i < pars->num_y_points; i++) {
3220 aom_wb_write_literal(wb, pars->scaling_points_y[i][0], 8);
3221 aom_wb_write_literal(wb, pars->scaling_points_y[i][1], 8);
3222 }
3223
Andrey Norkin20be5452018-02-20 17:46:13 -08003224 if (!cm->seq_params.monochrome)
3225 aom_wb_write_bit(wb, pars->chroma_scaling_from_luma);
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003226
Andrey Norkin20be5452018-02-20 17:46:13 -08003227 if (cm->seq_params.monochrome || pars->chroma_scaling_from_luma) {
3228 pars->num_cb_points = 0;
3229 pars->num_cr_points = 0;
Andrey Norkin0c294fa2018-02-16 18:32:12 -08003230 } else {
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003231 aom_wb_write_literal(wb, pars->num_cb_points, 4); // max 10
3232 for (int i = 0; i < pars->num_cb_points; i++) {
3233 aom_wb_write_literal(wb, pars->scaling_points_cb[i][0], 8);
3234 aom_wb_write_literal(wb, pars->scaling_points_cb[i][1], 8);
3235 }
3236
3237 aom_wb_write_literal(wb, pars->num_cr_points, 4); // max 10
3238 for (int i = 0; i < pars->num_cr_points; i++) {
3239 aom_wb_write_literal(wb, pars->scaling_points_cr[i][0], 8);
3240 aom_wb_write_literal(wb, pars->scaling_points_cr[i][1], 8);
3241 }
3242 }
3243
3244 aom_wb_write_literal(wb, pars->scaling_shift - 8, 2); // 8 + value
3245
3246 // AR coefficients
3247 // Only sent if the corresponding scaling function has
3248 // more than 0 points
3249
3250 aom_wb_write_literal(wb, pars->ar_coeff_lag, 2);
3251
3252 int num_pos_luma = 2 * pars->ar_coeff_lag * (pars->ar_coeff_lag + 1);
Andrey Norkin20be5452018-02-20 17:46:13 -08003253 int num_pos_chroma = num_pos_luma;
3254 if (pars->num_y_points > 0) ++num_pos_chroma;
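  // Worked example (illustrative): with ar_coeff_lag == 3 there are
  // 2 * 3 * (3 + 1) = 24 luma AR coefficients; each chroma plane codes
  // one extra coefficient for the luma-driven term whenever luma scaling
  // points are present.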
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003255
3256 if (pars->num_y_points)
3257 for (int i = 0; i < num_pos_luma; i++)
3258 aom_wb_write_literal(wb, pars->ar_coeffs_y[i] + 128, 8);
3259
3260 if (pars->num_cb_points || pars->chroma_scaling_from_luma)
3261 for (int i = 0; i < num_pos_chroma; i++)
3262 aom_wb_write_literal(wb, pars->ar_coeffs_cb[i] + 128, 8);
3263
3264 if (pars->num_cr_points || pars->chroma_scaling_from_luma)
3265 for (int i = 0; i < num_pos_chroma; i++)
3266 aom_wb_write_literal(wb, pars->ar_coeffs_cr[i] + 128, 8);
3267
3268 aom_wb_write_literal(wb, pars->ar_coeff_shift - 6, 2); // 6 + value
3269
Andrey Norkina840cde2018-02-16 15:39:50 -08003270 aom_wb_write_literal(wb, pars->grain_scale_shift, 2);
3271
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003272 if (pars->num_cb_points) {
3273 aom_wb_write_literal(wb, pars->cb_mult, 8);
3274 aom_wb_write_literal(wb, pars->cb_luma_mult, 8);
3275 aom_wb_write_literal(wb, pars->cb_offset, 9);
3276 }
3277
3278 if (pars->num_cr_points) {
3279 aom_wb_write_literal(wb, pars->cr_mult, 8);
3280 aom_wb_write_literal(wb, pars->cr_luma_mult, 8);
3281 aom_wb_write_literal(wb, pars->cr_offset, 9);
3282 }
3283
3284 aom_wb_write_bit(wb, pars->overlap_flag);
3285
3286 aom_wb_write_bit(wb, pars->clip_to_restricted_range);
3287}
3288#endif // CONFIG_FILM_GRAIN
3289
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00003290static void write_sb_size(SequenceHeader *seq_params,
3291 struct aom_write_bit_buffer *wb) {
3292 (void)seq_params;
3293 (void)wb;
3294 assert(seq_params->mib_size == mi_size_wide[seq_params->sb_size]);
3295 assert(seq_params->mib_size == 1 << seq_params->mib_size_log2);
3296#if CONFIG_EXT_PARTITION
3297 assert(seq_params->sb_size == BLOCK_128X128 ||
3298 seq_params->sb_size == BLOCK_64X64);
3299 aom_wb_write_bit(wb, seq_params->sb_size == BLOCK_128X128 ? 1 : 0);
3300#else
3301 assert(seq_params->sb_size == BLOCK_64X64);
3302#endif // CONFIG_EXT_PARTITION
3303}
3304
Rupert Swarbrickb394bfe2017-11-07 17:52:13 +00003305#if CONFIG_REFERENCE_BUFFER || CONFIG_OBU
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003306void write_sequence_header(AV1_COMP *cpi, struct aom_write_bit_buffer *wb) {
3307 AV1_COMMON *const cm = &cpi->common;
David Barker5e70a112017-10-03 14:28:17 +01003308 SequenceHeader *seq_params = &cm->seq_params;
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003309
3310#if CONFIG_FRAME_SIZE
3311 int num_bits_width = 16;
3312 int num_bits_height = 16;
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003313 int max_frame_width = cpi->oxcf.width;
3314 int max_frame_height = cpi->oxcf.height;
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003315
3316 seq_params->num_bits_width = num_bits_width;
3317 seq_params->num_bits_height = num_bits_height;
3318 seq_params->max_frame_width = max_frame_width;
3319 seq_params->max_frame_height = max_frame_height;
3320
3321 aom_wb_write_literal(wb, num_bits_width - 1, 4);
3322 aom_wb_write_literal(wb, num_bits_height - 1, 4);
3323 aom_wb_write_literal(wb, max_frame_width - 1, num_bits_width);
3324 aom_wb_write_literal(wb, max_frame_height - 1, num_bits_height);
3325#endif
3326
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003327 /* Placeholder for actually writing to the bitstream */
Yunqing Wangc2502b52017-07-19 17:44:18 -07003328 seq_params->frame_id_numbers_present_flag =
3329#if CONFIG_EXT_TILE
3330 cm->large_scale_tile ? 0 :
3331#endif // CONFIG_EXT_TILE
Yaowu Xu6eb9da22018-01-23 10:19:17 -08003332 cm->error_resilient_mode;
Sebastien Alaiwand418f682017-10-19 15:06:52 +02003333 seq_params->frame_id_length = FRAME_ID_LENGTH;
3334 seq_params->delta_frame_id_length = DELTA_FRAME_ID_LENGTH;
David Barker5e70a112017-10-03 14:28:17 +01003335
3336 aom_wb_write_bit(wb, seq_params->frame_id_numbers_present_flag);
3337 if (seq_params->frame_id_numbers_present_flag) {
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003338 // We must always have delta_frame_id_length < frame_id_length,
3339 // in order for a frame to be referenced with a unique delta.
3340 // Avoid wasting bits by using a coding that enforces this restriction.
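    // Worked example (illustrative, assuming the usual FRAME_ID_LENGTH of
    // 15 and DELTA_FRAME_ID_LENGTH of 14): the fields written below are
    // 14 - 2 = 12 in 4 bits and 15 - 14 - 1 = 0 in 3 bits.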
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003341 aom_wb_write_literal(wb, seq_params->delta_frame_id_length - 2, 4);
Frederic Barbier4d5d90e2017-10-13 09:22:33 +02003342 aom_wb_write_literal(
3343 wb, seq_params->frame_id_length - seq_params->delta_frame_id_length - 1,
3344 3);
David Barker5e70a112017-10-03 14:28:17 +01003345 }
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00003346
3347 write_sb_size(seq_params, wb);
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00003348
3349 if (seq_params->force_screen_content_tools == 2) {
3350 aom_wb_write_bit(wb, 1);
3351 } else {
3352 aom_wb_write_bit(wb, 0);
3353 aom_wb_write_bit(wb, seq_params->force_screen_content_tools);
3354 }
3355
3356#if CONFIG_AMVR
3357 if (seq_params->force_screen_content_tools > 0) {
3358 if (seq_params->force_integer_mv == 2) {
3359 aom_wb_write_bit(wb, 1);
3360 } else {
3361 aom_wb_write_bit(wb, 0);
3362 aom_wb_write_bit(wb, seq_params->force_integer_mv);
3363 }
3364 } else {
3365 assert(seq_params->force_integer_mv == 2);
3366 }
3367#endif
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003368}
Rupert Swarbrickb394bfe2017-11-07 17:52:13 +00003369#endif // CONFIG_REFERENCE_BUFFER || CONFIG_OBU
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003370
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003371static void write_compound_tools(const AV1_COMMON *cm,
3372 struct aom_write_bit_buffer *wb) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003373 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
3374 aom_wb_write_bit(wb, cm->allow_interintra_compound);
3375 } else {
3376 assert(cm->allow_interintra_compound == 0);
3377 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003378 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
3379 aom_wb_write_bit(wb, cm->allow_masked_compound);
3380 } else {
3381 assert(cm->allow_masked_compound == 0);
3382 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003383}
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003384
David Barkerd7c8bd52017-09-25 14:47:29 +01003385static void write_global_motion_params(const WarpedMotionParams *params,
3386 const WarpedMotionParams *ref_params,
Sarah Parker3e579a62017-08-23 16:53:20 -07003387 struct aom_write_bit_buffer *wb,
3388 int allow_hp) {
Sebastien Alaiwane4984ff2017-10-31 15:27:44 +01003389 const TransformationType type = params->wmtype;
Sarah Parker3e579a62017-08-23 16:53:20 -07003390
3391 aom_wb_write_bit(wb, type != IDENTITY);
3392 if (type != IDENTITY) {
3393#if GLOBAL_TRANS_TYPES > 4
3394 aom_wb_write_literal(wb, type - 1, GLOBAL_TYPE_BITS);
3395#else
3396 aom_wb_write_bit(wb, type == ROTZOOM);
3397 if (type != ROTZOOM) aom_wb_write_bit(wb, type == TRANSLATION);
3398#endif // GLOBAL_TRANS_TYPES > 4
3399 }
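  // With GLOBAL_TRANS_TYPES <= 4 the block above forms a short prefix code
  // (illustrative reading): IDENTITY -> 0, ROTZOOM -> 1 1,
  // TRANSLATION -> 1 0 1, AFFINE -> 1 0 0.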
3400
Sebastien Alaiwane4984ff2017-10-31 15:27:44 +01003401 if (type >= ROTZOOM) {
3402 aom_wb_write_signed_primitive_refsubexpfin(
3403 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3404 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
3405 (1 << GM_ALPHA_PREC_BITS),
3406 (params->wmmat[2] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
3407 aom_wb_write_signed_primitive_refsubexpfin(
3408 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3409 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF),
3410 (params->wmmat[3] >> GM_ALPHA_PREC_DIFF));
3411 }
3412
3413 if (type >= AFFINE) {
3414 aom_wb_write_signed_primitive_refsubexpfin(
3415 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3416 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF),
3417 (params->wmmat[4] >> GM_ALPHA_PREC_DIFF));
3418 aom_wb_write_signed_primitive_refsubexpfin(
3419 wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
3420 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
3421 (1 << GM_ALPHA_PREC_BITS),
3422 (params->wmmat[5] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
3423 }
3424
3425 if (type >= TRANSLATION) {
3426 const int trans_bits = (type == TRANSLATION)
3427 ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
3428 : GM_ABS_TRANS_BITS;
3429 const int trans_prec_diff = (type == TRANSLATION)
3430 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
3431 : GM_TRANS_PREC_DIFF;
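    // Note (added for clarity): translation-only models are coded at a
    // reduced precision, and with one fewer magnitude bit when
    // high-precision MVs are disabled; ROTZOOM and AFFINE models code
    // their translation terms at the full GM_ABS_TRANS_BITS precision.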
3432 aom_wb_write_signed_primitive_refsubexpfin(
3433 wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
3434 (ref_params->wmmat[0] >> trans_prec_diff),
3435 (params->wmmat[0] >> trans_prec_diff));
3436 aom_wb_write_signed_primitive_refsubexpfin(
3437 wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
3438 (ref_params->wmmat[1] >> trans_prec_diff),
3439 (params->wmmat[1] >> trans_prec_diff));
Sarah Parker3e579a62017-08-23 16:53:20 -07003440 }
3441}
3442
3443static void write_global_motion(AV1_COMP *cpi,
3444 struct aom_write_bit_buffer *wb) {
3445 AV1_COMMON *const cm = &cpi->common;
3446 int frame;
3447 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barkerd7c8bd52017-09-25 14:47:29 +01003448 const WarpedMotionParams *ref_params =
3449 cm->error_resilient_mode ? &default_warp_params
3450 : &cm->prev_frame->global_motion[frame];
3451 write_global_motion_params(&cm->global_motion[frame], ref_params, wb,
Sarah Parker3e579a62017-08-23 16:53:20 -07003452 cm->allow_high_precision_mv);
3453 // TODO(sarahparker, debargha): The logic in the commented-out code below
3454 // does not currently work and causes mismatches when resize is on.
3455 // Fix it before turning the optimization back on.
3456 /*
3457 YV12_BUFFER_CONFIG *ref_buf = get_ref_frame_buffer(cpi, frame);
3458 if (cpi->source->y_crop_width == ref_buf->y_crop_width &&
3459 cpi->source->y_crop_height == ref_buf->y_crop_height) {
3460 write_global_motion_params(&cm->global_motion[frame],
3461 &cm->prev_frame->global_motion[frame], wb,
3462 cm->allow_high_precision_mv);
3463 } else {
3464 assert(cm->global_motion[frame].wmtype == IDENTITY &&
3465 "Invalid warp type for frames of different resolutions");
3466 }
3467 */
3468 /*
3469 printf("Frame %d/%d: Enc Ref %d: %d %d %d %d\n",
3470 cm->current_video_frame, cm->show_frame, frame,
3471 cm->global_motion[frame].wmmat[0],
3472 cm->global_motion[frame].wmmat[1], cm->global_motion[frame].wmmat[2],
3473 cm->global_motion[frame].wmmat[3]);
3474 */
3475 }
3476}
Sarah Parker3e579a62017-08-23 16:53:20 -07003477
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003478#if !CONFIG_OBU
3479static void write_uncompressed_header_frame(AV1_COMP *cpi,
3480 struct aom_write_bit_buffer *wb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003481 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003482 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
3483
Yaowu Xuf883b422016-08-30 14:01:10 -07003484 aom_wb_write_literal(wb, AOM_FRAME_MARKER, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003485
3486 write_profile(cm->profile, wb);
3487
Yaowu Xuc27fc142016-08-22 16:08:15 -07003488 // NOTE: By default all coded frames to be used as a reference
3489 cm->is_reference_frame = 1;
3490
3491 if (cm->show_existing_frame) {
3492 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
3493 const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];
3494
3495 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003496 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003497 "Buffer %d does not contain a reconstructed frame",
3498 frame_to_show);
3499 }
3500 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
3501
Yaowu Xuf883b422016-08-30 14:01:10 -07003502 aom_wb_write_bit(wb, 1); // show_existing_frame
3503 aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003504
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003505#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003506 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003507 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003508 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
3509 aom_wb_write_literal(wb, display_frame_id, frame_id_len);
3510 /* Add a zero byte to prevent emulation of superframe marker */
3511 /* Same logic as when terminating the entropy coder */
3512 /* Consider having this logic in only one place */
3513 aom_wb_write_literal(wb, 0, 8);
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003514 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003515#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003516
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003517#if CONFIG_FILM_GRAIN
3518 if (cm->film_grain_params_present) write_film_grain_params(cm, wb);
3519#endif
3520
Zoe Liub4991202017-12-21 15:31:06 -08003521#if CONFIG_FWD_KF
3522 if (cm->reset_decoder_state && !frame_bufs[frame_to_show].intra_only) {
3523 aom_internal_error(
3524 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3525 "show_existing_frame to reset state on non-intra_only");
3526 }
3527 aom_wb_write_bit(wb, cm->reset_decoder_state);
3528#endif // CONFIG_FWD_KF
3529
Yaowu Xuc27fc142016-08-22 16:08:15 -07003530 return;
3531 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003532 aom_wb_write_bit(wb, 0); // show_existing_frame
Yaowu Xuc27fc142016-08-22 16:08:15 -07003533 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003534
Yaowu Xuf883b422016-08-30 14:01:10 -07003535 aom_wb_write_bit(wb, cm->frame_type);
3536 aom_wb_write_bit(wb, cm->show_frame);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003537 if (cm->frame_type != KEY_FRAME)
3538 if (!cm->show_frame) aom_wb_write_bit(wb, cm->intra_only);
Yaowu Xuf883b422016-08-30 14:01:10 -07003539 aom_wb_write_bit(wb, cm->error_resilient_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003540
Pavel Frolov3b95c502017-10-01 21:35:24 +03003541 if (frame_is_intra_only(cm)) {
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003542#if CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003543 write_sequence_header(cpi, wb);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003544#endif // CONFIG_REFERENCE_BUFFER
Joe Youngdb5eb4c2018-02-16 17:30:40 -08003545#if CONFIG_INTRA_EDGE2
3546 aom_wb_write_bit(wb, cm->disable_intra_edge_filter);
3547#endif // CONFIG_INTRA_EDGE2
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003548 }
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00003549
3550 if (cm->seq_params.force_screen_content_tools == 2) {
3551 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
3552 } else {
3553 assert(cm->allow_screen_content_tools ==
3554 cm->seq_params.force_screen_content_tools);
3555 }
3556
3557#if CONFIG_AMVR
3558 if (cm->allow_screen_content_tools) {
3559 if (cm->seq_params.force_integer_mv == 2) {
3560 aom_wb_write_bit(wb, cm->cur_frame_force_integer_mv);
3561 } else {
3562 assert(cm->cur_frame_force_integer_mv == cm->seq_params.force_integer_mv);
3563 }
3564 } else {
3565 assert(cm->cur_frame_force_integer_mv == 0);
3566 }
3567#endif // CONFIG_AMVR
3568
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003569#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003570 cm->invalid_delta_frame_id_minus1 = 0;
3571 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003572 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003573 aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003574 }
3575#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003576
3577#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003578 if (cm->width > cm->seq_params.max_frame_width ||
3579 cm->height > cm->seq_params.max_frame_height) {
3580 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3581 "Frame dimensions are larger than the maximum values");
3582 }
Urvang Joshi94ad3702017-12-06 11:38:08 -08003583#if CONFIG_HORZONLY_FRAME_SUPERRES
David Barker22171312017-11-20 11:26:04 +00003584 const int coded_width = cm->superres_upscaled_width;
3585 const int coded_height = cm->superres_upscaled_height;
3586#else
3587 const int coded_width = cm->width;
3588 const int coded_height = cm->height;
Urvang Joshi94ad3702017-12-06 11:38:08 -08003589#endif // CONFIG_HORZONLY_FRAME_SUPERRES
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003590 int frame_size_override_flag =
David Barker22171312017-11-20 11:26:04 +00003591 (coded_width != cm->seq_params.max_frame_width ||
3592 coded_height != cm->seq_params.max_frame_height);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003593 aom_wb_write_bit(wb, frame_size_override_flag);
3594#endif
3595
Zoe Liu6b172bb2018-02-15 17:41:41 -08003596#if CONFIG_FRAME_REFS_SIGNALING
3597 cm->frame_refs_short_signaling = 0;
3598#endif // CONFIG_FRAME_REFS_SIGNALING
3599
Yaowu Xuc27fc142016-08-22 16:08:15 -07003600 if (cm->frame_type == KEY_FRAME) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003601 write_bitdepth_colorspace_sampling(cm, wb);
Andrey Norkin28e9ce22018-01-08 10:11:21 -08003602#if CONFIG_TIMING_INFO_IN_SEQ_HEADERS
3603 // timing_info
3604 write_timing_info_header(cm, wb);
3605#endif
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003606#if CONFIG_FILM_GRAIN
3607 aom_wb_write_bit(wb, cm->film_grain_params_present);
3608#endif
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003609#if CONFIG_FRAME_SIZE
3610 write_frame_size(cm, frame_size_override_flag, wb);
3611#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003612 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003613#endif
Hui Su85878782017-11-07 14:56:31 -08003614#if CONFIG_INTRABC
David Barker218556e2018-02-14 14:23:12 +00003615#if CONFIG_HORZONLY_FRAME_SUPERRES
3616 assert(av1_superres_unscaled(cm) ||
3617 !(cm->allow_intrabc && NO_FILTER_FOR_IBC));
3618 if (cm->allow_screen_content_tools &&
3619 (av1_superres_unscaled(cm) || !NO_FILTER_FOR_IBC))
3620#else
3621 if (cm->allow_screen_content_tools)
3622#endif
3623 aom_wb_write_bit(wb, cm->allow_intrabc);
Hui Su85878782017-11-07 14:56:31 -08003624#endif // CONFIG_INTRABC
Hui Su1cb1c002018-02-05 18:21:20 -08003625#if CONFIG_CDF_UPDATE_MODE
3626 aom_wb_write_literal(wb, cm->cdf_update_mode, 2);
3627#endif // CONFIG_CDF_UPDATE_MODE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003628 } else {
Thomas Daedea6a854b2017-06-22 17:49:11 -07003629#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -07003630 if (!cm->error_resilient_mode) {
3631 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003632 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003633 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
3634 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003635 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003636 cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
3637 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
Yaowu Xuf883b422016-08-30 14:01:10 -07003638 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003639 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
3640 }
3641 }
Thomas Daedea6a854b2017-06-22 17:49:11 -07003642#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003643 cpi->refresh_frame_mask = get_refresh_mask(cpi);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003644
3645 if (cm->intra_only) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003646 write_bitdepth_colorspace_sampling(cm, wb);
Andrey Norkin28e9ce22018-01-08 10:11:21 -08003647#if CONFIG_TIMING_INFO_IN_SEQ_HEADERS
3648 write_timing_info_header(cm, wb);
3649#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003650
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003651#if CONFIG_FILM_GRAIN
3652 aom_wb_write_bit(wb, cm->film_grain_params_present);
3653#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07003654 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003655#if CONFIG_FRAME_SIZE
3656 write_frame_size(cm, frame_size_override_flag, wb);
3657#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003658 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003659#endif
Hui Sudf89ee32017-11-21 11:47:58 -08003660#if CONFIG_INTRABC
David Barker218556e2018-02-14 14:23:12 +00003661#if CONFIG_HORZONLY_FRAME_SUPERRES
3662 assert(av1_superres_unscaled(cm) ||
3663 !(cm->allow_intrabc && NO_FILTER_FOR_IBC));
3664 if (cm->allow_screen_content_tools &&
3665 (av1_superres_unscaled(cm) || !NO_FILTER_FOR_IBC))
3666#else
Hui Sudf89ee32017-11-21 11:47:58 -08003667 if (cm->allow_screen_content_tools)
David Barker218556e2018-02-14 14:23:12 +00003668#endif
Hui Sudf89ee32017-11-21 11:47:58 -08003669 aom_wb_write_bit(wb, cm->allow_intrabc);
3670#endif // CONFIG_INTRABC
Hui Su1cb1c002018-02-05 18:21:20 -08003671#if CONFIG_CDF_UPDATE_MODE
3672 aom_wb_write_literal(wb, cm->cdf_update_mode, 2);
3673#endif // CONFIG_CDF_UPDATE_MODE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003674 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003675 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003676
Yaowu Xuc27fc142016-08-22 16:08:15 -07003677 if (!cpi->refresh_frame_mask) {
3678 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
3679 // will not be used as a reference
3680 cm->is_reference_frame = 0;
3681 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003682
Zoe Liu6b172bb2018-02-15 17:41:41 -08003683#if CONFIG_FRAME_REFS_SIGNALING
3684 // TODO(zoeliu@google.com): Complete the encoder-side implementation
3685 // for the scenario cm->frame_refs_short_signaling == 1.
3686 assert(cm->frame_refs_short_signaling == 0);
3687 // NOTE: Error resilient mode turns off frame_refs_short_signaling
3688 // automatically.
3689 if (!cm->error_resilient_mode)
3690 aom_wb_write_bit(wb, cm->frame_refs_short_signaling);
3691 else
3692 assert(cm->frame_refs_short_signaling == 0);
3693
3694 if (cm->frame_refs_short_signaling) {
3695 assert(get_ref_frame_map_idx(cpi, LAST_FRAME) != INVALID_IDX);
3696 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, LAST_FRAME),
3697 REF_FRAMES_LOG2);
3698 assert(get_ref_frame_map_idx(cpi, GOLDEN_FRAME) != INVALID_IDX);
3699 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, GOLDEN_FRAME),
3700 REF_FRAMES_LOG2);
3701 }
3702#endif // CONFIG_FRAME_REFS_SIGNALING
3703
Zoe Liuf40a9572017-10-13 12:37:19 -07003704 for (MV_REFERENCE_FRAME ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME;
3705 ++ref_frame) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003706 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
Zoe Liu6b172bb2018-02-15 17:41:41 -08003707#if CONFIG_FRAME_REFS_SIGNALING
3708 if (!cm->frame_refs_short_signaling)
3709#endif // CONFIG_FRAME_REFS_SIGNALING
3710 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
3711 REF_FRAMES_LOG2);
3712
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003713#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003714 if (cm->seq_params.frame_id_numbers_present_flag) {
3715 int i = get_ref_frame_map_idx(cpi, ref_frame);
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003716 int frame_id_len = cm->seq_params.frame_id_length;
3717 int diff_len = cm->seq_params.delta_frame_id_length;
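        // Worked example (illustrative): with frame_id_len == 15,
        // current_frame_id == 3 and ref_frame_id[i] == 32765, the modular
        // difference (3 - 32765 + 32768) % 32768 == 6 gives
        // delta_frame_id_minus1 == 5, which must fit in diff_len bits.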
David Barker5e70a112017-10-03 14:28:17 +01003718 int delta_frame_id_minus1 =
3719 ((cm->current_frame_id - cm->ref_frame_id[i] +
3720 (1 << frame_id_len)) %
3721 (1 << frame_id_len)) -
3722 1;
3723 if (delta_frame_id_minus1 < 0 ||
3724 delta_frame_id_minus1 >= (1 << diff_len))
3725 cm->invalid_delta_frame_id_minus1 = 1;
3726 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003727 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003728#endif // CONFIG_REFERENCE_BUFFER
Yaowu Xuc27fc142016-08-22 16:08:15 -07003729 }
3730
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003731#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003732 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003733 write_frame_size_with_refs(cpi, wb);
3734 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003735 write_frame_size(cm, frame_size_override_flag, wb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003736 }
3737#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003738 write_frame_size_with_refs(cpi, wb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02003739#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003740
RogerZhou3b635242017-09-19 10:06:46 -07003741#if CONFIG_AMVR
RogerZhou10a03802017-10-26 11:49:48 -07003742 if (cm->cur_frame_force_integer_mv) {
3743 cm->allow_high_precision_mv = 0;
3744 } else {
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003745#if !CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003746 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003747#endif // !CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003748 }
3749#else
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003750#if !CONFIG_EIGHTH_PEL_MV_ONLY
Yaowu Xuf883b422016-08-30 14:01:10 -07003751 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
Debargha Mukherjeeb2147752017-11-01 07:00:45 -07003752#endif // !CONFIG_EIGHTH_PEL_MV_ONLY
RogerZhou10a03802017-10-26 11:49:48 -07003753#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003754 fix_interp_filter(cm, cpi->td.counts);
Angie Chiang5678ad92016-11-21 09:38:40 -08003755 write_frame_interp_filter(cm->interp_filter, wb);
Jingning Hane17ebe92017-11-03 15:25:42 -07003756 if (frame_might_use_prev_frame_mvs(cm))
3757 aom_wb_write_bit(wb, cm->use_ref_frame_mvs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003758 }
3759 }
3760
Jingning Hanc723b342017-08-24 11:19:46 -07003761 if (cm->show_frame == 0) {
3762 int arf_offset = AOMMIN(
3763 (MAX_GF_INTERVAL - 1),
3764 cpi->twopass.gf_group.arf_src_offset[cpi->twopass.gf_group.index]);
Jingning Hanc723b342017-08-24 11:19:46 -07003765 int brf_offset =
3766 cpi->twopass.gf_group.brf_src_offset[cpi->twopass.gf_group.index];
3767
3768 arf_offset = AOMMIN((MAX_GF_INTERVAL - 1), arf_offset + brf_offset);
Cheng Chend300f0e2017-12-01 10:46:23 -08003769 aom_wb_write_literal(wb, arf_offset, FRAME_OFFSET_BITS);
Jingning Hanc723b342017-08-24 11:19:46 -07003770 }
Jingning Hanc723b342017-08-24 11:19:46 -07003771
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003772#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003773 if (cm->seq_params.frame_id_numbers_present_flag) {
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003774 cm->refresh_mask =
3775 cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
3776 }
3777#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003778
Rupert Swarbrick84b05ac2017-10-27 18:10:53 +01003779#if CONFIG_EXT_TILE
3780 const int might_bwd_adapt =
3781 !(cm->error_resilient_mode || cm->large_scale_tile);
3782#else
3783 const int might_bwd_adapt = !cm->error_resilient_mode;
3784#endif // CONFIG_EXT_TILE
3785 if (might_bwd_adapt) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003786 aom_wb_write_bit(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003787 wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
3788 }
Thomas Daededa4d8b92017-06-05 15:44:14 -07003789#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuf883b422016-08-30 14:01:10 -07003790 aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);
Thomas Daededa4d8b92017-06-05 15:44:14 -07003791#endif
Cyril Concolato3b5afc12017-12-15 12:54:15 -08003792
3793#if CONFIG_TILE_INFO_FIRST
3794 write_tile_info(cm, wb);
3795#endif
3796
Yaowu Xuc27fc142016-08-22 16:08:15 -07003797 encode_loopfilter(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003798 encode_quantization(cm, wb);
3799 encode_segmentation(cm, xd, wb);
Arild Fuldseth07441162016-08-15 15:07:52 +02003800 {
Thomas Davies28444be2017-10-13 18:12:25 +01003801 int delta_q_allowed = 1;
3802#if !CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003803 int i;
3804 struct segmentation *const seg = &cm->seg;
3805 int segment_quantizer_active = 0;
3806 for (i = 0; i < MAX_SEGMENTS; i++) {
3807 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
3808 segment_quantizer_active = 1;
3809 }
3810 }
Thomas Davies28444be2017-10-13 18:12:25 +01003811 delta_q_allowed = !segment_quantizer_active;
3812#endif
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01003813
Yaowu Xu288f8162017-10-10 15:03:22 -07003814 if (cm->delta_q_present_flag) assert(cm->base_qindex > 0);
Thomas Davies28444be2017-10-13 18:12:25 +01003815 // Segment quantizer and delta_q both allowed if CONFIG_EXT_DELTA_Q
3816 if (delta_q_allowed == 1 && cm->base_qindex > 0) {
Arild Fuldseth07441162016-08-15 15:07:52 +02003817 aom_wb_write_bit(wb, cm->delta_q_present_flag);
3818 if (cm->delta_q_present_flag) {
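        // Note (added for clarity): for the power-of-two resolutions used
        // here, OD_ILOG_NZ(delta_q_res) - 1 is simply log2(delta_q_res),
        // so a resolution of 1/2/4/8 is coded as 0/1/2/3 in two bits.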
Thomas Daviesf6936102016-09-05 16:51:31 +01003819 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
Arild Fuldseth07441162016-08-15 15:07:52 +02003820 xd->prev_qindex = cm->base_qindex;
Fangwen Fu231fe422017-04-24 17:52:29 -07003821#if CONFIG_EXT_DELTA_Q
Fangwen Fu231fe422017-04-24 17:52:29 -07003822 aom_wb_write_bit(wb, cm->delta_lf_present_flag);
3823 if (cm->delta_lf_present_flag) {
3824 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_lf_res) - 1, 2);
Cheng Chen880166a2017-10-02 17:48:48 -07003825 xd->prev_delta_lf_from_base = 0;
Cheng Chena97394f2017-09-27 15:05:14 -07003826#if CONFIG_LOOPFILTER_LEVEL
Cheng Chen880166a2017-10-02 17:48:48 -07003827 aom_wb_write_bit(wb, cm->delta_lf_multi);
Imdad Sardharwallaf74b4ab2018-02-20 17:22:42 +00003828 const int frame_lf_count =
3829 av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
3830 for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id)
Cheng Chena97394f2017-09-27 15:05:14 -07003831 xd->prev_delta_lf[lf_id] = 0;
3832#endif // CONFIG_LOOPFILTER_LEVEL
Fangwen Fu231fe422017-04-24 17:52:29 -07003833 }
3834#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003835 }
3836 }
3837 }
Thomas Daedef636d5c2017-06-29 13:48:27 -07003838 if (!cm->all_lossless) {
3839 encode_cdef(cm, wb);
3840 }
Thomas Daedef636d5c2017-06-29 13:48:27 -07003841#if CONFIG_LOOP_RESTORATION
3842 encode_restoration_mode(cm, wb);
3843#endif // CONFIG_LOOP_RESTORATION
3844 write_tx_mode(cm, &cm->tx_mode, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003845
3846 if (cpi->allow_comp_inter_inter) {
3847 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003848
Yaowu Xuf883b422016-08-30 14:01:10 -07003849 aom_wb_write_bit(wb, use_hybrid_pred);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003850 }
Zoe Liu4b847e12017-12-07 12:44:45 -08003851
3852#if CONFIG_EXT_SKIP
3853 if (cm->is_skip_mode_allowed) aom_wb_write_bit(wb, cm->skip_mode_flag);
3854#endif // CONFIG_EXT_SKIP
3855
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07003856 write_compound_tools(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003857
Sarah Parkere68a3e42017-02-16 14:03:24 -08003858 aom_wb_write_bit(wb, cm->reduced_tx_set_used);
Sarah Parkere68a3e42017-02-16 14:03:24 -08003859
Sarah Parkerf289f9f2017-09-12 18:50:02 -07003860 if (!frame_is_intra_only(cm)) write_global_motion(cpi, wb);
Sarah Parker3e579a62017-08-23 16:53:20 -07003861
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003862#if CONFIG_FILM_GRAIN
3863 if (cm->film_grain_params_present && cm->show_frame)
3864 write_film_grain_params(cm, wb);
3865#endif
3866
Cyril Concolato3b5afc12017-12-15 12:54:15 -08003867#if !CONFIG_TILE_INFO_FIRST
Yaowu Xuc27fc142016-08-22 16:08:15 -07003868 write_tile_info(cm, wb);
Cyril Concolato3b5afc12017-12-15 12:54:15 -08003869#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003870}
3871
Zoe Liu48acf882018-02-21 12:16:50 -08003872#else // CONFIG_OBU
3873
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003874// New function based on HLS R18
3875static void write_uncompressed_header_obu(AV1_COMP *cpi,
Jingning Handa11e692017-12-19 08:45:08 -08003876#if CONFIG_EXT_TILE
3877 struct aom_write_bit_buffer *saved_wb,
3878#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003879 struct aom_write_bit_buffer *wb) {
3880 AV1_COMMON *const cm = &cpi->common;
3881 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
3882
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003883 // NOTE: By default all coded frames to be used as a reference
3884 cm->is_reference_frame = 1;
3885
3886 if (cm->show_existing_frame) {
3887 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
3888 const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];
3889
3890 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
3891 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3892 "Buffer %d does not contain a reconstructed frame",
3893 frame_to_show);
3894 }
3895 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
3896
3897 aom_wb_write_bit(wb, 1); // show_existing_frame
3898 aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
3899
3900#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003901 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003902 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003903 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
3904 aom_wb_write_literal(wb, display_frame_id, frame_id_len);
3905 /* Add a zero byte to prevent emulation of superframe marker */
3906 /* Same logic as when terminating the entropy coder */
3907 /* Consider having this logic in only one place */
3908 aom_wb_write_literal(wb, 0, 8);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003909 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003910#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003911
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08003912#if CONFIG_FILM_GRAIN
3913 if (cm->film_grain_params_present && cm->show_frame) {
3914 int flip_back_update_parameters_flag = 0;
3915 if (cm->frame_type == KEY_FRAME &&
3916 cm->film_grain_params.update_parameters == 0) {
3917 cm->film_grain_params.update_parameters = 1;
3918 flip_back_update_parameters_flag = 1;
3919 }
3920 write_film_grain_params(cm, wb);
3921
3922 if (flip_back_update_parameters_flag)
3923 cm->film_grain_params.update_parameters = 0;
3924 }
3925#endif
3926
Zoe Liub4991202017-12-21 15:31:06 -08003927#if CONFIG_FWD_KF
3928 if (cm->reset_decoder_state && !frame_bufs[frame_to_show].intra_only) {
3929 aom_internal_error(
3930 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3931 "show_existing_frame to reset state on non-intra_only");
3932 }
3933 aom_wb_write_bit(wb, cm->reset_decoder_state);
3934#endif // CONFIG_FWD_KF
3935
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003936 return;
3937 } else {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003938 aom_wb_write_bit(wb, 0); // show_existing_frame
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003939 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003940
3941 cm->frame_type = cm->intra_only ? INTRA_ONLY_FRAME : cm->frame_type;
3942 aom_wb_write_literal(wb, cm->frame_type, 2);
3943
3944 if (cm->intra_only) cm->frame_type = INTRA_ONLY_FRAME;
3945
3946 aom_wb_write_bit(wb, cm->show_frame);
3947 aom_wb_write_bit(wb, cm->error_resilient_mode);
3948
Joe Youngdb5eb4c2018-02-16 17:30:40 -08003949#if CONFIG_INTRA_EDGE2
3950 if (frame_is_intra_only(cm)) {
3951 aom_wb_write_bit(wb, cm->disable_intra_edge_filter);
3952 }
3953#endif // CONFIG_INTRA_EDGE2
3954
Imdad Sardharwallabf2cc012018-02-09 17:32:10 +00003955 if (cm->seq_params.force_screen_content_tools == 2) {
3956 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
3957 } else {
3958 assert(cm->allow_screen_content_tools ==
3959 cm->seq_params.force_screen_content_tools);
3960 }
3961
3962#if CONFIG_AMVR
3963 if (cm->allow_screen_content_tools) {
3964 if (cm->seq_params.force_integer_mv == 2) {
3965 aom_wb_write_bit(wb, cm->cur_frame_force_integer_mv);
3966 } else {
3967 assert(cm->cur_frame_force_integer_mv == cm->seq_params.force_integer_mv);
3968 }
3969 } else {
3970 assert(cm->cur_frame_force_integer_mv == 0);
3971 }
3972#endif // CONFIG_AMVR
3973
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003974#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01003975 cm->invalid_delta_frame_id_minus1 = 0;
3976 if (cm->seq_params.frame_id_numbers_present_flag) {
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02003977 int frame_id_len = cm->seq_params.frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01003978 aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
Debargha Mukherjee778023d2017-09-26 17:50:27 -07003979 }
3980#endif // CONFIG_REFERENCE_BUFFER
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003981
3982#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01003983 if (cm->width > cm->seq_params.max_frame_width ||
3984 cm->height > cm->seq_params.max_frame_height) {
3985 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
3986 "Frame dimensions are larger than the maximum values");
3987 }
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003988 int frame_size_override_flag =
3989 (cm->width != cm->seq_params.max_frame_width ||
3990 cm->height != cm->seq_params.max_frame_height);
3991 aom_wb_write_bit(wb, frame_size_override_flag);
3992#endif
3993
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003994 if (cm->frame_type == KEY_FRAME) {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003995#if CONFIG_FRAME_SIZE
3996 write_frame_size(cm, frame_size_override_flag, wb);
3997#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003998 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01003999#endif
Hui Suad7536b2017-12-13 15:48:11 -08004000#if CONFIG_INTRABC
David Barker218556e2018-02-14 14:23:12 +00004001#if CONFIG_HORZONLY_FRAME_SUPERRES
4002 assert(av1_superres_unscaled(cm) ||
4003 !(cm->allow_intrabc && NO_FILTER_FOR_IBC));
4004 if (cm->allow_screen_content_tools &&
4005 (av1_superres_unscaled(cm) || !NO_FILTER_FOR_IBC))
4006#else
4007 if (cm->allow_screen_content_tools)
4008#endif
4009 aom_wb_write_bit(wb, cm->allow_intrabc);
Hui Suad7536b2017-12-13 15:48:11 -08004010#endif // CONFIG_INTRABC
Hui Su1cb1c002018-02-05 18:21:20 -08004011#if CONFIG_CDF_UPDATE_MODE
4012 aom_wb_write_literal(wb, cm->cdf_update_mode, 2);
4013#endif // CONFIG_CDF_UPDATE_MODE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004014 } else if (cm->frame_type == INTRA_ONLY_FRAME) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004015#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4016 if (!cm->error_resilient_mode) {
4017 if (cm->intra_only) {
4018 aom_wb_write_bit(wb,
4019 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4020 }
4021 }
4022#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004023 cpi->refresh_frame_mask = get_refresh_mask(cpi);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004024
4025 if (cm->intra_only) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004026 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004027#if CONFIG_FRAME_SIZE
4028 write_frame_size(cm, frame_size_override_flag, wb);
4029#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004030 write_frame_size(cm, wb);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004031#endif
Hui Suad7536b2017-12-13 15:48:11 -08004032#if CONFIG_INTRABC
David Barker218556e2018-02-14 14:23:12 +00004033#if CONFIG_HORZONLY_FRAME_SUPERRES
4034 assert(av1_superres_unscaled(cm) ||
4035 !(cm->allow_intrabc && NO_FILTER_FOR_IBC));
4036 if (cm->allow_screen_content_tools &&
4037 (av1_superres_unscaled(cm) || !NO_FILTER_FOR_IBC))
4038#else
Hui Suad7536b2017-12-13 15:48:11 -08004039 if (cm->allow_screen_content_tools)
David Barker218556e2018-02-14 14:23:12 +00004040#endif
Hui Suad7536b2017-12-13 15:48:11 -08004041 aom_wb_write_bit(wb, cm->allow_intrabc);
4042#endif // CONFIG_INTRABC
Hui Su1cb1c002018-02-05 18:21:20 -08004043#if CONFIG_CDF_UPDATE_MODE
4044 aom_wb_write_literal(wb, cm->cdf_update_mode, 2);
4045#endif // CONFIG_CDF_UPDATE_MODE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004046 }
Zoe Liu48acf882018-02-21 12:16:50 -08004047 } else if (cm->frame_type == INTER_FRAME || cm->frame_type == S_FRAME) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004048 MV_REFERENCE_FRAME ref_frame;
Zoe Liu48acf882018-02-21 12:16:50 -08004049
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004050#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4051 if (!cm->error_resilient_mode) {
4052 aom_wb_write_bit(wb, cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
4053 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
4054 aom_wb_write_bit(wb,
4055 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4056 }
4057#endif
4058
Zoe Liu48acf882018-02-21 12:16:50 -08004059 if (cm->frame_type == INTER_FRAME) {
4060 cpi->refresh_frame_mask = get_refresh_mask(cpi);
4061 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
4062 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004063
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004064 if (!cpi->refresh_frame_mask) {
4065 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
4066 // will not be used as a reference
4067 cm->is_reference_frame = 0;
4068 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004069
4070 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4071 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
4072 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
4073 REF_FRAMES_LOG2);
Zoe Liu48acf882018-02-21 12:16:50 -08004074 if (cm->frame_type == S_FRAME) {
4075 assert(cm->ref_frame_sign_bias[ref_frame] == 0);
4076 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004077#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004078 if (cm->seq_params.frame_id_numbers_present_flag) {
4079 int i = get_ref_frame_map_idx(cpi, ref_frame);
Frederic Barbiere83fcfe2017-10-13 10:37:50 +02004080 int frame_id_len = cm->seq_params.frame_id_length;
4081 int diff_len = cm->seq_params.delta_frame_id_length;
David Barker5e70a112017-10-03 14:28:17 +01004082 int delta_frame_id_minus1 =
4083 ((cm->current_frame_id - cm->ref_frame_id[i] +
4084 (1 << frame_id_len)) %
4085 (1 << frame_id_len)) -
4086 1;
4087 if (delta_frame_id_minus1 < 0 ||
4088 delta_frame_id_minus1 >= (1 << diff_len))
4089 cm->invalid_delta_frame_id_minus1 = 1;
4090 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004091 }
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004092#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004093 }
4094
4095#if CONFIG_FRAME_SIZE
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004096 if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004097 write_frame_size_with_refs(cpi, wb);
4098 } else {
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01004099 write_frame_size(cm, frame_size_override_flag, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004100 }
4101#else
4102 write_frame_size_with_refs(cpi, wb);
4103#endif
4104
4105#if CONFIG_AMVR
RogerZhou0bf36902017-12-19 13:51:10 -08004106 if (cm->cur_frame_force_integer_mv) {
4107 cm->allow_high_precision_mv = 0;
4108 } else {
4109 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
4110 }
4111#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004112 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
RogerZhou0bf36902017-12-19 13:51:10 -08004113#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004114 fix_interp_filter(cm, cpi->td.counts);
4115 write_frame_interp_filter(cm->interp_filter, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004116 if (frame_might_use_prev_frame_mvs(cm)) {
Jingning Han923f8272017-12-14 10:50:12 -08004117 aom_wb_write_bit(wb, cm->use_ref_frame_mvs);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004118 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004119 }
4120
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004121 if (cm->show_frame == 0) {
4122 int arf_offset = AOMMIN(
4123 (MAX_GF_INTERVAL - 1),
4124 cpi->twopass.gf_group.arf_src_offset[cpi->twopass.gf_group.index]);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004125 int brf_offset =
4126 cpi->twopass.gf_group.brf_src_offset[cpi->twopass.gf_group.index];
4127
4128 arf_offset = AOMMIN((MAX_GF_INTERVAL - 1), arf_offset + brf_offset);
Cheng Chend300f0e2017-12-01 10:46:23 -08004129 aom_wb_write_literal(wb, arf_offset, FRAME_OFFSET_BITS);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004130 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004131
4132#if CONFIG_REFERENCE_BUFFER
David Barker5e70a112017-10-03 14:28:17 +01004133 if (cm->seq_params.frame_id_numbers_present_flag) {
Debargha Mukherjee778023d2017-09-26 17:50:27 -07004134 cm->refresh_mask =
4135 cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
4136 }
4137#endif // CONFIG_REFERENCE_BUFFER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004138
Jingning Handa11e692017-12-19 08:45:08 -08004139#if CONFIG_EXT_TILE
4140 const int might_bwd_adapt =
4141 !(cm->error_resilient_mode || cm->large_scale_tile);
4142#else
4143 const int might_bwd_adapt = !cm->error_resilient_mode;
4144#endif // CONFIG_EXT_TILE
4145
4146 if (might_bwd_adapt) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004147 aom_wb_write_bit(
4148 wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
4149 }
4150#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
4151 aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);
4152#endif
Cyril Concolato3b5afc12017-12-15 12:54:15 -08004153#if CONFIG_TILE_INFO_FIRST
4154 write_tile_info(cm, wb);
4155#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004156 encode_loopfilter(cm, wb);
4157 encode_quantization(cm, wb);
4158 encode_segmentation(cm, xd, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004159 {
Thomas Davies28444be2017-10-13 18:12:25 +01004160 int delta_q_allowed = 1;
4161#if !CONFIG_EXT_DELTA_Q
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004162 int i;
4163 struct segmentation *const seg = &cm->seg;
4164 int segment_quantizer_active = 0;
4165 for (i = 0; i < MAX_SEGMENTS; i++) {
4166 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
4167 segment_quantizer_active = 1;
4168 }
4169 }
Thomas Davies28444be2017-10-13 18:12:25 +01004170 delta_q_allowed = !segment_quantizer_active;
4171#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004172
4173 if (cm->delta_q_present_flag)
Thomas Davies28444be2017-10-13 18:12:25 +01004174 assert(delta_q_allowed == 1 && cm->base_qindex > 0);
4175 if (delta_q_allowed == 1 && cm->base_qindex > 0) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004176 aom_wb_write_bit(wb, cm->delta_q_present_flag);
4177 if (cm->delta_q_present_flag) {
4178 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
4179 xd->prev_qindex = cm->base_qindex;
4180#if CONFIG_EXT_DELTA_Q
Hui Su22a51d92018-01-16 13:02:18 -08004181#if CONFIG_INTRABC
4182 if (cm->allow_intrabc && NO_FILTER_FOR_IBC)
4183 assert(cm->delta_lf_present_flag == 0);
4184 else
4185#endif // CONFIG_INTRABC
4186 aom_wb_write_bit(wb, cm->delta_lf_present_flag);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004187 if (cm->delta_lf_present_flag) {
4188 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_lf_res) - 1, 2);
Yaowu Xub02d0b12017-12-15 01:32:34 +00004189 xd->prev_delta_lf_from_base = 0;
Cheng Chena97394f2017-09-27 15:05:14 -07004190#if CONFIG_LOOPFILTER_LEVEL
Yaowu Xub02d0b12017-12-15 01:32:34 +00004191 aom_wb_write_bit(wb, cm->delta_lf_multi);
Imdad Sardharwallaf74b4ab2018-02-20 17:22:42 +00004192 const int frame_lf_count =
4193 av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
4194 for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id)
Cheng Chena97394f2017-09-27 15:05:14 -07004195 xd->prev_delta_lf[lf_id] = 0;
4196#endif // CONFIG_LOOPFILTER_LEVEL
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004197 }
4198#endif // CONFIG_EXT_DELTA_Q
4199 }
4200 }
4201 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004202 if (!cm->all_lossless) {
4203 encode_cdef(cm, wb);
4204 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004205#if CONFIG_LOOP_RESTORATION
4206 encode_restoration_mode(cm, wb);
4207#endif // CONFIG_LOOP_RESTORATION
4208 write_tx_mode(cm, &cm->tx_mode, wb);
4209
4210 if (cpi->allow_comp_inter_inter) {
4211 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004212
4213 aom_wb_write_bit(wb, use_hybrid_pred);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004214 }
Zoe Liu4b847e12017-12-07 12:44:45 -08004215
4216#if CONFIG_EXT_SKIP
4217#if 0
4218 printf("\n[ENCODER] Frame=%d, is_skip_mode_allowed=%d, skip_mode_flag=%d\n\n",
4219 (int)cm->frame_offset, cm->is_skip_mode_allowed, cm->skip_mode_flag);
4220#endif // 0
4221 if (cm->is_skip_mode_allowed) aom_wb_write_bit(wb, cm->skip_mode_flag);
4222#endif // CONFIG_EXT_SKIP
4223
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004224 write_compound_tools(cm, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004225
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004226 aom_wb_write_bit(wb, cm->reduced_tx_set_used);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004227
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004228 if (!frame_is_intra_only(cm)) write_global_motion(cpi, wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004229
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08004230#if CONFIG_FILM_GRAIN
4231 if (cm->film_grain_params_present && cm->show_frame) {
4232 int flip_back_update_parameters_flag = 0;
4233 if (cm->frame_type == KEY_FRAME &&
4234 cm->film_grain_params.update_parameters == 0) {
4235 cm->film_grain_params.update_parameters = 1;
4236 flip_back_update_parameters_flag = 1;
4237 }
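    /* The flip above forces a full film grain parameter set to be written on
     * key frames, presumably because a key frame cannot inherit previously
     * signalled grain parameters; the flag is restored right after
     * write_film_grain_params() below.
     */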
4238 write_film_grain_params(cm, wb);
4239
4240 if (flip_back_update_parameters_flag)
4241 cm->film_grain_params.update_parameters = 0;
4242 }
4243#endif
4244
Cyril Concolato3b5afc12017-12-15 12:54:15 -08004245#if !CONFIG_TILE_INFO_FIRST
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004246 write_tile_info(cm, wb);
Jingning Handa11e692017-12-19 08:45:08 -08004247
4248#if CONFIG_EXT_TILE
4249 *saved_wb = *wb;
4250 // Write tile size magnitudes
Jingning Han2e7d66d2018-01-02 11:27:48 -08004251 if (cm->tile_rows * cm->tile_cols > 1 && cm->large_scale_tile) {
Jingning Handa11e692017-12-19 08:45:08 -08004252 // Note that the last item in the uncompressed header is the data
4253 // describing tile configuration.
4254 // Number of bytes in tile column size - 1
4255 aom_wb_write_literal(wb, 0, 2);
4256
4257 // Number of bytes in tile size - 1
4258 aom_wb_write_literal(wb, 0, 2);
4259 }
Cyril Concolato3b5afc12017-12-15 12:54:15 -08004260#endif
Jingning Handa11e692017-12-19 08:45:08 -08004261#endif // !CONFIG_TILE_INFO_FIRST
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004262}
4263#endif // CONFIG_OBU
4264
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004265#if !CONFIG_OBU || CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004266static int choose_size_bytes(uint32_t size, int spare_msbs) {
4267 // Choose the number of bytes required to represent size, without
4268 // using the 'spare_msbs' number of most significant bits.
4269
4270  // Make sure we will fit in 4 bytes to start with.
4271 if (spare_msbs > 0 && size >> (32 - spare_msbs) != 0) return -1;
4272
4273 // Normalise to 32 bits
4274 size <<= spare_msbs;
4275
4276 if (size >> 24 != 0)
4277 return 4;
4278 else if (size >> 16 != 0)
4279 return 3;
4280 else if (size >> 8 != 0)
4281 return 2;
4282 else
4283 return 1;
4284}
4285
4286static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) {
4287 switch (sz) {
4288 case 1: dst[0] = (uint8_t)(val & 0xff); break;
4289 case 2: mem_put_le16(dst, val); break;
4290 case 3: mem_put_le24(dst, val); break;
4291 case 4: mem_put_le32(dst, val); break;
James Zern06c372d2017-04-20 16:08:29 -07004292 default: assert(0 && "Invalid size"); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004293 }
4294}
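#if 0
/* Usage sketch (illustrative only; never called): pick the smallest
 * little-endian field that can hold a tile size with choose_size_bytes(), then
 * write it with mem_put_varsize(). The values here are made up for the
 * example.
 */
static void varsize_example(void) {
  uint8_t buf[4];
  const uint32_t tile_size = 0x012345;             // needs 3 bytes
  const int sz = choose_size_bytes(tile_size, 0);  // -> 3
  mem_put_varsize(buf, sz, tile_size);             // buf = { 0x45, 0x23, 0x01 }
}
#endif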
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004295
Yaowu Xuf883b422016-08-30 14:01:10 -07004296static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004297 const uint32_t data_size, const uint32_t max_tile_size,
4298 const uint32_t max_tile_col_size,
4299 int *const tile_size_bytes,
4300 int *const tile_col_size_bytes) {
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004301 // Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
4302 int tsb;
4303 int tcsb;
4304
Yaowu Xuc27fc142016-08-22 16:08:15 -07004305#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004306 if (cm->large_scale_tile) {
4307 // The top bit in the tile size field indicates tile copy mode, so we
4308 // have 1 less bit to code the tile size
4309 tsb = choose_size_bytes(max_tile_size, 1);
4310 tcsb = choose_size_bytes(max_tile_col_size, 0);
4311 } else {
4312#endif // CONFIG_EXT_TILE
4313 tsb = choose_size_bytes(max_tile_size, 0);
4314 tcsb = 4; // This is ignored
4315 (void)max_tile_col_size;
4316#if CONFIG_EXT_TILE
4317 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004318#endif // CONFIG_EXT_TILE
4319
4320 assert(tsb > 0);
4321 assert(tcsb > 0);
4322
4323 *tile_size_bytes = tsb;
4324 *tile_col_size_bytes = tcsb;
4325
4326 if (tsb == 4 && tcsb == 4) {
4327 return data_size;
4328 } else {
4329 uint32_t wpos = 0;
4330 uint32_t rpos = 0;
4331
4332#if CONFIG_EXT_TILE
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004333 if (cm->large_scale_tile) {
4334 int tile_row;
4335 int tile_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004336
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004337 for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
4338      // Every tile column except the last has a column size header
4339 if (tile_col < cm->tile_cols - 1) {
4340 uint32_t tile_col_size = mem_get_le32(dst + rpos);
4341 rpos += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004342
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004343 // Adjust the tile column size by the number of bytes removed
4344 // from the tile size fields.
4345 tile_col_size -= (4 - tsb) * cm->tile_rows;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004346
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004347 mem_put_varsize(dst + wpos, tcsb, tile_col_size);
4348 wpos += tcsb;
4349 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004350
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004351 for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
4352        // Every tile row, including the last one, has a size header
4353 uint32_t tile_header = mem_get_le32(dst + rpos);
4354 rpos += 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004355
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004356        // If this is a copy tile, shift the copy flag (the MSB) down to the
4357        // top bit of the narrower size field; there is no tile data to copy.
4358 if (tile_header >> 31 != 0) {
4359 if (tsb < 4) tile_header >>= 32 - 8 * tsb;
4360 mem_put_varsize(dst + wpos, tsb, tile_header);
4361 wpos += tsb;
4362 } else {
4363 mem_put_varsize(dst + wpos, tsb, tile_header);
4364 wpos += tsb;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004365
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004366 memmove(dst + wpos, dst + rpos, tile_header);
4367 rpos += tile_header;
4368 wpos += tile_header;
4369 }
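        /* Worked example (illustrative): with tsb = 1, a copy-tile header of
         * 0x82000000 (offset 2, copy flag in bit 31) is shifted right by
         * 32 - 8 * 1 = 24 bits to 0x82, so the flag lands in the top bit of
         * the 1-byte field and the offset in the low 7 bits.
         */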
Yaowu Xuc27fc142016-08-22 16:08:15 -07004370 }
4371 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004372 } else {
4373#endif // CONFIG_EXT_TILE
4374 const int n_tiles = cm->tile_cols * cm->tile_rows;
4375 int n;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004376
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004377 for (n = 0; n < n_tiles; n++) {
4378 int tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004379
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004380 if (n == n_tiles - 1) {
4381 tile_size = data_size - rpos;
4382 } else {
4383 tile_size = mem_get_le32(dst + rpos);
4384 rpos += 4;
4385 mem_put_varsize(dst + wpos, tsb, tile_size);
4386 wpos += tsb;
4387 }
4388
4389 memmove(dst + wpos, dst + rpos, tile_size);
4390
4391 rpos += tile_size;
4392 wpos += tile_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004393 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004394#if CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004395 }
4396#endif // CONFIG_EXT_TILE
4397
4398 assert(rpos > wpos);
4399 assert(rpos == data_size);
4400
4401 return wpos;
4402 }
4403}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004404#endif
4405
4406#if CONFIG_OBU
Soo-Chul Han38427e82017-09-27 15:06:13 -04004407
4408uint32_t write_obu_header(OBU_TYPE obu_type, int obu_extension,
4409 uint8_t *const dst) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004410 struct aom_write_bit_buffer wb = { dst, 0 };
4411 uint32_t size = 0;
4412
Soo-Chul Han38427e82017-09-27 15:06:13 -04004413  // The first bit is the obu_forbidden_bit, per R19.
4414 aom_wb_write_literal(&wb, 0, 1);
4415 aom_wb_write_literal(&wb, (int)obu_type, 4);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004416 aom_wb_write_literal(&wb, 0, 2);
4417 aom_wb_write_literal(&wb, obu_extension ? 1 : 0, 1);
4418 if (obu_extension) {
4419 aom_wb_write_literal(&wb, obu_extension & 0xFF, 8);
4420 }
4421
4422 size = aom_wb_bytes_written(&wb);
4423 return size;
4424}
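/* Byte layout produced by write_obu_header() (descriptive sketch only; bit
 * positions follow from aom_wb_write_literal() filling each byte MSB first):
 *   byte 0, bit 7    : obu_forbidden_bit (0)
 *   byte 0, bits 6-3 : obu_type
 *   byte 0, bits 2-1 : reserved (written as 0)
 *   byte 0, bit 0    : obu_extension_flag
 *   byte 1           : obu_extension_header, present only when the flag is set
 */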
4425
Tom Finegan41150ad2018-01-23 11:42:55 -08004426#if CONFIG_OBU_SIZING
4427int write_uleb_obu_size(uint32_t obu_size, uint8_t *dest) {
4428 size_t coded_obu_size = 0;
4429
Tom Fineganf4129062018-02-08 08:32:42 -08004430 if (aom_uleb_encode(obu_size, sizeof(obu_size), dest, &coded_obu_size) != 0)
Tom Finegan41150ad2018-01-23 11:42:55 -08004431 return AOM_CODEC_ERROR;
Tom Finegan41150ad2018-01-23 11:42:55 -08004432
4433 return AOM_CODEC_OK;
4434}
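#if 0
/* Illustrative sketch of the CONFIG_OBU_SIZING length field (never called):
 * a 200-byte payload needs a 2-byte ULEB128 prefix ({ 0xC8, 0x01 }), which is
 * why callers first move the payload up by aom_uleb_size_in_bytes() bytes.
 */
static void uleb_length_field_example(uint8_t *data, uint32_t obu_size) {
  const size_t length_field_size = aom_uleb_size_in_bytes(obu_size);
  memmove(data + length_field_size, data, obu_size);
  if (write_uleb_obu_size(obu_size, data) != AOM_CODEC_OK) assert(0);
}
#endif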
4435#endif // CONFIG_OBU_SIZING
4436
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004437static uint32_t write_sequence_header_obu(AV1_COMP *cpi, uint8_t *const dst
4438#if CONFIG_SCALABILITY
4439 ,
4440 uint8_t enhancement_layers_cnt) {
4441#else
Johannb0ef6ff2018-02-08 14:32:21 -08004442) {
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004443#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004444 AV1_COMMON *const cm = &cpi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004445 struct aom_write_bit_buffer wb = { dst, 0 };
4446 uint32_t size = 0;
4447
4448 write_profile(cm->profile, &wb);
4449
4450 aom_wb_write_literal(&wb, 0, 4);
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004451#if CONFIG_SCALABILITY
4452 aom_wb_write_literal(&wb, enhancement_layers_cnt, 2);
4453 int i;
4454 for (i = 1; i <= enhancement_layers_cnt; i++) {
4455 aom_wb_write_literal(&wb, 0, 4);
4456 }
4457#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004458
Arild Fuldseth (arilfuld)b6380742017-11-03 09:42:05 +01004459 write_sequence_header(cpi, &wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004460
4461 // color_config
4462 write_bitdepth_colorspace_sampling(cm, &wb);
4463
Andrey Norkin28e9ce22018-01-08 10:11:21 -08004464#if CONFIG_TIMING_INFO_IN_SEQ_HEADERS
4465 // timing_info
4466 write_timing_info_header(cm, &wb);
4467#endif
4468
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08004469#if CONFIG_FILM_GRAIN
4470 aom_wb_write_bit(&wb, cm->film_grain_params_present);
4471#endif
4472
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004473 size = aom_wb_bytes_written(&wb);
4474 return size;
4475}
4476
Jingning Handa11e692017-12-19 08:45:08 -08004477static uint32_t write_frame_header_obu(AV1_COMP *cpi,
4478#if CONFIG_EXT_TILE
4479 struct aom_write_bit_buffer *saved_wb,
4480#endif
4481 uint8_t *const dst) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004482 AV1_COMMON *const cm = &cpi->common;
4483 struct aom_write_bit_buffer wb = { dst, 0 };
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004484 uint32_t total_size = 0;
Yunqing Wange7142e12018-01-17 11:20:12 -08004485 uint32_t uncompressed_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004486
Jingning Handa11e692017-12-19 08:45:08 -08004487 write_uncompressed_header_obu(cpi,
4488#if CONFIG_EXT_TILE
4489 saved_wb,
4490#endif
4491 &wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004492
4493 if (cm->show_existing_frame) {
4494 total_size = aom_wb_bytes_written(&wb);
4495 return total_size;
4496 }
4497
Cyril Concolato3b5afc12017-12-15 12:54:15 -08004498#if !CONFIG_TILE_INFO_FIRST
Jingning Handa11e692017-12-19 08:45:08 -08004499// Write the tile size length code (always 4 bytes for now; the value 3 means 4-byte sizes)
4500#if CONFIG_EXT_TILE
4501 if (!cm->large_scale_tile)
4502#endif
4503 aom_wb_write_literal(&wb, 3, 2);
Cyril Concolato3b5afc12017-12-15 12:54:15 -08004504#endif
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004505
Yunqing Wange7142e12018-01-17 11:20:12 -08004506 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
4507 total_size = uncompressed_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004508 return total_size;
4509}
4510
4511static uint32_t write_tile_group_header(uint8_t *const dst, int startTile,
4512 int endTile, int tiles_log2) {
4513 struct aom_write_bit_buffer wb = { dst, 0 };
4514 uint32_t size = 0;
4515
4516 aom_wb_write_literal(&wb, startTile, tiles_log2);
4517 aom_wb_write_literal(&wb, endTile, tiles_log2);
4518
4519 size = aom_wb_bytes_written(&wb);
4520 return size;
4521}
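/* Example (illustrative): with a 2x2 tile layout, n_log2_tiles is
 * log2_tile_rows + log2_tile_cols = 2, so a tile group covering tiles 0..3
 * writes startTile = 0 and endTile = 3 as two 2-bit fields. When
 * n_log2_tiles is 0 the caller skips this header entirely.
 */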
4522
4523static uint32_t write_tiles_in_tg_obus(AV1_COMP *const cpi, uint8_t *const dst,
4524 unsigned int *max_tile_size,
4525 unsigned int *max_tile_col_size,
Jingning Handa11e692017-12-19 08:45:08 -08004526#if CONFIG_EXT_TILE
4527 struct aom_write_bit_buffer *saved_wb,
4528#endif
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004529 uint8_t obu_extension_header) {
Thomas Davies4822e142017-10-10 11:30:36 +01004530 AV1_COMMON *const cm = &cpi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004531 aom_writer mode_bc;
4532 int tile_row, tile_col;
4533 TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
4534 TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
4535 uint32_t total_size = 0;
4536 const int tile_cols = cm->tile_cols;
4537 const int tile_rows = cm->tile_rows;
4538 unsigned int tile_size = 0;
4539 const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
4540 // Fixed size tile groups for the moment
4541 const int num_tg_hdrs = cm->num_tg;
4542 const int tg_size =
4543#if CONFIG_EXT_TILE
4544 (cm->large_scale_tile)
4545 ? 1
4546 :
4547#endif // CONFIG_EXT_TILE
4548 (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
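  // For example (illustrative): 8 tiles split into cm->num_tg = 3 tile groups
  // gives tg_size = 3, i.e. groups of 3, 3 and 2 tiles.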
4549 int tile_count = 0;
4550 int curr_tg_data_size = 0;
4551 uint8_t *data = dst;
4552 int new_tg = 1;
4553#if CONFIG_EXT_TILE
4554 const int have_tiles = tile_cols * tile_rows > 1;
4555#endif
4556
Thomas Davies4822e142017-10-10 11:30:36 +01004557 cm->largest_tile_id = 0;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004558 *max_tile_size = 0;
4559 *max_tile_col_size = 0;
4560
4561#if CONFIG_EXT_TILE
4562 if (cm->large_scale_tile) {
Jingning Handa11e692017-12-19 08:45:08 -08004563 uint32_t tg_hdr_size =
4564 write_obu_header(OBU_TILE_GROUP, 0, data + PRE_OBU_SIZE_BYTES);
Jingning Han5737b4c2018-01-02 10:27:58 -08004565 tg_hdr_size += PRE_OBU_SIZE_BYTES;
Jingning Handa11e692017-12-19 08:45:08 -08004566 data += tg_hdr_size;
4567
4568 int tile_size_bytes;
4569 int tile_col_size_bytes;
4570
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004571 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
4572 TileInfo tile_info;
4573 const int is_last_col = (tile_col == tile_cols - 1);
4574 const uint32_t col_offset = total_size;
4575
4576 av1_tile_set_col(&tile_info, cm, tile_col);
4577
4578 // The last column does not have a column header
4579 if (!is_last_col) total_size += 4;
4580
4581 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
4582 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
4583 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
4584 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
4585 const int data_offset = have_tiles ? 4 : 0;
4586 const int tile_idx = tile_row * tile_cols + tile_col;
4587 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
4588 av1_tile_set_row(&tile_info, cm, tile_row);
4589
Jingning Handa11e692017-12-19 08:45:08 -08004590 buf->data = dst + total_size + tg_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004591
4592 // Is CONFIG_EXT_TILE = 1, every tile in the row has a header,
4593 // even for the last one, unless no tiling is used at all.
4594 total_size += data_offset;
4595 // Initialise tile context from the frame context
4596 this_tile->tctx = *cm->fc;
4597 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07004598 mode_bc.allow_update_cdf = !cm->large_scale_tile;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004599 aom_start_encode(&mode_bc, buf->data + data_offset);
4600 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
4601 assert(tok == tok_end);
4602 aom_stop_encode(&mode_bc);
4603 tile_size = mode_bc.pos;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004604 buf->size = tile_size;
4605
4606 // Record the maximum tile size we see, so we can compact headers later.
Thomas Davies4822e142017-10-10 11:30:36 +01004607 if (tile_size > *max_tile_size) {
4608 *max_tile_size = tile_size;
Thomas Davies4822e142017-10-10 11:30:36 +01004609 cm->largest_tile_id = tile_cols * tile_row + tile_col;
Thomas Davies4822e142017-10-10 11:30:36 +01004610 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004611
4612 if (have_tiles) {
4613 // tile header: size of this tile, or copy offset
4614 uint32_t tile_header = tile_size;
4615 const int tile_copy_mode =
4616 ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256)
4617 ? 1
4618 : 0;
4619
4620 // If tile_copy_mode = 1, check if this tile is a copy tile.
4621          // Copy tiles are very unlikely on key frames, so skip the search
4622          // there to avoid unnecessary work.
4623 if (cm->frame_type != KEY_FRAME && tile_copy_mode) {
4624          const int identical_tile_offset =
4625 find_identical_tile(tile_row, tile_col, tile_buffers);
4626
4627          if (identical_tile_offset > 0) {
4628 tile_size = 0;
4629            tile_header = identical_tile_offset | 0x80;
4630 tile_header <<= 24;
4631 }
4632 }
4633
4634 mem_put_le32(buf->data, tile_header);
4635 }
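        /* Worked example (illustrative): if find_identical_tile() reports an
         * offset of 2, tile_size becomes 0 and tile_header becomes
         * (2 | 0x80) << 24 = 0x82000000, i.e. the copy flag in bit 31 and the
         * offset in bits 30-24; remux_tiles() later narrows this field to
         * tile_size_bytes bytes.
         */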
4636
4637 total_size += tile_size;
4638 }
4639
4640 if (!is_last_col) {
4641 uint32_t col_size = total_size - col_offset - 4;
Jingning Handa11e692017-12-19 08:45:08 -08004642 mem_put_le32(dst + col_offset + tg_hdr_size, col_size);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004643
4644        // Record the maximum tile column size we see.
4646 *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
4647 }
4648 }
Jingning Handa11e692017-12-19 08:45:08 -08004649
4650 if (have_tiles) {
4651 total_size =
4652 remux_tiles(cm, data, total_size, *max_tile_size, *max_tile_col_size,
4653 &tile_size_bytes, &tile_col_size_bytes);
4654 }
4655
4656 // Now fill in the gaps in the uncompressed header.
4657 if (have_tiles) {
4658 assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
4659 aom_wb_write_literal(saved_wb, tile_col_size_bytes - 1, 2);
4660
4661 assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
4662 aom_wb_write_literal(saved_wb, tile_size_bytes - 1, 2);
4663 }
Jingning Handa11e692017-12-19 08:45:08 -08004664 total_size += tg_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004665 } else {
4666#endif // CONFIG_EXT_TILE
4667
4668 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
4669 TileInfo tile_info;
4670 const int is_last_row = (tile_row == tile_rows - 1);
4671 av1_tile_set_row(&tile_info, cm, tile_row);
4672
4673 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
4674 const int tile_idx = tile_row * tile_cols + tile_col;
4675 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
4676 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
4677 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
4678 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
4679 const int is_last_col = (tile_col == tile_cols - 1);
4680 const int is_last_tile = is_last_col && is_last_row;
4681 int is_last_tile_in_tg = 0;
4682
4683 if (new_tg) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004684 data = dst + total_size;
4685 // A new tile group begins at this tile. Write the obu header and
4686 // tile group header
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004687 curr_tg_data_size = write_obu_header(
4688 OBU_TILE_GROUP, obu_extension_header, data + PRE_OBU_SIZE_BYTES);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004689 if (n_log2_tiles)
4690 curr_tg_data_size += write_tile_group_header(
Soo-Chul Han38427e82017-09-27 15:06:13 -04004691 data + curr_tg_data_size + PRE_OBU_SIZE_BYTES, tile_idx,
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004692 AOMMIN(tile_idx + tg_size - 1, tile_cols * tile_rows - 1),
4693 n_log2_tiles);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004694 total_size += curr_tg_data_size + PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004695 new_tg = 0;
4696 tile_count = 0;
4697 }
4698 tile_count++;
4699 av1_tile_set_col(&tile_info, cm, tile_col);
4700
4701 if (tile_count == tg_size || tile_idx == (tile_cols * tile_rows - 1)) {
4702 is_last_tile_in_tg = 1;
4703 new_tg = 1;
4704 } else {
4705 is_last_tile_in_tg = 0;
4706 }
4707
4708#if CONFIG_DEPENDENT_HORZTILES
4709 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
4710#endif
4711 buf->data = dst + total_size;
4712
Tom Fineganff863952017-12-22 11:41:14 -08004713 // The last tile of the tile group does not have a header.
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004714 if (!is_last_tile_in_tg) total_size += 4;
4715
4716 // Initialise tile context from the frame context
4717 this_tile->tctx = *cm->fc;
4718 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
Yunqing Wang0e141b52017-11-02 15:08:58 -07004719 mode_bc.allow_update_cdf = 1;
Soo-Chul Han13f0d9c2017-10-22 21:55:52 -04004720#if CONFIG_LOOP_RESTORATION
Debargha Mukherjeea78c8f52018-01-31 11:14:38 -08004721 const int num_planes = av1_num_planes(cm);
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00004722 av1_reset_loop_restoration(&cpi->td.mb.e_mbd, num_planes);
Soo-Chul Han13f0d9c2017-10-22 21:55:52 -04004723#endif // CONFIG_LOOP_RESTORATION
4724
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004725 aom_start_encode(&mode_bc, dst + total_size);
4726 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004727 aom_stop_encode(&mode_bc);
4728 tile_size = mode_bc.pos;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004729 assert(tile_size > 0);
4730
4731 curr_tg_data_size += (tile_size + (is_last_tile_in_tg ? 0 : 4));
4732 buf->size = tile_size;
Thomas Davies4822e142017-10-10 11:30:36 +01004733 if (tile_size > *max_tile_size) {
4734 cm->largest_tile_id = tile_cols * tile_row + tile_col;
4735 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004736 if (!is_last_tile) {
4737 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
4738 }
Thomas Davies4822e142017-10-10 11:30:36 +01004739
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004740 if (!is_last_tile_in_tg) {
4741 // size of this tile
4742 mem_put_le32(buf->data, tile_size);
4743 } else {
Tom Finegan41150ad2018-01-23 11:42:55 -08004744// write current tile group size
4745#if CONFIG_OBU_SIZING
Tom Fineganf2d40f62018-02-01 11:52:49 -08004746 const size_t length_field_size =
4747 aom_uleb_size_in_bytes(curr_tg_data_size);
4748 memmove(data + length_field_size, data, curr_tg_data_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004749 if (write_uleb_obu_size(curr_tg_data_size, data) != AOM_CODEC_OK)
4750 assert(0);
Tom Fineganf2d40f62018-02-01 11:52:49 -08004751 curr_tg_data_size += length_field_size;
4752 total_size += length_field_size;
Tom Finegan41150ad2018-01-23 11:42:55 -08004753#else
4754 mem_put_le32(data, curr_tg_data_size);
4755#endif // CONFIG_OBU_SIZING
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004756 }
4757
4758 total_size += tile_size;
4759 }
4760 }
4761#if CONFIG_EXT_TILE
4762 }
4763#endif // CONFIG_EXT_TILE
4764 return (uint32_t)total_size;
4765}
4766
Tom Finegan41150ad2018-01-23 11:42:55 -08004767#endif // CONFIG_OBU
Yaowu Xuc27fc142016-08-22 16:08:15 -07004768
Tom Finegane4099e32018-01-23 12:01:51 -08004769int av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004770 uint8_t *data = dst;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004771 uint32_t data_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004772 unsigned int max_tile_size;
4773 unsigned int max_tile_col_size;
Tom Finegan8695e982018-01-23 21:59:24 +00004774#if CONFIG_OBU
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004775 AV1_COMMON *const cm = &cpi->common;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004776 uint32_t obu_size;
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004777#if CONFIG_SCALABILITY
4778 const uint8_t enhancement_layers_cnt = cm->enhancement_layers_cnt;
4779 const uint8_t obu_extension_header =
4780 cm->temporal_layer_id << 5 | cm->enhancement_layer_id << 3 | 0;
4781#else
4782 uint8_t obu_extension_header = 0;
4783#endif // CONFIG_SCALABILITY
Tom Finegan41150ad2018-01-23 11:42:55 -08004784#endif // CONFIG_OBU
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004785
Angie Chiangb11aedf2017-03-10 17:31:46 -08004786#if CONFIG_BITSTREAM_DEBUG
4787 bitstream_queue_reset_write();
4788#endif
4789
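  /* Descriptive sketch of the CONFIG_OBU packing order produced below:
   *   [length][OBU_SEQUENCE_HEADER]   (key frames only)
   *   [length][OBU_FRAME_HEADER]
   *   [length][OBU_TILE_GROUP] ...    (skipped when show_existing_frame is set)
   * Each length field is a ULEB128 value under CONFIG_OBU_SIZING, otherwise a
   * fixed PRE_OBU_SIZE_BYTES (4-byte) little-endian size.
   */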
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004790#if CONFIG_OBU
Soo-Chul Han38427e82017-09-27 15:06:13 -04004791 // The TD is now written outside the frame encode loop
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004792
4793  // Write the sequence header OBU on key frames, preceded by its length field.
4794 if (cm->frame_type == KEY_FRAME) {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004795 obu_size =
4796 write_obu_header(OBU_SEQUENCE_HEADER, 0, data + PRE_OBU_SIZE_BYTES);
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004797
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004798#if CONFIG_SCALABILITY
Tom Fineganf2d40f62018-02-01 11:52:49 -08004799 obu_size += write_sequence_header_obu(
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004800 cpi, data + PRE_OBU_SIZE_BYTES + obu_size, enhancement_layers_cnt);
4801#else
Tom Fineganf2d40f62018-02-01 11:52:49 -08004802 obu_size +=
4803 write_sequence_header_obu(cpi, data + PRE_OBU_SIZE_BYTES + obu_size);
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004804#endif // CONFIG_SCALABILITY
Tom Finegan41150ad2018-01-23 11:42:55 -08004805
4806#if CONFIG_OBU_SIZING
Tom Fineganf2d40f62018-02-01 11:52:49 -08004807 const size_t length_field_size = aom_uleb_size_in_bytes(obu_size);
4808 memmove(data + length_field_size, data, obu_size);
4809
Tom Finegan41150ad2018-01-23 11:42:55 -08004810 if (write_uleb_obu_size(obu_size, data) != AOM_CODEC_OK)
4811 return AOM_CODEC_ERROR;
4812#else
Tom Fineganf2d40f62018-02-01 11:52:49 -08004813 const size_t length_field_size = PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004814 mem_put_le32(data, obu_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004815#endif // CONFIG_OBU_SIZING
4816
Tom Fineganf2d40f62018-02-01 11:52:49 -08004817 data += obu_size + length_field_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004818 }
4819
Jingning Handa11e692017-12-19 08:45:08 -08004820#if CONFIG_EXT_TILE
4821 struct aom_write_bit_buffer saved_wb;
4822#endif
4823
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004824  // Write the frame header OBU, preceded by its length field.
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004825 obu_size = write_obu_header(OBU_FRAME_HEADER, obu_extension_header,
Tom Fineganf2d40f62018-02-01 11:52:49 -08004826 data + PRE_OBU_SIZE_BYTES);
4827 obu_size += write_frame_header_obu(cpi,
Jingning Handa11e692017-12-19 08:45:08 -08004828#if CONFIG_EXT_TILE
Tom Fineganf2d40f62018-02-01 11:52:49 -08004829 &saved_wb,
Jingning Handa11e692017-12-19 08:45:08 -08004830#endif
Tom Fineganf2d40f62018-02-01 11:52:49 -08004831 data + PRE_OBU_SIZE_BYTES + obu_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004832
4833#if CONFIG_OBU_SIZING
Tom Fineganf2d40f62018-02-01 11:52:49 -08004834 const size_t length_field_size = aom_uleb_size_in_bytes(obu_size);
4835 memmove(data + length_field_size, data, obu_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004836 if (write_uleb_obu_size(obu_size, data) != AOM_CODEC_OK)
4837 return AOM_CODEC_ERROR;
4838#else
Tom Fineganf2d40f62018-02-01 11:52:49 -08004839 const size_t length_field_size = PRE_OBU_SIZE_BYTES;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004840 mem_put_le32(data, obu_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004841#endif // CONFIG_OBU_SIZING
4842
Tom Fineganf2d40f62018-02-01 11:52:49 -08004843 data += obu_size + length_field_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004844
4845 if (cm->show_existing_frame) {
4846 data_size = 0;
4847 } else {
4848    // Each tile group OBU will be preceded by its length field (a ULEB128
4849    // value under CONFIG_OBU_SIZING, otherwise a fixed 4-byte size).
Tom Fineganf2d40f62018-02-01 11:52:49 -08004850 data_size =
4851 write_tiles_in_tg_obus(cpi, data, &max_tile_size, &max_tile_col_size,
Jingning Handa11e692017-12-19 08:45:08 -08004852#if CONFIG_EXT_TILE
Tom Fineganf2d40f62018-02-01 11:52:49 -08004853 &saved_wb,
Jingning Handa11e692017-12-19 08:45:08 -08004854#endif
Tom Fineganf2d40f62018-02-01 11:52:49 -08004855 obu_extension_header);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004856 }
4857
Soo-Chul Hanf8589862018-01-24 03:13:14 +00004858#endif // CONFIG_OBU
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004859
Jingning Handa11e692017-12-19 08:45:08 -08004860#if CONFIG_EXT_TILE && !CONFIG_OBU
Jingning Handa11e692017-12-19 08:45:08 -08004861 uint32_t uncompressed_hdr_size;
4862 struct aom_write_bit_buffer saved_wb;
4863 struct aom_write_bit_buffer wb = { data, 0 };
4864 const int have_tiles = cm->tile_cols * cm->tile_rows > 1;
4865 int tile_size_bytes;
4866 int tile_col_size_bytes;
4867
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004868 if (cm->large_scale_tile) {
Soo-Chul Han38427e82017-09-27 15:06:13 -04004869#if !CONFIG_OBU
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004870 write_uncompressed_header_frame(cpi, &wb);
Soo-Chul Han38427e82017-09-27 15:06:13 -04004871#else
4872 write_uncompressed_header_obu(cpi, &wb);
4873#endif
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004874
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004875 if (cm->show_existing_frame) {
4876 *size = aom_wb_bytes_written(&wb);
Tom Fineganf2d40f62018-02-01 11:52:49 -08004877 return AOM_CODEC_OK;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004878 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004879
4880    // We do not know these in advance. Output placeholder bits.
4881 saved_wb = wb;
4882 // Write tile size magnitudes
4883 if (have_tiles) {
4884 // Note that the last item in the uncompressed header is the data
4885 // describing tile configuration.
4886 // Number of bytes in tile column size - 1
4887 aom_wb_write_literal(&wb, 0, 2);
4888
4889 // Number of bytes in tile size - 1
4890 aom_wb_write_literal(&wb, 0, 2);
4891 }
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004892
Yunqing Wange7142e12018-01-17 11:20:12 -08004893 uncompressed_hdr_size = (uint32_t)aom_wb_bytes_written(&wb);
4894 aom_clear_system_state();
4895 data += uncompressed_hdr_size;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004896
Yunqing Wangb041d8a2017-11-15 12:31:18 -08004897#define EXT_TILE_DEBUG 0
4898#if EXT_TILE_DEBUG
4899 {
4900 char fn[20] = "./fh";
4901 fn[4] = cm->current_video_frame / 100 + '0';
4902 fn[5] = (cm->current_video_frame % 100) / 10 + '0';
4903 fn[6] = (cm->current_video_frame % 10) + '0';
4904 fn[7] = '\0';
Yunqing Wange7142e12018-01-17 11:20:12 -08004905 av1_print_uncompressed_frame_header(data - uncompressed_hdr_size,
4906 uncompressed_hdr_size, fn);
Yunqing Wangb041d8a2017-11-15 12:31:18 -08004907 }
4908#endif // EXT_TILE_DEBUG
4909#undef EXT_TILE_DEBUG
4910
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004911 // Write the encoded tile data
4912 data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
4913 } else {
4914#endif // CONFIG_EXT_TILE
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004915#if !CONFIG_OBU
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004916 data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004917#endif
Jingning Handa11e692017-12-19 08:45:08 -08004918#if CONFIG_EXT_TILE && !CONFIG_OBU
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004919 }
4920#endif // CONFIG_EXT_TILE
Jingning Handa11e692017-12-19 08:45:08 -08004921#if CONFIG_EXT_TILE && !CONFIG_OBU
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004922 if (cm->large_scale_tile) {
4923 if (have_tiles) {
4924 data_size =
4925 remux_tiles(cm, data, data_size, max_tile_size, max_tile_col_size,
4926 &tile_size_bytes, &tile_col_size_bytes);
4927 }
4928
4929 data += data_size;
4930
4931 // Now fill in the gaps in the uncompressed header.
4932 if (have_tiles) {
4933 assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
4934 aom_wb_write_literal(&saved_wb, tile_col_size_bytes - 1, 2);
4935
4936 assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
4937 aom_wb_write_literal(&saved_wb, tile_size_bytes - 1, 2);
4938 }
Tom Finegane4099e32018-01-23 12:01:51 -08004939
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004941 } else {
4942#endif // CONFIG_EXT_TILE
4943 data += data_size;
Jingning Handa11e692017-12-19 08:45:08 -08004944#if CONFIG_EXT_TILE && !CONFIG_OBU
Yunqing Wangeeb08a92017-07-07 21:25:18 -07004945 }
4946#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004947 *size = data - dst;
Tom Finegane4099e32018-01-23 12:01:51 -08004948 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004949}