blob: 108230ddac758ba1bfcffb091b69e896a60b880b [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <assert.h>
13#include <limits.h>
14#include <stdio.h>
15
Yaowu Xuf883b422016-08-30 14:01:10 -070016#include "aom/aom_encoder.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070017#include "aom_dsp/aom_dsp_common.h"
Debargha Mukherjee47748b52017-03-24 12:20:49 -070018#include "aom_dsp/binary_codes_writer.h"
Cheng Chenc7855b12017-09-05 10:49:08 -070019#include "aom_dsp/bitwriter_buffer.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070020#include "aom_mem/aom_mem.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070021#include "aom_ports/mem_ops.h"
22#include "aom_ports/system_state.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070023#if CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070024#include "aom_util/debug_util.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070025#endif // CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070026
Steinar Midtskogena9d41e82017-03-17 12:48:15 +010027#include "av1/common/cdef.h"
Luc Trudeaud183b642017-11-28 11:42:37 -050028#include "av1/common/cfl.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070029#include "av1/common/entropy.h"
30#include "av1/common/entropymode.h"
31#include "av1/common/entropymv.h"
32#include "av1/common/mvref_common.h"
Thomas Daviesf6936102016-09-05 16:51:31 +010033#include "av1/common/odintrin.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070034#include "av1/common/pred_common.h"
35#include "av1/common/reconinter.h"
hui su45dc5972016-12-08 17:42:50 -080036#include "av1/common/reconintra.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070037#include "av1/common/seg_common.h"
38#include "av1/common/tile_common.h"
39
Yaowu Xuc27fc142016-08-22 16:08:15 -070040#include "av1/encoder/bitstream.h"
41#include "av1/encoder/cost.h"
42#include "av1/encoder/encodemv.h"
Hui Suec73b442018-01-04 12:47:53 -080043#include "av1/encoder/encodetxb.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070044#include "av1/encoder/mcomp.h"
hui sud13c24a2017-04-07 16:13:07 -070045#include "av1/encoder/palette.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070046#include "av1/encoder/segmentation.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070047#include "av1/encoder/tokenize.h"
48
Di Chen56586622017-06-09 13:49:44 -070049#define ENC_MISMATCH_DEBUG 0
Zoe Liu85b66462017-04-20 14:28:19 -070050
Yaowu Xuf883b422016-08-30 14:01:10 -070051static INLINE void write_uniform(aom_writer *w, int n, int v) {
hui su37499292017-04-26 09:49:53 -070052 const int l = get_unsigned_bits(n);
53 const int m = (1 << l) - n;
Yaowu Xuc27fc142016-08-22 16:08:15 -070054 if (l == 0) return;
55 if (v < m) {
Yaowu Xuf883b422016-08-30 14:01:10 -070056 aom_write_literal(w, v, l - 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -070057 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -070058 aom_write_literal(w, m + ((v - m) >> 1), l - 1);
59 aom_write_literal(w, (v - m) & 1, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -070060 }
61}
62
Rupert Swarbrick6c545212017-09-01 17:17:25 +010063static void loop_restoration_write_sb_coeffs(const AV1_COMMON *const cm,
64 MACROBLOCKD *xd,
Rupert Swarbrickdd6f09a2017-10-19 16:10:23 +010065 const RestorationUnitInfo *rui,
Yue Chen44391512018-03-13 15:37:26 -070066 aom_writer *const w, int plane,
67 FRAME_COUNTS *counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -070068
// Writes the luma intra prediction mode for a keyframe block. The CDF is
// selected from the intra modes of the above and left neighbor blocks.
static void write_intra_mode_kf(FRAME_CONTEXT *frame_ctx, const MODE_INFO *mi,
                                const MODE_INFO *above_mi,
                                const MODE_INFO *left_mi, PREDICTION_MODE mode,
                                aom_writer *w) {
  // Intra-block-copy blocks signal their mode elsewhere and never reach here.
  assert(!is_intrabc_block(&mi->mbmi));
  (void)mi;
  aom_write_symbol(w, mode, get_y_mode_cdf(frame_ctx, above_mi, left_mi),
                   INTRA_MODES);
}
Yaowu Xuc27fc142016-08-22 16:08:15 -070078
Thomas Davies1de6c882017-01-11 17:47:49 +000079static void write_inter_mode(aom_writer *w, PREDICTION_MODE mode,
Zoe Liu7f24e1b2017-03-17 17:42:05 -070080 FRAME_CONTEXT *ec_ctx, const int16_t mode_ctx) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070081 const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
Yaowu Xuc27fc142016-08-22 16:08:15 -070082
Thomas Davies149eda52017-06-12 18:11:55 +010083 aom_write_symbol(w, mode != NEWMV, ec_ctx->newmv_cdf[newmv_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -070084
Jingning Hanf2b87bd2017-05-18 16:27:30 -070085 if (mode != NEWMV) {
Sarah Parker2b9ec2e2017-10-30 17:34:08 -070086 const int16_t zeromv_ctx =
87 (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
Sarah Parker2b9ec2e2017-10-30 17:34:08 -070088 aom_write_symbol(w, mode != GLOBALMV, ec_ctx->zeromv_cdf[zeromv_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -070089
Sarah Parker2b9ec2e2017-10-30 17:34:08 -070090 if (mode != GLOBALMV) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070091 int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
Thomas Davies149eda52017-06-12 18:11:55 +010092 aom_write_symbol(w, mode != NEARESTMV, ec_ctx->refmv_cdf[refmv_ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -070093 }
94 }
Yaowu Xuc27fc142016-08-22 16:08:15 -070095}
96
// Signals the dynamic reference list (DRL) index, i.e. which candidate from
// the reference-MV stack this block uses. For NEWMV/NEW_NEWMV the index is
// coded over stack positions 0..2; for NEAR modes coding starts at position
// 1 because NEARESTMV implicitly consumes candidate 0. At each position a
// single "not this one" bit is written until the chosen index is reached.
static void write_drl_idx(FRAME_CONTEXT *ec_ctx, const MB_MODE_INFO *mbmi,
                          const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

  assert(mbmi->ref_mv_idx < 3);

  const int new_mv = mbmi->mode == NEWMV || mbmi->mode == NEW_NEWMV;
  if (new_mv) {
    int idx;
    for (idx = 0; idx < 2; ++idx) {
      // Only signal a choice while more candidates remain in the stack.
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);

        aom_write_symbol(w, mbmi->ref_mv_idx != idx, ec_ctx->drl_cdf[drl_ctx],
                         2);
        if (mbmi->ref_mv_idx == idx) return;
      }
    }
    return;
  }

  if (have_nearmv_in_inter_mode(mbmi->mode)) {
    int idx;
    // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
    for (idx = 1; idx < 3; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        // ref_mv_idx is stored relative to the first NEAR candidate, hence
        // the (idx - 1) comparison.
        aom_write_symbol(w, mbmi->ref_mv_idx != (idx - 1),
                         ec_ctx->drl_cdf[drl_ctx], 2);
        if (mbmi->ref_mv_idx == (idx - 1)) return;
      }
    }
    return;
  }
}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700134
Thomas Davies8c08a332017-06-26 17:30:34 +0100135static void write_inter_compound_mode(AV1_COMMON *cm, MACROBLOCKD *xd,
136 aom_writer *w, PREDICTION_MODE mode,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700137 const int16_t mode_ctx) {
Thomas Davies8c08a332017-06-26 17:30:34 +0100138 assert(is_inter_compound_mode(mode));
Thomas Davies8c08a332017-06-26 17:30:34 +0100139 (void)cm;
140 aom_write_symbol(w, INTER_COMPOUND_OFFSET(mode),
141 xd->tile_ctx->inter_compound_mode_cdf[mode_ctx],
142 INTER_COMPOUND_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700143}
Zoe Liu85b66462017-04-20 14:28:19 -0700144
// Recursively signals the variable transform-size (VARTX) partition tree for
// an inter block. At each node one bit is written: 0 = "stop at this size",
// 1 = "split into the next smaller size". Both branches update the
// above/left txfm partition contexts that condition later blocks.
static void write_tx_size_vartx(const AV1_COMMON *cm, MACROBLOCKD *xd,
                                const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
                                int depth, int blk_row, int blk_col,
                                aom_writer *w) {
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
  const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
  const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);

  // Units outside the visible part of the block are not coded.
  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (depth == MAX_VARTX_DEPTH) {
    // Maximum split depth: nothing is signaled, only the partition context
    // is updated with the implied size.
    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    return;
  }

  const int ctx = txfm_partition_context(xd->above_txfm_context + blk_col,
                                         xd->left_txfm_context + blk_row,
                                         mbmi->sb_type, tx_size);
  const int txb_size_index =
      av1_get_txb_size_index(mbmi->sb_type, blk_row, blk_col);
  // Leaf test: the stored inter tx size for this unit equals the node size.
  const int write_txfm_partition =
      tx_size == mbmi->inter_tx_size[txb_size_index];
  if (write_txfm_partition) {
    aom_write_symbol(w, 0, ec_ctx->txfm_partition_cdf[ctx], 2);

    txfm_partition_update(xd->above_txfm_context + blk_col,
                          xd->left_txfm_context + blk_row, tx_size, tx_size);
    // TODO(yuec): set correct txfm partition update for qttx
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[1][tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];

    aom_write_symbol(w, 1, ec_ctx->txfm_partition_cdf[ctx], 2);

    if (sub_txs == TX_4X4) {
      // Smallest transform reached: the rest of the split is implicit.
      txfm_partition_update(xd->above_txfm_context + blk_col,
                            xd->left_txfm_context + blk_row, sub_txs, tx_size);
      return;
    }

    assert(bsw > 0 && bsh > 0);
    // Recurse over the sub-transform grid in raster order.
    for (int row = 0; row < tx_size_high_unit[tx_size]; row += bsh)
      for (int col = 0; col < tx_size_wide_unit[tx_size]; col += bsw) {
        int offsetr = blk_row + row;
        int offsetc = blk_col + col;
        write_tx_size_vartx(cm, xd, mbmi, sub_txs, depth + 1, offsetr, offsetc,
                            w);
      }
  }
}
198
// For (intra) blocks whose size admits an explicit transform-size choice,
// writes the selected tx size coded as a split depth below the block's
// largest allowed transform.
static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
  if (block_signals_txsize(bsize)) {
    const TX_SIZE tx_size = mbmi->tx_size;
    const int tx_size_ctx = get_tx_size_context(xd);
    // Depth 0 = largest allowed size; each extra depth halves the transform.
    const int depth = tx_size_to_depth(tx_size, bsize, 0);
    const int max_depths = bsize_to_max_depth(bsize, 0);
    const int32_t tx_size_cat = bsize_to_tx_size_cat(bsize, 0);

    assert(depth >= 0 && depth <= max_depths);
    // Inter blocks use the VARTX path (write_tx_size_vartx) instead.
    assert(!is_inter_block(mbmi));
    assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));

    aom_write_symbol(w, depth, ec_ctx->tx_size_cdf[tx_size_cat][tx_size_ctx],
                     max_depths + 1);
  }
}
220
Yaowu Xuf883b422016-08-30 14:01:10 -0700221static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
222 int segment_id, const MODE_INFO *mi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700223 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
224 return 1;
225 } else {
226 const int skip = mi->mbmi.skip;
Zoe Liue646daa2017-10-17 15:28:46 -0700227 const int ctx = av1_get_skip_context(xd);
Thomas Davies61e3e372017-04-04 16:10:23 +0100228 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Davies61e3e372017-04-04 16:10:23 +0100229 aom_write_symbol(w, skip, ec_ctx->skip_cdfs[ctx], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700230 return skip;
231 }
232}
233
Zoe Liuf40a9572017-10-13 12:37:19 -0700234static int write_skip_mode(const AV1_COMMON *cm, const MACROBLOCKD *xd,
235 int segment_id, const MODE_INFO *mi, aom_writer *w) {
236 if (!cm->skip_mode_flag) return 0;
237 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
238 return 0;
239 }
240 const int skip_mode = mi->mbmi.skip_mode;
241 if (!is_comp_ref_allowed(mi->mbmi.sb_type)) {
242 assert(!skip_mode);
243 return 0;
244 }
245 const int ctx = av1_get_skip_mode_context(xd);
246 aom_write_symbol(w, skip_mode, xd->tile_ctx->skip_mode_cdfs[ctx], 2);
247 return skip_mode;
248}
Zoe Liuf40a9572017-10-13 12:37:19 -0700249
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100250static void write_is_inter(const AV1_COMMON *cm, const MACROBLOCKD *xd,
251 int segment_id, aom_writer *w, const int is_inter) {
252 if (!segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
Frederic Barbier5e911422017-12-12 17:17:07 +0100253 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)
254#if CONFIG_SEGMENT_GLOBALMV
255 || segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV)
256#endif
Johannb0ef6ff2018-02-08 14:32:21 -0800257 )
Frederic Barbier5e911422017-12-12 17:17:07 +0100258 if (!av1_is_valid_scale(&cm->frame_refs[0].sf))
259 return; // LAST_FRAME not valid for reference
260
Yue Chen170678a2017-10-17 13:43:10 -0700261 const int ctx = av1_get_intra_inter_context(xd);
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100262 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100263 aom_write_symbol(w, is_inter, ec_ctx->intra_inter_cdf[ctx], 2);
Thomas Daviesf6ad9352017-04-19 11:38:06 +0100264 }
265}
266
// Signals the block's motion mode. What gets coded depends on the largest
// mode the block may use: nothing when only SIMPLE_TRANSLATION is allowed,
// a binary OBMC flag when OBMC_CAUSAL is the ceiling, and a full
// MOTION_MODES symbol otherwise.
static void write_motion_mode(const AV1_COMMON *cm, MACROBLOCKD *xd,
                              const MODE_INFO *mi, aom_writer *w) {
  const MB_MODE_INFO *mbmi = &mi->mbmi;

  MOTION_MODE last_motion_mode_allowed =
      cm->switchable_motion_mode
          ? motion_mode_allowed(cm->global_motion, xd, mi)
          : SIMPLE_TRANSLATION;
  assert(mbmi->motion_mode <= last_motion_mode_allowed);
  switch (last_motion_mode_allowed) {
    case SIMPLE_TRANSLATION: break;  // Only one possibility: nothing to code.
    case OBMC_CAUSAL:
      aom_write_symbol(w, mbmi->motion_mode == OBMC_CAUSAL,
                       xd->tile_ctx->obmc_cdf[mbmi->sb_type], 2);
      break;
    default:
      aom_write_symbol(w, mbmi->motion_mode,
                       xd->tile_ctx->motion_mode_cdf[mbmi->sb_type],
                       MOTION_MODES);
  }
}
Wei-Ting Lin85a8f702017-06-22 13:55:15 -0700288
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000289static void write_delta_qindex(const AV1_COMMON *cm, const MACROBLOCKD *xd,
290 int delta_qindex, aom_writer *w) {
Arild Fuldseth07441162016-08-15 15:07:52 +0200291 int sign = delta_qindex < 0;
292 int abs = sign ? -delta_qindex : delta_qindex;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000293 int rem_bits, thr;
Thomas Daviesf6936102016-09-05 16:51:31 +0100294 int smallval = abs < DELTA_Q_SMALL ? 1 : 0;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000295 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
296 (void)cm;
Thomas Daviesf6936102016-09-05 16:51:31 +0100297
Thomas Daviesd6ee8a82017-03-02 14:42:50 +0000298 aom_write_symbol(w, AOMMIN(abs, DELTA_Q_SMALL), ec_ctx->delta_q_cdf,
299 DELTA_Q_PROBS + 1);
Thomas Daviesf6936102016-09-05 16:51:31 +0100300
301 if (!smallval) {
302 rem_bits = OD_ILOG_NZ(abs - 1) - 1;
303 thr = (1 << rem_bits) + 1;
Thomas Davies3b93e8e2017-09-20 09:59:07 +0100304 aom_write_literal(w, rem_bits - 1, 3);
Thomas Daviesf6936102016-09-05 16:51:31 +0100305 aom_write_literal(w, abs - thr, rem_bits);
Arild Fuldseth07441162016-08-15 15:07:52 +0200306 }
307 if (abs > 0) {
308 aom_write_bit(w, sign);
309 }
310}
Thomas Daviesf6936102016-09-05 16:51:31 +0100311
Fangwen Fu231fe422017-04-24 17:52:29 -0700312#if CONFIG_EXT_DELTA_Q
// Writes a delta loop-filter level. With cm->delta_lf_multi each filter
// plane (lf_id) has its own CDF; otherwise one CDF is shared. Magnitudes of
// DELTA_LF_SMALL or more escape to raw bits; a sign bit follows any nonzero
// magnitude.
static void write_delta_lflevel(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                int lf_id, int delta_lflevel, aom_writer *w) {
  int sign = delta_lflevel < 0;
  int abs = sign ? -delta_lflevel : delta_lflevel;
  int rem_bits, thr;
  int smallval = abs < DELTA_LF_SMALL ? 1 : 0;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;

  if (cm->delta_lf_multi) {
    // Monochrome streams have two fewer loop-filter planes.
    assert(lf_id >= 0 && lf_id < (av1_num_planes(cm) > 1 ? FRAME_LF_COUNT
                                                         : FRAME_LF_COUNT - 2));
    aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL),
                     ec_ctx->delta_lf_multi_cdf[lf_id], DELTA_LF_PROBS + 1);
  } else {
    aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
                     DELTA_LF_PROBS + 1);
  }

  if (!smallval) {
    // Escape path: magnitude coded as (number of bits - 1, remainder).
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits - 1, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}
Fangwen Fu231fe422017-04-24 17:52:29 -0700342#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +0200343
Sarah Parker99e7daa2017-08-29 10:30:13 -0700344static void pack_map_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
345 int num) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700346 const TOKENEXTRA *p = *tp;
hui su40b9e7f2017-07-13 18:15:56 -0700347 write_uniform(w, n, p->token); // The first color index.
348 ++p;
349 --num;
350 for (int i = 0; i < num; ++i) {
Sarah Parker0cf4d9f2017-08-18 13:09:14 -0700351 aom_write_symbol(w, p->token, p->color_map_cdf, n);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700352 ++p;
353 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700354 *tp = p;
355}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700356
// Recursively writes the coefficient data for one transform-block subtree.
// Luma inter blocks may be split into smaller transforms (VARTX) and recurse
// until tx_size matches the stored per-unit size; chroma always codes at its
// single plane tx size (the `|| plane` leaf test).
static void pack_txb_tokens(aom_writer *w, AV1_COMMON *cm, MACROBLOCK *const x,
                            const TOKENEXTRA **tp,
                            const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
                            MB_MODE_INFO *mbmi, int plane,
                            BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
                            int block, int blk_row, int blk_col,
                            TX_SIZE tx_size, TOKEN_STATS *token_stats) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  // Units outside the visible block are skipped.
  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  // Chroma derives its tx size from the uv setup; luma looks up the stored
  // per-unit inter size.
  const TX_SIZE plane_tx_size =
      plane ? av1_get_uv_tx_size(mbmi, pd->subsampling_x, pd->subsampling_y)
            : mbmi->inter_tx_size[av1_get_txb_size_index(plane_bsize, blk_row,
                                                         blk_col)];

  if (tx_size == plane_tx_size || plane) {
    // Leaf: emit the coefficients of this transform block.
    TOKEN_STATS tmp_token_stats;
    init_token_stats(&tmp_token_stats);

    tran_low_t *tcoeff = BLOCK_OFFSET(x->mbmi_ext->tcoeff[plane], block);
    uint16_t eob = x->mbmi_ext->eobs[plane][block];
    TXB_CTX txb_ctx = { x->mbmi_ext->txb_skip_ctx[plane][block],
                        x->mbmi_ext->dc_sign_ctx[plane][block] };
    av1_write_coeffs_txb(cm, xd, w, blk_row, blk_col, plane, tx_size, tcoeff,
                         eob, &txb_ctx);
#if CONFIG_RD_DEBUG
    token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
    token_stats->cost += tmp_token_stats.cost;
#endif
  } else {
    // Interior node: recurse over the sub-transform grid in raster order,
    // advancing the coefficient block index by each sub-block's area.
    const TX_SIZE sub_txs = sub_tx_size_map[1][tx_size];
    const int bsw = tx_size_wide_unit[sub_txs];
    const int bsh = tx_size_high_unit[sub_txs];

    assert(bsw > 0 && bsh > 0);

    for (int r = 0; r < tx_size_high_unit[tx_size]; r += bsh) {
      for (int c = 0; c < tx_size_wide_unit[tx_size]; c += bsw) {
        const int offsetr = blk_row + r;
        const int offsetc = blk_col + c;
        const int step = bsh * bsw;

        if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

        pack_txb_tokens(w, cm, x, tp, tok_end, xd, mbmi, plane, plane_bsize,
                        bit_depth, block, offsetr, offsetc, sub_txs,
                        token_stats);
        block += step;
      }
    }
  }
}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700412
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +0000413#if CONFIG_SPATIAL_SEGMENTATION
Frederic Barbier0c4a6322018-02-22 10:14:00 +0100414static INLINE void set_spatial_segment_id(const AV1_COMMON *const cm,
415 uint8_t *segment_ids,
416 BLOCK_SIZE bsize, int mi_row,
417 int mi_col, int segment_id) {
418 const int mi_offset = mi_row * cm->mi_cols + mi_col;
419 const int bw = mi_size_wide[bsize];
420 const int bh = mi_size_high[bsize];
421 const int xmis = AOMMIN(cm->mi_cols - mi_col, bw);
422 const int ymis = AOMMIN(cm->mi_rows - mi_row, bh);
423 int x, y;
424
425 for (y = 0; y < ymis; ++y)
426 for (x = 0; x < xmis; ++x)
427 segment_ids[mi_offset + y * cm->mi_cols + x] = segment_id;
428}
429
// Maps segment id x (0 <= x < max) to a small non-negative code by
// interleaving signed offsets around the prediction ref, so values close to
// the predictor get small codes: +1 -> 1, -1 -> 2, +2 -> 3, -2 -> 4, ...
// Values outside the symmetric window around ref keep their natural order.
int av1_neg_interleave(int x, int ref, int max) {
  // No usable predictor: send the value as-is.
  if (ref == 0) return x;
  // Predictor at (or beyond) the top of the range: reflect downwards.
  if (ref >= max - 1) return (max - 1) - x;

  const int diff = x - ref;
  const int dist = diff < 0 ? -diff : diff;
  const int low_half = 2 * ref < max;
  // Width of the window coded symmetrically around the predictor. Note the
  // boundary is inclusive in the low half and exclusive in the high half,
  // matching the number of values available on the short side.
  const int in_window = low_half ? (dist <= ref) : (dist < max - ref);

  if (in_window) {
    return diff > 0 ? 2 * diff - 1 : -2 * diff;
  }
  // Outside the window the remaining values map in order.
  return low_half ? x : (max - x) - 1;
}
452
// Writes the spatially-predicted segment id for one block. A predictor is
// derived from neighboring segment ids; when skip is set the predictor is
// adopted outright (nothing coded) and both the frame and encoder segment
// maps are updated to match.
static void write_segment_id(AV1_COMP *cpi, const MB_MODE_INFO *const mbmi,
                             aom_writer *w, const struct segmentation *seg,
                             struct segmentation_probs *segp, int mi_row,
                             int mi_col, int skip) {
  if (!seg->enabled || !seg->update_map) return;

  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  int cdf_num;
  const int pred = av1_get_spatial_seg_pred(cm, xd, mi_row, mi_col, &cdf_num);

  if (skip) {
    // Still need to transmit tx size for intra blocks even if skip is
    // true. Changing segment_id may make the tx size become invalid, e.g
    // changing from lossless to lossy.
    assert(is_inter_block(mbmi) || !cpi->has_lossless_segment);

    set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type, mi_row,
                           mi_col, pred);
    set_spatial_segment_id(cm, cpi->segmentation_map, mbmi->sb_type, mi_row,
                           mi_col, pred);
    /* mbmi is read only but we need to update segment_id */
    ((MB_MODE_INFO *)mbmi)->segment_id = pred;
    return;
  }

  // Code the id as a signed offset from the predictor, folded non-negative
  // so that ids near the prediction get small symbols.
  const int coded_id =
      av1_neg_interleave(mbmi->segment_id, pred, seg->last_active_segid + 1);
  aom_cdf_prob *pred_cdf = segp->spatial_pred_seg_cdf[cdf_num];
  aom_write_symbol(w, coded_id, pred_cdf, 8);
  set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type, mi_row,
                         mi_col, mbmi->segment_id);
}
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +0000486#else
Yaowu Xuf883b422016-08-30 14:01:10 -0700487static void write_segment_id(aom_writer *w, const struct segmentation *seg,
Thomas9ac55082016-09-23 18:04:17 +0100488 struct segmentation_probs *segp, int segment_id) {
Nathan E. Eggef627e582016-08-19 20:06:51 -0400489 if (seg->enabled && seg->update_map) {
Nathan E. Eggef627e582016-08-19 20:06:51 -0400490 aom_write_symbol(w, segment_id, segp->tree_cdf, MAX_SEGMENTS);
Nathan E. Eggef627e582016-08-19 20:06:51 -0400491 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700492}
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +0000493#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700494
Thomas Davies315f5782017-06-14 15:14:55 +0100495#define WRITE_REF_BIT(bname, pname) \
Thomas Davies0fbd2b72017-09-12 10:49:45 +0100496 aom_write_symbol(w, bname, av1_get_pred_cdf_##pname(xd), 2)
Thomas Davies315f5782017-06-14 15:14:55 +0100497
Yaowu Xuc27fc142016-08-22 16:08:15 -0700498// This function encodes the reference frame
Yaowu Xuf883b422016-08-30 14:01:10 -0700499static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd,
500 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700501 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
502 const int is_compound = has_second_ref(mbmi);
503 const int segment_id = mbmi->segment_id;
504
505 // If segment level coding of this signal is disabled...
506 // or the segment allows multiple reference frame options
507 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
508 assert(!is_compound);
509 assert(mbmi->ref_frame[0] ==
510 get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
David Barkerd92f3562017-10-09 17:46:23 +0100511 }
Sarah Parker2b9ec2e2017-10-30 17:34:08 -0700512#if CONFIG_SEGMENT_GLOBALMV
David Barkerd92f3562017-10-09 17:46:23 +0100513 else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP) ||
Sarah Parker2b9ec2e2017-10-30 17:34:08 -0700514 segfeature_active(&cm->seg, segment_id, SEG_LVL_GLOBALMV))
David Barkerd92f3562017-10-09 17:46:23 +0100515#else
516 else if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP))
517#endif
518 {
519 assert(!is_compound);
520 assert(mbmi->ref_frame[0] == LAST_FRAME);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700521 } else {
522 // does the feature use compound prediction or not
523 // (if not specified at the frame/segment level)
524 if (cm->reference_mode == REFERENCE_MODE_SELECT) {
Debargha Mukherjee0f248c42017-09-07 12:40:18 -0700525 if (is_comp_ref_allowed(mbmi->sb_type))
Thomas Davies860def62017-06-14 10:00:03 +0100526 aom_write_symbol(w, is_compound, av1_get_reference_mode_cdf(cm, xd), 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700527 } else {
528 assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE));
529 }
530
531 if (is_compound) {
Zoe Liuc082bbc2017-05-17 13:31:37 -0700532 const COMP_REFERENCE_TYPE comp_ref_type = has_uni_comp_refs(mbmi)
533 ? UNIDIR_COMP_REFERENCE
534 : BIDIR_COMP_REFERENCE;
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100535 aom_write_symbol(w, comp_ref_type, av1_get_comp_reference_type_cdf(xd),
536 2);
Zoe Liuc082bbc2017-05-17 13:31:37 -0700537
538 if (comp_ref_type == UNIDIR_COMP_REFERENCE) {
539 const int bit = mbmi->ref_frame[0] == BWDREF_FRAME;
Zoe Liu49172952018-01-23 14:32:31 -0800540 WRITE_REF_BIT(bit, uni_comp_ref_p);
Zoe Liufcf5fa22017-06-26 16:00:38 -0700541
Zoe Liuc082bbc2017-05-17 13:31:37 -0700542 if (!bit) {
Zoe Liufcf5fa22017-06-26 16:00:38 -0700543 assert(mbmi->ref_frame[0] == LAST_FRAME);
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100544 const int bit1 = mbmi->ref_frame[1] == LAST3_FRAME ||
545 mbmi->ref_frame[1] == GOLDEN_FRAME;
Zoe Liu49172952018-01-23 14:32:31 -0800546 WRITE_REF_BIT(bit1, uni_comp_ref_p1);
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100547 if (bit1) {
548 const int bit2 = mbmi->ref_frame[1] == GOLDEN_FRAME;
Zoe Liu49172952018-01-23 14:32:31 -0800549 WRITE_REF_BIT(bit2, uni_comp_ref_p2);
Zoe Liufcf5fa22017-06-26 16:00:38 -0700550 }
Zoe Liufcf5fa22017-06-26 16:00:38 -0700551 } else {
552 assert(mbmi->ref_frame[1] == ALTREF_FRAME);
Zoe Liuc082bbc2017-05-17 13:31:37 -0700553 }
554
555 return;
556 }
Zoe Liufcf5fa22017-06-26 16:00:38 -0700557
558 assert(comp_ref_type == BIDIR_COMP_REFERENCE);
Zoe Liuc082bbc2017-05-17 13:31:37 -0700559
Yaowu Xuc27fc142016-08-22 16:08:15 -0700560 const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
561 mbmi->ref_frame[0] == LAST3_FRAME);
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100562 WRITE_REF_BIT(bit, comp_ref_p);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700563
Yaowu Xuc27fc142016-08-22 16:08:15 -0700564 if (!bit) {
Zoe Liu87818282017-11-26 17:09:59 -0800565 const int bit1 = mbmi->ref_frame[0] == LAST2_FRAME;
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100566 WRITE_REF_BIT(bit1, comp_ref_p1);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700567 } else {
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100568 const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
569 WRITE_REF_BIT(bit2, comp_ref_p2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700570 }
Zoe Liu7b1ec7a2017-05-24 22:28:24 -0700571
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100572 const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
Zoe Liu49172952018-01-23 14:32:31 -0800573 WRITE_REF_BIT(bit_bwd, comp_bwdref_p);
Zoe Liue9b15e22017-07-19 15:53:01 -0700574
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100575 if (!bit_bwd) {
Zoe Liu49172952018-01-23 14:32:31 -0800576 WRITE_REF_BIT(mbmi->ref_frame[1] == ALTREF2_FRAME, comp_bwdref_p1);
Zoe Liue9b15e22017-07-19 15:53:01 -0700577 }
Zoe Liue9b15e22017-07-19 15:53:01 -0700578
Yaowu Xuc27fc142016-08-22 16:08:15 -0700579 } else {
Zoe Liue9b15e22017-07-19 15:53:01 -0700580 const int bit0 = (mbmi->ref_frame[0] <= ALTREF_FRAME &&
581 mbmi->ref_frame[0] >= BWDREF_FRAME);
Zoe Liu49172952018-01-23 14:32:31 -0800582 WRITE_REF_BIT(bit0, single_ref_p1);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700583
584 if (bit0) {
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100585 const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
Zoe Liu49172952018-01-23 14:32:31 -0800586 WRITE_REF_BIT(bit1, single_ref_p2);
Zoe Liue9b15e22017-07-19 15:53:01 -0700587
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100588 if (!bit1) {
Zoe Liu49172952018-01-23 14:32:31 -0800589 WRITE_REF_BIT(mbmi->ref_frame[0] == ALTREF2_FRAME, single_ref_p6);
Zoe Liue9b15e22017-07-19 15:53:01 -0700590 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700591 } else {
592 const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
593 mbmi->ref_frame[0] == GOLDEN_FRAME);
Zoe Liu49172952018-01-23 14:32:31 -0800594 WRITE_REF_BIT(bit2, single_ref_p3);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700595
596 if (!bit2) {
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100597 const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
Zoe Liu49172952018-01-23 14:32:31 -0800598 WRITE_REF_BIT(bit3, single_ref_p4);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700599 } else {
Sebastien Alaiwan4be6cb32017-11-02 18:04:11 +0100600 const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
Zoe Liu49172952018-01-23 14:32:31 -0800601 WRITE_REF_BIT(bit4, single_ref_p5);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700602 }
603 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700604 }
605 }
606}
607
Yue Chena9383622018-03-08 14:37:09 -0800608static void write_filter_intra_mode_info(const AV1_COMMON *cm,
609 const MACROBLOCKD *xd,
hui su5db97432016-10-14 16:10:14 -0700610 const MB_MODE_INFO *const mbmi,
611 aom_writer *w) {
Yue Chena9383622018-03-08 14:37:09 -0800612 if (av1_filter_intra_allowed(cm, mbmi)) {
Yue Chenb0571872017-12-18 18:12:59 -0800613 aom_write_symbol(w, mbmi->filter_intra_mode_info.use_filter_intra,
Yue Chen45dfb792018-03-01 13:02:40 -0800614 xd->tile_ctx->filter_intra_cdfs[mbmi->sb_type], 2);
Yue Chenb0571872017-12-18 18:12:59 -0800615 if (mbmi->filter_intra_mode_info.use_filter_intra) {
hui su5db97432016-10-14 16:10:14 -0700616 const FILTER_INTRA_MODE mode =
Yue Chenb0571872017-12-18 18:12:59 -0800617 mbmi->filter_intra_mode_info.filter_intra_mode;
Yue Chen994dba22017-12-19 15:27:26 -0800618 aom_write_symbol(w, mode, xd->tile_ctx->filter_intra_mode_cdf,
Yue Chen63ce36f2017-10-10 23:37:31 -0700619 FILTER_INTRA_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700620 }
621 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700622}
623
Luc Trudeau866da792018-02-12 11:13:34 -0500624static void write_angle_delta(aom_writer *w, int angle_delta,
625 aom_cdf_prob *cdf) {
Luc Trudeau866da792018-02-12 11:13:34 -0500626 aom_write_symbol(w, angle_delta + MAX_ANGLE_DELTA, cdf,
627 2 * MAX_ANGLE_DELTA + 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700628}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700629
// Signals the per-block interpolation filter(s) for an inter block. When the
// block does not need a signaled filter (av1_is_interp_needed() is false),
// nothing is written; otherwise, if the frame-level filter is SWITCHABLE,
// one filter symbol is coded per direction (horizontal/vertical), stopping
// after the first when dual filters are disabled in the sequence params.
// Also accumulates encoder-side filter-selection statistics in cpi.
static void write_mb_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;

  if (!av1_is_interp_needed(xd)) {
    // No filter is coded; the reconstruction must have defaulted to the
    // broadcast of the frame's (unswitchable) filter.
    assert(mbmi->interp_filters ==
           av1_broadcast_interp_filter(
               av1_unswitchable_filter(cm->interp_filter)));
    return;
  }
  if (cm->interp_filter == SWITCHABLE) {
    int dir;
    for (dir = 0; dir < 2; ++dir) {
      // Context depends on neighboring blocks' filters for this direction.
      const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
      InterpFilter filter =
          av1_extract_interp_filter(mbmi->interp_filters, dir);
      aom_write_symbol(w, filter, ec_ctx->switchable_interp_cdf[ctx],
                       SWITCHABLE_FILTERS);
      // NOTE(review): stats bucket index 0 is used for both directions —
      // presumably intentional (single aggregate bucket); confirm against
      // the consumers of interp_filter_selected.
      ++cpi->interp_filter_selected[0][filter];
      // With dual filters disabled only one (shared) filter is signaled.
      if (cm->seq_params.enable_dual_filter == 0) return;
    }
  }
}
655
hui su33567b22017-04-30 16:40:19 -0700656// Transmit color values with delta encoding. Write the first value as
657// literal, and the deltas between each value and the previous one. "min_val" is
658// the smallest possible value of the deltas.
659static void delta_encode_palette_colors(const int *colors, int num,
660 int bit_depth, int min_val,
661 aom_writer *w) {
662 if (num <= 0) return;
hui sufa4ff852017-05-15 12:20:50 -0700663 assert(colors[0] < (1 << bit_depth));
hui su33567b22017-04-30 16:40:19 -0700664 aom_write_literal(w, colors[0], bit_depth);
665 if (num == 1) return;
666 int max_delta = 0;
667 int deltas[PALETTE_MAX_SIZE];
668 memset(deltas, 0, sizeof(deltas));
669 for (int i = 1; i < num; ++i) {
hui sufa4ff852017-05-15 12:20:50 -0700670 assert(colors[i] < (1 << bit_depth));
hui su33567b22017-04-30 16:40:19 -0700671 const int delta = colors[i] - colors[i - 1];
672 deltas[i - 1] = delta;
673 assert(delta >= min_val);
674 if (delta > max_delta) max_delta = delta;
675 }
676 const int min_bits = bit_depth - 3;
677 int bits = AOMMAX(av1_ceil_log2(max_delta + 1 - min_val), min_bits);
hui sufa4ff852017-05-15 12:20:50 -0700678 assert(bits <= bit_depth);
hui su33567b22017-04-30 16:40:19 -0700679 int range = (1 << bit_depth) - colors[0] - min_val;
hui sud13c24a2017-04-07 16:13:07 -0700680 aom_write_literal(w, bits - min_bits, 2);
hui su33567b22017-04-30 16:40:19 -0700681 for (int i = 0; i < num - 1; ++i) {
682 aom_write_literal(w, deltas[i] - min_val, bits);
683 range -= deltas[i];
684 bits = AOMMIN(bits, av1_ceil_log2(range));
hui sud13c24a2017-04-07 16:13:07 -0700685 }
686}
687
hui su33567b22017-04-30 16:40:19 -0700688// Transmit luma palette color values. First signal if each color in the color
689// cache is used. Those colors that are not in the cache are transmitted with
690// delta encoding.
691static void write_palette_colors_y(const MACROBLOCKD *const xd,
692 const PALETTE_MODE_INFO *const pmi,
693 int bit_depth, aom_writer *w) {
694 const int n = pmi->palette_size[0];
hui su33567b22017-04-30 16:40:19 -0700695 uint16_t color_cache[2 * PALETTE_MAX_SIZE];
Hui Su3748bc22017-08-23 11:30:41 -0700696 const int n_cache = av1_get_palette_cache(xd, 0, color_cache);
hui su33567b22017-04-30 16:40:19 -0700697 int out_cache_colors[PALETTE_MAX_SIZE];
698 uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
699 const int n_out_cache =
700 av1_index_color_cache(color_cache, n_cache, pmi->palette_colors, n,
701 cache_color_found, out_cache_colors);
702 int n_in_cache = 0;
703 for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
704 const int found = cache_color_found[i];
705 aom_write_bit(w, found);
706 n_in_cache += found;
707 }
708 assert(n_in_cache + n_out_cache == n);
709 delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 1, w);
710}
711
// Write chroma palette color values. U channel is handled similarly to the
// luma channel: color-cache reuse bits followed by delta coding of the
// out-of-cache colors. For the V channel, either use delta encoding or
// transmit raw values directly, whichever costs fewer bits (the choice is
// signaled with one bit).
static void write_palette_colors_uv(const MACROBLOCKD *const xd,
                                    const PALETTE_MODE_INFO *const pmi,
                                    int bit_depth, aom_writer *w) {
  const int n = pmi->palette_size[1];
  const uint16_t *colors_u = pmi->palette_colors + PALETTE_MAX_SIZE;
  const uint16_t *colors_v = pmi->palette_colors + 2 * PALETTE_MAX_SIZE;
  // U channel colors: same cache-reuse + delta scheme as luma, but with a
  // minimum delta of 0 (U colors may repeat).
  uint16_t color_cache[2 * PALETTE_MAX_SIZE];
  const int n_cache = av1_get_palette_cache(xd, 1, color_cache);
  int out_cache_colors[PALETTE_MAX_SIZE];
  uint8_t cache_color_found[2 * PALETTE_MAX_SIZE];
  const int n_out_cache = av1_index_color_cache(
      color_cache, n_cache, colors_u, n, cache_color_found, out_cache_colors);
  int n_in_cache = 0;
  for (int i = 0; i < n_cache && n_in_cache < n; ++i) {
    const int found = cache_color_found[i];
    aom_write_bit(w, found);
    n_in_cache += found;
  }
  delta_encode_palette_colors(out_cache_colors, n_out_cache, bit_depth, 0, w);

  // V channel colors. Don't use color cache as the colors are not sorted.
  const int max_val = 1 << bit_depth;
  int zero_count = 0, min_bits_v = 0;
  int bits_v =
      av1_get_palette_delta_bits_v(pmi, bit_depth, &zero_count, &min_bits_v);
  // Estimated cost of delta coding: 2 bits for the width offset, bit_depth
  // for the first color, then (bits_v + sign bit) per remaining color, minus
  // the sign bits saved on zero deltas.
  const int rate_using_delta =
      2 + bit_depth + (bits_v + 1) * (n - 1) - zero_count;
  const int rate_using_raw = bit_depth * n;
  if (rate_using_delta < rate_using_raw) {  // delta encoding
    assert(colors_v[0] < (1 << bit_depth));
    aom_write_bit(w, 1);
    aom_write_literal(w, bits_v - min_bits_v, 2);
    aom_write_literal(w, colors_v[0], bit_depth);
    for (int i = 1; i < n; ++i) {
      assert(colors_v[i] < (1 << bit_depth));
      if (colors_v[i] == colors_v[i - 1]) {  // No need to signal sign bit.
        aom_write_literal(w, 0, bits_v);
        continue;
      }
      const int delta = abs((int)colors_v[i] - colors_v[i - 1]);
      const int sign_bit = colors_v[i] < colors_v[i - 1];
      // Code whichever of (delta, max_val - delta) is smaller, flipping the
      // sign bit for the wrapped representation so the decoder can undo it.
      if (delta <= max_val - delta) {
        aom_write_literal(w, delta, bits_v);
        aom_write_bit(w, sign_bit);
      } else {
        aom_write_literal(w, max_val - delta, bits_v);
        aom_write_bit(w, !sign_bit);
      }
    }
  } else {  // Transmit raw values.
    aom_write_bit(w, 0);
    for (int i = 0; i < n; ++i) {
      assert(colors_v[i] < (1 << bit_depth));
      aom_write_literal(w, colors_v[i], bit_depth);
    }
  }
}
hui sud13c24a2017-04-07 16:13:07 -0700773
// Signals palette-mode information for an intra block: for luma (only when
// the luma mode is DC_PRED), a has-palette flag, the palette size, and the
// color values; likewise for chroma (only when the chroma mode is UV_DC_PRED
// and this block carries chroma information). Must only be called when
// palette is allowed for this block size (asserted below).
static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                    const MODE_INFO *const mi, int mi_row,
                                    int mi_col, aom_writer *w) {
  const int num_planes = av1_num_planes(cm);
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  assert(av1_allow_palette(cm->allow_screen_content_tools, bsize));
  const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
  const int bsize_ctx = av1_get_palette_bsize_ctx(bsize);

  if (mbmi->mode == DC_PRED) {
    const int n = pmi->palette_size[0];
    // Context derived from whether neighbors use a luma palette.
    const int palette_y_mode_ctx = av1_get_palette_mode_ctx(xd);
    aom_write_symbol(
        w, n > 0,
        xd->tile_ctx->palette_y_mode_cdf[bsize_ctx][palette_y_mode_ctx], 2);
    if (n > 0) {
      // Palette size is coded as an offset from the minimum allowed size.
      aom_write_symbol(w, n - PALETTE_MIN_SIZE,
                       xd->tile_ctx->palette_y_size_cdf[bsize_ctx],
                       PALETTE_SIZES);
      write_palette_colors_y(xd, pmi, cm->bit_depth, w);
    }
  }

  // Chroma palette is only signaled when this mi position actually carries
  // chroma samples (subsampling-aware) and the stream is not monochrome.
  const int uv_dc_pred =
      num_planes > 1 && mbmi->uv_mode == UV_DC_PRED &&
      is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
                          xd->plane[1].subsampling_y);
  if (uv_dc_pred) {
    const int n = pmi->palette_size[1];
    // Context: whether a luma palette is present for this block.
    const int palette_uv_mode_ctx = (pmi->palette_size[0] > 0);
    aom_write_symbol(w, n > 0,
                     xd->tile_ctx->palette_uv_mode_cdf[palette_uv_mode_ctx], 2);
    if (n > 0) {
      aom_write_symbol(w, n - PALETTE_MIN_SIZE,
                       xd->tile_ctx->palette_uv_size_cdf[bsize_ctx],
                       PALETTE_SIZES);
      write_palette_colors_uv(xd, pmi, cm->bit_depth, w);
    }
  }
}
815
// Signals the transform type for a luma transform block, when it needs to be
// coded at all: the extended-tx set for this (tx_size, bsize, inter/intra,
// reduced-set) combination must contain more than one type, the effective
// qindex must be positive, and the block must not be skipped (explicitly or
// via segment-level skip). Inter and intra blocks use different CDFs; intra
// CDFs are additionally conditioned on the (filter-)intra prediction mode.
void av1_write_tx_type(const AV1_COMMON *const cm, const MACROBLOCKD *xd,
                       int blk_row, int blk_col, int plane, TX_SIZE tx_size,
                       aom_writer *w) {
  MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
  const int is_inter = is_inter_block(mbmi);
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;

  // Only y plane's tx_type is transmitted
  if (plane > 0) return;
  PLANE_TYPE plane_type = get_plane_type(plane);
  TX_TYPE tx_type = av1_get_tx_type(plane_type, xd, blk_row, blk_col, tx_size,
                                    cm->reduced_tx_set_used);

  const TX_SIZE square_tx_size = txsize_sqr_map[tx_size];
  const BLOCK_SIZE bsize = mbmi->sb_type;
  if (get_ext_tx_types(tx_size, bsize, is_inter, cm->reduced_tx_set_used) > 1 &&
      ((!cm->seg.enabled && cm->base_qindex > 0) ||
       (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
      !mbmi->skip &&
      !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
    const TxSetType tx_set_type =
        get_ext_tx_set_type(tx_size, bsize, is_inter, cm->reduced_tx_set_used);
    const int eset =
        get_ext_tx_set(tx_size, bsize, is_inter, cm->reduced_tx_set_used);
    // eset == 0 should correspond to a set with only DCT_DCT and there
    // is no need to send the tx_type
    assert(eset > 0);
    assert(av1_ext_tx_used[tx_set_type][tx_type]);
    if (is_inter) {
      aom_write_symbol(w, av1_ext_tx_ind[tx_set_type][tx_type],
                       ec_ctx->inter_ext_tx_cdf[eset][square_tx_size],
                       av1_num_ext_tx_set[tx_set_type]);
    } else {
      // For filter-intra blocks, map the filter-intra mode to an equivalent
      // directional mode for CDF selection.
      PREDICTION_MODE intra_dir;
      if (mbmi->filter_intra_mode_info.use_filter_intra)
        intra_dir =
            fimode_to_intradir[mbmi->filter_intra_mode_info.filter_intra_mode];
      else
        intra_dir = mbmi->mode;
      aom_write_symbol(
          w, av1_ext_tx_ind[tx_set_type][tx_type],
          ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][intra_dir],
          av1_num_ext_tx_set[tx_set_type]);
    }
  }
}
862
Jingning Hanf04254f2017-03-08 10:51:35 -0800863static void write_intra_mode(FRAME_CONTEXT *frame_ctx, BLOCK_SIZE bsize,
864 PREDICTION_MODE mode, aom_writer *w) {
Hui Su814f41e2017-10-02 12:21:24 -0700865 aom_write_symbol(w, mode, frame_ctx->y_mode_cdf[size_group_lookup[bsize]],
Jingning Hanf04254f2017-03-08 10:51:35 -0800866 INTRA_MODES);
Jingning Hanf04254f2017-03-08 10:51:35 -0800867}
868
869static void write_intra_uv_mode(FRAME_CONTEXT *frame_ctx,
Luc Trudeaud6d9eee2017-07-12 12:36:50 -0400870 UV_PREDICTION_MODE uv_mode,
David Michael Barrcb3a8ef2018-01-06 15:48:49 +0900871 PREDICTION_MODE y_mode,
Luc Trudeau3ec16a32018-03-01 20:58:09 -0500872 CFL_ALLOWED_TYPE cfl_allowed, aom_writer *w) {
David Michael Barrcb3a8ef2018-01-06 15:48:49 +0900873 aom_write_symbol(w, uv_mode, frame_ctx->uv_mode_cdf[cfl_allowed][y_mode],
874 UV_INTRA_MODES - !cfl_allowed);
Jingning Hanf04254f2017-03-08 10:51:35 -0800875}
876
David Michael Barrf6eaa152017-07-19 19:42:28 +0900877static void write_cfl_alphas(FRAME_CONTEXT *const ec_ctx, int idx,
878 int joint_sign, aom_writer *w) {
879 aom_write_symbol(w, joint_sign, ec_ctx->cfl_sign_cdf, CFL_JOINT_SIGNS);
880 // Magnitudes are only signaled for nonzero codes.
881 if (CFL_SIGN_U(joint_sign) != CFL_SIGN_ZERO) {
882 aom_cdf_prob *cdf_u = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_U(joint_sign)];
883 aom_write_symbol(w, CFL_IDX_U(idx), cdf_u, CFL_ALPHABET_SIZE);
884 }
885 if (CFL_SIGN_V(joint_sign) != CFL_SIGN_ZERO) {
886 aom_cdf_prob *cdf_v = ec_ctx->cfl_alpha_cdf[CFL_CONTEXT_V(joint_sign)];
887 aom_write_symbol(w, CFL_IDX_V(idx), cdf_v, CFL_ALPHABET_SIZE);
888 }
Luc Trudeauf5334002017-04-25 12:21:26 -0400889}
Luc Trudeauf5334002017-04-25 12:21:26 -0400890
// Signals the CDEF strength for the 64x64 region containing this block.
// The strength is emitted at most once per 64x64 unit, at the first
// non-skip coding block encountered in that unit; cm->cdef_preset[] tracks
// which of the (up to four) 64x64 quadrants of the superblock have already
// been coded. When the frame is all-lossless (or intra block copy disables
// filtering), CDEF parameters are reset and nothing is written.
static void write_cdef(AV1_COMMON *cm, aom_writer *w, int skip, int mi_col,
                       int mi_row) {
  if (cm->all_lossless || (cm->allow_intrabc && NO_FILTER_FOR_IBC)) {
    // Initialize to indicate no CDEF for safety.
    cm->cdef_bits = 0;
    cm->cdef_strengths[0] = 0;
    cm->nb_cdef_strengths = 1;
    cm->cdef_uv_strengths[0] = 0;
    return;
  }

  // Mask that rounds an mi coordinate down to its containing 64x64 unit.
  const int m = ~((1 << (6 - MI_SIZE_LOG2)) - 1);
  const MB_MODE_INFO *mbmi =
      &cm->mi_grid_visible[(mi_row & m) * cm->mi_stride + (mi_col & m)]->mbmi;
  // Initialise when at top left part of the superblock
  if (!(mi_row & (cm->seq_params.mib_size - 1)) &&
      !(mi_col & (cm->seq_params.mib_size - 1))) {  // Top left?
    cm->cdef_preset[0] = cm->cdef_preset[1] = cm->cdef_preset[2] =
        cm->cdef_preset[3] = -1;
  }

  // Emit CDEF param at first non-skip coding block
  const int mask = 1 << (6 - MI_SIZE_LOG2);
  // 128x128 superblocks contain four 64x64 CDEF units; pick the quadrant.
  const int index = cm->seq_params.sb_size == BLOCK_128X128
                        ? !!(mi_col & mask) + 2 * !!(mi_row & mask)
                        : 0;
  if (cm->cdef_preset[index] == -1 && !skip) {
    aom_write_literal(w, mbmi->cdef_strength, cm->cdef_bits);
    cm->cdef_preset[index] = mbmi->cdef_strength;
  }
}
922
// Signals the segment id for an inter block, when the segmentation map is
// being updated. With spatial segmentation, the id may be coded either
// before the skip flag (preskip != 0) or after it, as selected by
// seg->preskip_segid; a skipped block codes its id immediately (and clears
// the temporal-prediction flag). With temporal updates enabled, a predicted
// flag is coded first and the explicit id only when prediction fails.
static void write_inter_segment_id(AV1_COMP *cpi, aom_writer *w,
                                   const struct segmentation *const seg,
                                   struct segmentation_probs *const segp,
                                   int mi_row, int mi_col, int skip,
                                   int preskip) {
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  const MODE_INFO *mi = xd->mi[0];
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
#if CONFIG_SPATIAL_SEGMENTATION
  AV1_COMMON *const cm = &cpi->common;
#else
  (void)mi_row;
  (void)mi_col;
  (void)skip;
  (void)preskip;
#endif

  if (seg->update_map) {
#if CONFIG_SPATIAL_SEGMENTATION
    // Honor the signaled position of the segment id relative to skip:
    // bail out if this call site does not match seg->preskip_segid.
    if (preskip) {
      if (!seg->preskip_segid) return;
    } else {
      if (seg->preskip_segid) return;
      if (skip) {
        write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 1);
        // NOTE(review): casts away const to clear seg_id_predicted on a
        // skipped block — works, but mutating through a const pointer is
        // fragile; consider passing mbmi non-const here.
        if (seg->temporal_update) ((MB_MODE_INFO *)mbmi)->seg_id_predicted = 0;
        return;
      }
    }
#endif
    if (seg->temporal_update) {
      // Code whether the id matches the temporal prediction; only code the
      // explicit id when it does not.
      const int pred_flag = mbmi->seg_id_predicted;
      aom_cdf_prob *pred_cdf = av1_get_pred_cdf_seg_id(segp, xd);
      aom_write_symbol(w, pred_flag, pred_cdf, 2);
      if (!pred_flag) {
#if CONFIG_SPATIAL_SEGMENTATION
        write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
#else
        write_segment_id(w, seg, segp, mbmi->segment_id);
#endif
      }
#if CONFIG_SPATIAL_SEGMENTATION
      // Predicted ids are not (re)coded, so propagate them into the current
      // frame's segment map directly.
      if (pred_flag) {
        set_spatial_segment_id(cm, cm->current_frame_seg_map, mbmi->sb_type,
                               mi_row, mi_col, mbmi->segment_id);
      }
#endif
    } else {
#if CONFIG_SPATIAL_SEGMENTATION
      write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
#else
      write_segment_id(w, seg, segp, mbmi->segment_id);
#endif
    }
  }
}
979
Angie Chiangc31ea682017-04-13 16:20:54 -0700980static void pack_inter_mode_mvs(AV1_COMP *cpi, const int mi_row,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +0200981 const int mi_col, aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700982 AV1_COMMON *const cm = &cpi->common;
Arild Fuldseth07441162016-08-15 15:07:52 +0200983 MACROBLOCK *const x = &cpi->td.mb;
984 MACROBLOCKD *const xd = &x->e_mbd;
Thomas Davies24523292017-01-11 16:56:47 +0000985 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Angie Chiangc31ea682017-04-13 16:20:54 -0700986 const MODE_INFO *mi = xd->mi[0];
Thomas Davies24523292017-01-11 16:56:47 +0000987
Yaowu Xuc27fc142016-08-22 16:08:15 -0700988 const struct segmentation *const seg = &cm->seg;
Thomas Davies9f5cedd2017-07-10 09:20:32 +0100989 struct segmentation_probs *const segp = &ec_ctx->seg;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700990 const MB_MODE_INFO *const mbmi = &mi->mbmi;
991 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
992 const PREDICTION_MODE mode = mbmi->mode;
993 const int segment_id = mbmi->segment_id;
994 const BLOCK_SIZE bsize = mbmi->sb_type;
995 const int allow_hp = cm->allow_high_precision_mv;
996 const int is_inter = is_inter_block(mbmi);
997 const int is_compound = has_second_ref(mbmi);
998 int skip, ref;
David Barker45390c12017-02-20 14:44:40 +0000999 (void)mi_row;
1000 (void)mi_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001001
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001002 write_inter_segment_id(cpi, w, seg, segp, mi_row, mi_col, 0, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001003
Zoe Liuf40a9572017-10-13 12:37:19 -07001004 write_skip_mode(cm, xd, segment_id, mi, w);
1005
Hui Sue4974132018-03-09 12:06:52 -08001006 assert(IMPLIES(mbmi->skip_mode, mbmi->skip));
1007 skip = mbmi->skip_mode ? 1 : write_skip(cm, xd, segment_id, mi, w);
Zoe Liuf40a9572017-10-13 12:37:19 -07001008
Rostislav Pehlivanov938710c2017-11-28 02:26:21 +00001009#if CONFIG_SPATIAL_SEGMENTATION
1010 write_inter_segment_id(cpi, w, seg, segp, mi_row, mi_col, skip, 0);
1011#endif
Zoe Liuf40a9572017-10-13 12:37:19 -07001012
Steinar Midtskogen6c24b022017-09-15 09:46:39 +02001013 write_cdef(cm, w, skip, mi_col, mi_row);
1014
Arild Fuldseth07441162016-08-15 15:07:52 +02001015 if (cm->delta_q_present_flag) {
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001016 int super_block_upper_left =
1017 ((mi_row & (cm->seq_params.mib_size - 1)) == 0) &&
1018 ((mi_col & (cm->seq_params.mib_size - 1)) == 0);
1019 if ((bsize != cm->seq_params.sb_size || skip == 0) &&
1020 super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01001021 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01001022 int reduced_delta_qindex =
1023 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00001024 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001025 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07001026#if CONFIG_EXT_DELTA_Q
Cheng Chena97394f2017-09-27 15:05:14 -07001027 if (cm->delta_lf_present_flag) {
Cheng Chen880166a2017-10-02 17:48:48 -07001028 if (cm->delta_lf_multi) {
Imdad Sardharwallaf74b4ab2018-02-20 17:22:42 +00001029 const int frame_lf_count =
1030 av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
1031 for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id) {
Cheng Chen880166a2017-10-02 17:48:48 -07001032 int reduced_delta_lflevel =
1033 (mbmi->curr_delta_lf[lf_id] - xd->prev_delta_lf[lf_id]) /
1034 cm->delta_lf_res;
1035 write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
1036 xd->prev_delta_lf[lf_id] = mbmi->curr_delta_lf[lf_id];
1037 }
1038 } else {
Cheng Chena97394f2017-09-27 15:05:14 -07001039 int reduced_delta_lflevel =
Cheng Chen880166a2017-10-02 17:48:48 -07001040 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
Cheng Chena97394f2017-09-27 15:05:14 -07001041 cm->delta_lf_res;
Cheng Chen880166a2017-10-02 17:48:48 -07001042 write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
1043 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
Cheng Chena97394f2017-09-27 15:05:14 -07001044 }
1045 }
Fangwen Fu231fe422017-04-24 17:52:29 -07001046#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02001047 }
1048 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001049
Zoe Liu93c35022018-02-27 17:15:13 -08001050 if (!mbmi->skip_mode) write_is_inter(cm, xd, mbmi->segment_id, w, is_inter);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001051
Zoe Liu56644192017-12-19 13:16:18 -08001052 if (mbmi->skip_mode) return;
Zoe Liuf40a9572017-10-13 12:37:19 -07001053
Yaowu Xuc27fc142016-08-22 16:08:15 -07001054 if (!is_inter) {
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001055 write_intra_mode(ec_ctx, bsize, mode, w);
Luc Trudeau866da792018-02-12 11:13:34 -05001056 const int use_angle_delta = av1_use_angle_delta(bsize);
1057
Hui Su7fb93972018-02-20 21:18:03 -08001058 if (use_angle_delta && av1_is_directional_mode(mode)) {
Luc Trudeau866da792018-02-12 11:13:34 -05001059 write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_Y],
1060 ec_ctx->angle_delta_cdf[mode - V_PRED]);
1061 }
1062
David Barkerc2a680e2018-02-07 15:53:53 +00001063 if (!cm->seq_params.monochrome &&
1064 is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
Debargha Mukherjeece340c32018-02-26 09:18:33 -08001065 xd->plane[1].subsampling_y)) {
Luc Trudeau866da792018-02-12 11:13:34 -05001066 const UV_PREDICTION_MODE uv_mode = mbmi->uv_mode;
Luc Trudeau866da792018-02-12 11:13:34 -05001067 write_intra_uv_mode(ec_ctx, uv_mode, mode, is_cfl_allowed(mbmi), w);
1068 if (uv_mode == UV_CFL_PRED)
David Michael Barr23198662017-06-19 23:19:48 +09001069 write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
Hui Su7fb93972018-02-20 21:18:03 -08001070 if (use_angle_delta && av1_is_directional_mode(get_uv_mode(uv_mode))) {
Luc Trudeau866da792018-02-12 11:13:34 -05001071 write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_UV],
1072 ec_ctx->angle_delta_cdf[uv_mode - V_PRED]);
1073 }
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001074 }
Luc Trudeaub09b55d2017-04-26 10:06:35 -04001075
Hui Sue87fb232017-10-05 15:00:15 -07001076 if (av1_allow_palette(cm->allow_screen_content_tools, bsize))
Hui Su8b618f62017-12-20 12:03:35 -08001077 write_palette_mode_info(cm, xd, mi, mi_row, mi_col, w);
Yue Chen45dfb792018-03-01 13:02:40 -08001078
Yue Chena9383622018-03-08 14:37:09 -08001079 write_filter_intra_mode_info(cm, xd, mbmi, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001080 } else {
Yaowu Xub0d0d002016-11-22 09:26:43 -08001081 int16_t mode_ctx;
Zoe Liufa8bad12018-01-23 14:32:31 -08001082
1083 av1_collect_neighbors_ref_counts(xd);
1084
Yaowu Xuc27fc142016-08-22 16:08:15 -07001085 write_ref_frames(cm, xd, w);
1086
Jingning Han7ae50fd2018-02-05 16:33:40 -08001087 mode_ctx =
1088 av1_mode_context_analyzer(mbmi_ext->mode_context, mbmi->ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001089
1090 // If segment skip is not enabled code the mode.
1091 if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001092 if (is_inter_compound_mode(mode))
1093 write_inter_compound_mode(cm, xd, w, mode, mode_ctx);
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001094 else if (is_inter_singleref_mode(mode))
1095 write_inter_mode(w, mode, ec_ctx, mode_ctx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001096
Sebastien Alaiwan34d55662017-11-15 09:36:03 +01001097 if (mode == NEWMV || mode == NEW_NEWMV || have_nearmv_in_inter_mode(mode))
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001098 write_drl_idx(ec_ctx, mbmi, mbmi_ext, w);
1099 else
1100 assert(mbmi->ref_mv_idx == 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001101 }
1102
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001103 if (mode == NEWMV || mode == NEW_NEWMV) {
1104 int_mv ref_mv;
1105 for (ref = 0; ref < 1 + is_compound; ++ref) {
Jingning Hanf050fc12018-03-09 14:53:33 -08001106 nmv_context *nmvc = &ec_ctx->nmvc;
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001107 ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0];
1108 av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv, nmvc,
1109 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001110 }
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001111 } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
Jingning Hanf050fc12018-03-09 14:53:33 -08001112 nmv_context *nmvc = &ec_ctx->nmvc;
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001113 av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv,
1114 &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv, nmvc,
1115 allow_hp);
1116 } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
Jingning Hanf050fc12018-03-09 14:53:33 -08001117 nmv_context *nmvc = &ec_ctx->nmvc;
Debargha Mukherjeeedced252017-10-20 00:02:00 -07001118 av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv,
1119 &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv, nmvc,
1120 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001121 }
1122
Yaowu Xuc27fc142016-08-22 16:08:15 -07001123 if (cpi->common.reference_mode != COMPOUND_REFERENCE &&
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07001124 cpi->common.allow_interintra_compound && is_interintra_allowed(mbmi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001125 const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
1126 const int bsize_group = size_group_lookup[bsize];
Thomas Daviescff91712017-07-07 11:49:55 +01001127 aom_write_symbol(w, interintra, ec_ctx->interintra_cdf[bsize_group], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001128 if (interintra) {
Thomas Davies299ff042017-06-27 13:41:59 +01001129 aom_write_symbol(w, mbmi->interintra_mode,
1130 ec_ctx->interintra_mode_cdf[bsize_group],
1131 INTERINTRA_MODES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001132 if (is_interintra_wedge_used(bsize)) {
Thomas Daviescff91712017-07-07 11:49:55 +01001133 aom_write_symbol(w, mbmi->use_wedge_interintra,
1134 ec_ctx->wedge_interintra_cdf[bsize], 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001135 if (mbmi->use_wedge_interintra) {
Yue Chen73335fa2017-12-20 23:33:41 -08001136#if II_WEDGE_IDX_ENTROPY_CODING
1137 aom_write_symbol(w, mbmi->interintra_wedge_index,
1138 ec_ctx->wedge_idx_cdf[bsize], 16);
1139#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001140 aom_write_literal(w, mbmi->interintra_wedge_index,
1141 get_wedge_bits_lookup(bsize));
Yue Chen73335fa2017-12-20 23:33:41 -08001142#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001143 assert(mbmi->interintra_wedge_sign == 0);
1144 }
1145 }
1146 }
1147 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001148
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001149 if (mbmi->ref_frame[1] != INTRA_FRAME) write_motion_mode(cm, xd, mi, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001150
Cheng Chen33a13d92017-11-28 16:49:59 -08001151 // First write idx to indicate current compound inter prediction mode group
1152 // Group A (0): jnt_comp, compound_average
1153 // Group B (1): interintra, compound_segment, wedge
1154 if (has_second_ref(mbmi)) {
Zoe Liu5f11e912017-12-05 23:23:56 -08001155 const int masked_compound_used =
1156 is_any_masked_compound_used(bsize) && cm->allow_masked_compound;
Cheng Chen5a881722017-11-30 17:05:10 -08001157
Zoe Liu5f11e912017-12-05 23:23:56 -08001158 if (masked_compound_used) {
Cheng Chen5a881722017-11-30 17:05:10 -08001159 const int ctx_comp_group_idx = get_comp_group_idx_context(xd);
1160 aom_write_symbol(w, mbmi->comp_group_idx,
1161 ec_ctx->comp_group_idx_cdf[ctx_comp_group_idx], 2);
Zoe Liu5f11e912017-12-05 23:23:56 -08001162 } else {
1163 assert(mbmi->comp_group_idx == 0);
Cheng Chen5a881722017-11-30 17:05:10 -08001164 }
Cheng Chen33a13d92017-11-28 16:49:59 -08001165
1166 if (mbmi->comp_group_idx == 0) {
1167 if (mbmi->compound_idx)
1168 assert(mbmi->interinter_compound_type == COMPOUND_AVERAGE);
1169
Imdad Sardharwalla4d8f7772018-03-15 12:14:48 +00001170 if (cm->seq_params.enable_jnt_comp) {
David Barkere21f4d92018-02-26 16:37:24 +00001171 const int comp_index_ctx = get_comp_index_context(cm, xd);
1172 aom_write_symbol(w, mbmi->compound_idx,
1173 ec_ctx->compound_index_cdf[comp_index_ctx], 2);
1174 } else {
1175 assert(mbmi->compound_idx == 1);
1176 }
Cheng Chen33a13d92017-11-28 16:49:59 -08001177 } else {
Zoe Liu5f11e912017-12-05 23:23:56 -08001178 assert(cpi->common.reference_mode != SINGLE_REFERENCE &&
1179 is_inter_compound_mode(mbmi->mode) &&
1180 mbmi->motion_mode == SIMPLE_TRANSLATION);
1181 assert(masked_compound_used);
1182 // compound_segment, wedge
Cheng Chen33a13d92017-11-28 16:49:59 -08001183 assert(mbmi->interinter_compound_type == COMPOUND_WEDGE ||
1184 mbmi->interinter_compound_type == COMPOUND_SEG);
Cheng Chen33a13d92017-11-28 16:49:59 -08001185
Zoe Liu5f11e912017-12-05 23:23:56 -08001186 if (is_interinter_compound_used(COMPOUND_WEDGE, bsize))
1187 aom_write_symbol(w, mbmi->interinter_compound_type - 1,
1188 ec_ctx->compound_type_cdf[bsize],
1189 COMPOUND_TYPES - 1);
1190
1191 if (mbmi->interinter_compound_type == COMPOUND_WEDGE) {
1192 assert(is_interinter_compound_used(COMPOUND_WEDGE, bsize));
Yue Chen73335fa2017-12-20 23:33:41 -08001193#if WEDGE_IDX_ENTROPY_CODING
1194 aom_write_symbol(w, mbmi->wedge_index, ec_ctx->wedge_idx_cdf[bsize],
1195 16);
1196#else
Zoe Liu5f11e912017-12-05 23:23:56 -08001197 aom_write_literal(w, mbmi->wedge_index, get_wedge_bits_lookup(bsize));
Yue Chen73335fa2017-12-20 23:33:41 -08001198#endif
Zoe Liu5f11e912017-12-05 23:23:56 -08001199 aom_write_bit(w, mbmi->wedge_sign);
1200 } else {
1201 assert(mbmi->interinter_compound_type == COMPOUND_SEG);
1202 aom_write_literal(w, mbmi->mask_type, MAX_SEG_MASK_BITS);
Cheng Chen33a13d92017-11-28 16:49:59 -08001203 }
1204 }
1205 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001206
Debargha Mukherjee0df711f2017-05-02 16:00:20 -07001207 write_mb_interp_filter(cpi, xd, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001208 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001209}
1210
Hui Suc2232cf2017-10-11 17:32:56 -07001211static void write_intrabc_info(AV1_COMMON *cm, MACROBLOCKD *xd,
1212 const MB_MODE_INFO_EXT *mbmi_ext,
Jingning Han088217b2018-02-23 21:55:21 -08001213 aom_writer *w) {
1214 (void)cm;
Hui Suc2232cf2017-10-11 17:32:56 -07001215 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
1216 int use_intrabc = is_intrabc_block(mbmi);
1217 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1218 aom_write_symbol(w, use_intrabc, ec_ctx->intrabc_cdf, 2);
1219 if (use_intrabc) {
1220 assert(mbmi->mode == DC_PRED);
1221 assert(mbmi->uv_mode == UV_DC_PRED);
Hui Su1fbe32a2018-02-26 21:44:54 -08001222 assert(mbmi->motion_mode == SIMPLE_TRANSLATION);
Hui Suc2232cf2017-10-11 17:32:56 -07001223 int_mv dv_ref = mbmi_ext->ref_mvs[INTRA_FRAME][0];
1224 av1_encode_dv(w, &mbmi->mv[0].as_mv, &dv_ref.as_mv, &ec_ctx->ndvc);
Hui Suc2232cf2017-10-11 17:32:56 -07001225 }
1226}
Hui Suc2232cf2017-10-11 17:32:56 -07001227
// Writes the mode information of one block in an intra-only (key) frame:
// segment id, skip flag, CDEF strength, optional delta-q / delta-lf updates,
// the intra (or IntraBC) prediction mode, angle deltas, chroma mode / CfL
// parameters, palette info and filter-intra info, in bitstream order.
static void write_mb_modes_kf(AV1_COMP *cpi, MACROBLOCKD *xd,
                              const MB_MODE_INFO_EXT *mbmi_ext,
                              const int mi_row, const int mi_col,
                              aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  const struct segmentation *const seg = &cm->seg;
  struct segmentation_probs *const segp = &ec_ctx->seg;
  const MODE_INFO *const mi = xd->mi[0];
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const PREDICTION_MODE mode = mbmi->mode;

#if CONFIG_SPATIAL_SEGMENTATION
  // Spatial segmentation can place the segment id either before or after the
  // skip flag; preskip_segid selects the pre-skip position here.
  if (seg->preskip_segid && seg->update_map)
    write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, 0);
#else
  if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id);
#endif

  const int skip = write_skip(cm, xd, mbmi->segment_id, mi, w);

#if CONFIG_SPATIAL_SEGMENTATION
  // Post-skip position: the just-coded skip flag feeds the segment-id coding.
  if (!seg->preskip_segid && seg->update_map)
    write_segment_id(cpi, mbmi, w, seg, segp, mi_row, mi_col, skip);
#endif

  write_cdef(cm, w, skip, mi_col, mi_row);

  if (cm->delta_q_present_flag) {
    // Delta-q (and delta-lf) values are signalled once per superblock, at its
    // top-left mi unit, and are skipped for an all-skip superblock-sized
    // block.
    int super_block_upper_left =
        ((mi_row & (cm->seq_params.mib_size - 1)) == 0) &&
        ((mi_col & (cm->seq_params.mib_size - 1)) == 0);
    if ((bsize != cm->seq_params.sb_size || skip == 0) &&
        super_block_upper_left) {
      assert(mbmi->current_q_index > 0);
      // Deltas are coded scaled down by delta_q_res; the decoder rescales.
      int reduced_delta_qindex =
          (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
      write_delta_qindex(cm, xd, reduced_delta_qindex, w);
      xd->prev_qindex = mbmi->current_q_index;
#if CONFIG_EXT_DELTA_Q
      if (cm->delta_lf_present_flag) {
        if (cm->delta_lf_multi) {
          // One delta per loop-filter component; chroma components are
          // dropped for monochrome streams.
          const int frame_lf_count =
              av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
          for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id) {
            int reduced_delta_lflevel =
                (mbmi->curr_delta_lf[lf_id] - xd->prev_delta_lf[lf_id]) /
                cm->delta_lf_res;
            write_delta_lflevel(cm, xd, lf_id, reduced_delta_lflevel, w);
            xd->prev_delta_lf[lf_id] = mbmi->curr_delta_lf[lf_id];
          }
        } else {
          // Single delta applied to all loop-filter components (lf_id == -1).
          int reduced_delta_lflevel =
              (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
              cm->delta_lf_res;
          write_delta_lflevel(cm, xd, -1, reduced_delta_lflevel, w);
          xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
        }
      }
#endif  // CONFIG_EXT_DELTA_Q
    }
  }

  // IntraBC replaces the regular intra mode signalling entirely.
  if (av1_allow_intrabc(cm)) {
    write_intrabc_info(cm, xd, mbmi_ext, w);
    if (is_intrabc_block(mbmi)) return;
  }

  write_intra_mode_kf(ec_ctx, mi, above_mi, left_mi, mode, w);

  // Angle deltas refine directional modes only.
  const int use_angle_delta = av1_use_angle_delta(bsize);
  if (use_angle_delta && av1_is_directional_mode(mode)) {
    write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_Y],
                      ec_ctx->angle_delta_cdf[mode - V_PRED]);
  }

  // Chroma mode info is only present when this mi unit carries the chroma
  // reference for its position (subsampling-dependent) and the stream is not
  // monochrome.
  if (!cm->seq_params.monochrome &&
      is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
                          xd->plane[1].subsampling_y)) {
    const UV_PREDICTION_MODE uv_mode = mbmi->uv_mode;
    write_intra_uv_mode(ec_ctx, uv_mode, mode, is_cfl_allowed(mbmi), w);
    if (uv_mode == UV_CFL_PRED)
      write_cfl_alphas(ec_ctx, mbmi->cfl_alpha_idx, mbmi->cfl_alpha_signs, w);
    if (use_angle_delta && av1_is_directional_mode(get_uv_mode(uv_mode))) {
      write_angle_delta(w, mbmi->angle_delta[PLANE_TYPE_UV],
                        ec_ctx->angle_delta_cdf[uv_mode - V_PRED]);
    }
  }

  if (av1_allow_palette(cm->allow_screen_content_tools, bsize))
    write_palette_mode_info(cm, xd, mi, mi_row, mi_col, w);

  write_filter_intra_mode_info(cm, xd, mbmi, w);
}
1325
Angie Chiangd4022822016-11-02 18:30:25 -07001326#if CONFIG_RD_DEBUG
1327static void dump_mode_info(MODE_INFO *mi) {
1328 printf("\nmi->mbmi.mi_row == %d\n", mi->mbmi.mi_row);
1329 printf("&& mi->mbmi.mi_col == %d\n", mi->mbmi.mi_col);
1330 printf("&& mi->mbmi.sb_type == %d\n", mi->mbmi.sb_type);
1331 printf("&& mi->mbmi.tx_size == %d\n", mi->mbmi.tx_size);
Jingning Han2fac8a42017-12-14 16:26:00 -08001332 printf("&& mi->mbmi.mode == %d\n", mi->mbmi.mode);
Angie Chiangd4022822016-11-02 18:30:25 -07001333}
Angie Chiangd02001d2016-11-06 15:31:49 -08001334static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats,
1335 int plane) {
1336 if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) {
1337 int r, c;
1338 printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n",
1339 plane, rd_stats->txb_coeff_cost[plane], token_stats->cost);
Angie Chiangd02001d2016-11-06 15:31:49 -08001340 printf("rd txb_coeff_cost_map\n");
1341 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
1342 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
1343 printf("%d ", rd_stats->txb_coeff_cost_map[plane][r][c]);
1344 }
1345 printf("\n");
1346 }
1347
1348 printf("pack txb_coeff_cost_map\n");
1349 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
1350 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
1351 printf("%d ", token_stats->txb_coeff_cost_map[r][c]);
1352 }
1353 printf("\n");
1354 }
Angie Chiangd02001d2016-11-06 15:31:49 -08001355 return 1;
1356 }
1357 return 0;
1358}
Angie Chiangd4022822016-11-02 18:30:25 -07001359#endif
1360
Di Chen56586622017-06-09 13:49:44 -07001361#if ENC_MISMATCH_DEBUG
// Debug helper (ENC_MISMATCH_DEBUG only): prints the mode decision of one
// inter block on a single hard-coded frame, so the output can be diffed
// line-by-line against the decoder's matching dump when hunting bitstream
// mismatches.
static void enc_dump_logs(AV1_COMP *cpi, int mi_row, int mi_col) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  MODE_INFO *m;
  xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
  m = xd->mi[0];
  if (is_inter_block(&m->mbmi)) {
// Only dump this one displayed frame; edit when chasing a different frame.
#define FRAME_TO_CHECK 11
    if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
      const MB_MODE_INFO *const mbmi = &m->mbmi;
      const BLOCK_SIZE bsize = mbmi->sb_type;

      int_mv mv[2];
      int is_comp_ref = has_second_ref(&m->mbmi);
      int ref;

      for (ref = 0; ref < 1 + is_comp_ref; ++ref)
        mv[ref].as_mv = m->mbmi.mv[ref].as_mv;

      // Single-reference blocks print a zero second MV for a stable format.
      if (!is_comp_ref) {
        mv[1].as_int = 0;
      }

      MACROBLOCK *const x = &cpi->td.mb;
      const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
      // Recompute the mode context exactly as the packer does, so the dump
      // reflects what was actually signalled.
      const int16_t mode_ctx =
          is_comp_ref ? mbmi_ext->compound_mode_context[mbmi->ref_frame[0]]
                      : av1_mode_context_analyzer(mbmi_ext->mode_context,
                                                  mbmi->ref_frame);

      const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
      int16_t zeromv_ctx = -1;
      int16_t refmv_ctx = -1;

      // -1 marks contexts that were never consulted for this mode.
      if (mbmi->mode != NEWMV) {
        zeromv_ctx = (mode_ctx >> GLOBALMV_OFFSET) & GLOBALMV_CTX_MASK;
        if (mbmi->mode != GLOBALMV)
          refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
      }

      printf(
          "=== ENCODER ===: "
          "Frame=%d, (mi_row,mi_col)=(%d,%d), skip_mode=%d, mode=%d, bsize=%d, "
          "show_frame=%d, mv[0]=(%d,%d), mv[1]=(%d,%d), ref[0]=%d, "
          "ref[1]=%d, motion_mode=%d, mode_ctx=%d, "
          "newmv_ctx=%d, zeromv_ctx=%d, refmv_ctx=%d, tx_size=%d\n",
          cm->current_video_frame, mi_row, mi_col, mbmi->skip_mode, mbmi->mode,
          bsize, cm->show_frame, mv[0].as_mv.row, mv[0].as_mv.col,
          mv[1].as_mv.row, mv[1].as_mv.col, mbmi->ref_frame[0],
          mbmi->ref_frame[1], mbmi->motion_mode, mode_ctx, newmv_ctx,
          zeromv_ctx, refmv_ctx, mbmi->tx_size);
    }
  }
}
1416#endif // ENC_MISMATCH_DEBUG
1417
Yue Chen64550b62017-01-12 12:18:22 -08001418static void write_mbmi_b(AV1_COMP *cpi, const TileInfo *const tile,
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001419 aom_writer *w, int mi_row, int mi_col) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001420 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001421 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1422 MODE_INFO *m;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001423 int bh, bw;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001424 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
1425 m = xd->mi[0];
1426
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001427 assert(m->mbmi.sb_type <= cm->seq_params.sb_size ||
Rupert Swarbrick72678572017-08-02 12:05:26 +01001428 (m->mbmi.sb_type >= BLOCK_SIZES && m->mbmi.sb_type < BLOCK_SIZES_ALL));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001429
Jingning Hanc709e1f2016-12-06 14:48:09 -08001430 bh = mi_size_high[m->mbmi.sb_type];
1431 bw = mi_size_wide[m->mbmi.sb_type];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001432
1433 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1434
Yaowu Xu4a947652018-03-17 12:39:40 -07001435 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);
Yushin Cho77bba8d2016-11-04 16:36:56 -07001436
Jingning Han02384572018-02-21 11:28:59 -08001437 xd->above_txfm_context =
1438 cm->above_txfm_context + (mi_col << TX_UNIT_WIDE_LOG2);
1439 xd->left_txfm_context = xd->left_txfm_context_buffer +
1440 ((mi_row & MAX_MIB_MASK) << TX_UNIT_HIGH_LOG2);
1441
Yaowu Xuc27fc142016-08-22 16:08:15 -07001442 if (frame_is_intra_only(cm)) {
Hui Su293f2812018-02-26 14:41:18 -08001443 write_mb_modes_kf(cpi, xd, cpi->td.mb.mbmi_ext, mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001444 } else {
Angie Chiang38edf682017-02-21 15:13:09 -08001445 // has_subpel_mv_component needs the ref frame buffers set up to look
1446 // up if they are scaled. has_subpel_mv_component is in turn needed by
Yaowu Xuc27fc142016-08-22 16:08:15 -07001447 // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
1448 set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
Zoe Liu85b66462017-04-20 14:28:19 -07001449
Di Chen56586622017-06-09 13:49:44 -07001450#if ENC_MISMATCH_DEBUG
Di Chen56586622017-06-09 13:49:44 -07001451 enc_dump_logs(cpi, mi_row, mi_col);
1452#endif // ENC_MISMATCH_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07001453
Sebastien Alaiwan0cf54d42017-10-16 16:10:04 +02001454 pack_inter_mode_mvs(cpi, mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001455 }
Yue Chen64550b62017-01-12 12:18:22 -08001456}
Yaowu Xuc27fc142016-08-22 16:08:15 -07001457
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001458static void write_inter_txb_coeff(AV1_COMMON *const cm, MACROBLOCK *const x,
1459 MB_MODE_INFO *const mbmi, aom_writer *w,
1460 const TOKENEXTRA **tok,
1461 const TOKENEXTRA *const tok_end,
1462 TOKEN_STATS *token_stats, const int row,
1463 const int col, int *block, const int plane) {
1464 MACROBLOCKD *const xd = &x->e_mbd;
1465 const struct macroblockd_plane *const pd = &xd->plane[plane];
Debargha Mukherjee19619882017-11-22 13:13:14 -08001466 const BLOCK_SIZE bsize = mbmi->sb_type;
1467 const BLOCK_SIZE bsizec =
1468 scale_chroma_bsize(bsize, pd->subsampling_x, pd->subsampling_y);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001469
Debargha Mukherjee5d149e12017-12-14 12:49:51 -08001470 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsizec, pd);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001471
Debargha Mukherjee19619882017-11-22 13:13:14 -08001472 TX_SIZE max_tx_size = get_vartx_max_txsize(
Debargha Mukherjee891a8772017-11-22 10:09:37 -08001473 xd, plane_bsize, pd->subsampling_x || pd->subsampling_y);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001474 const int step =
1475 tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
1476 const int bkw = tx_size_wide_unit[max_tx_size];
1477 const int bkh = tx_size_high_unit[max_tx_size];
1478
1479 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, pd);
1480 int mu_blocks_wide = block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
1481 int mu_blocks_high = block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
1482
1483 int blk_row, blk_col;
1484
1485 const int num_4x4_w = block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
1486 const int num_4x4_h = block_size_high[plane_bsize] >> tx_size_wide_log2[0];
1487
Jingning Hancdbc47f2018-01-12 16:21:07 -08001488 const int unit_height =
1489 AOMMIN(mu_blocks_high + (row >> pd->subsampling_y), num_4x4_h);
1490 const int unit_width =
1491 AOMMIN(mu_blocks_wide + (col >> pd->subsampling_x), num_4x4_w);
1492 for (blk_row = row >> pd->subsampling_y; blk_row < unit_height;
1493 blk_row += bkh) {
1494 for (blk_col = col >> pd->subsampling_x; blk_col < unit_width;
1495 blk_col += bkw) {
Sebastien Alaiwancad5ebc2018-02-20 16:18:20 +01001496 pack_txb_tokens(w, cm, x, tok, tok_end, xd, mbmi, plane, plane_bsize,
1497 cm->bit_depth, *block, blk_row, blk_col, max_tx_size,
1498 token_stats);
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001499 *block += step;
1500 }
1501 }
1502}
1503
Yue Chen64550b62017-01-12 12:18:22 -08001504static void write_tokens_b(AV1_COMP *cpi, const TileInfo *const tile,
1505 aom_writer *w, const TOKENEXTRA **tok,
1506 const TOKENEXTRA *const tok_end, int mi_row,
1507 int mi_col) {
1508 AV1_COMMON *const cm = &cpi->common;
Imdad Sardharwallaaf8e2642018-01-19 11:46:34 +00001509 const int num_planes = av1_num_planes(cm);
Yue Chen64550b62017-01-12 12:18:22 -08001510 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Wei-Ting Lin1d46d902017-06-26 15:57:18 -07001511 const int mi_offset = mi_row * cm->mi_stride + mi_col;
1512 MODE_INFO *const m = *(cm->mi_grid_visible + mi_offset);
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001513 MB_MODE_INFO *const mbmi = &m->mbmi;
Yue Chen64550b62017-01-12 12:18:22 -08001514 int plane;
1515 int bh, bw;
Yushin Cho258a0242017-03-06 13:53:01 -08001516 MACROBLOCK *const x = &cpi->td.mb;
Yue Chen64550b62017-01-12 12:18:22 -08001517 (void)tok;
1518 (void)tok_end;
Wei-Ting Lin1d46d902017-06-26 15:57:18 -07001519 xd->mi = cm->mi_grid_visible + mi_offset;
Yue Chen64550b62017-01-12 12:18:22 -08001520
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00001521 assert(mbmi->sb_type <= cm->seq_params.sb_size ||
Rupert Swarbrick72678572017-08-02 12:05:26 +01001522 (mbmi->sb_type >= BLOCK_SIZES && mbmi->sb_type < BLOCK_SIZES_ALL));
Yue Chen64550b62017-01-12 12:18:22 -08001523
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001524 bh = mi_size_high[mbmi->sb_type];
1525 bw = mi_size_wide[mbmi->sb_type];
Yue Chen64550b62017-01-12 12:18:22 -08001526 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1527
Yaowu Xu4a947652018-03-17 12:39:40 -07001528 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);
Yue Chen64550b62017-01-12 12:18:22 -08001529
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001530 if (!mbmi->skip) {
Jingning Hanad54a982018-01-12 14:40:29 -08001531 if (!is_inter_block(mbmi))
1532 av1_write_coeffs_mb(cm, x, mi_row, mi_col, w, mbmi->sb_type);
1533
Jingning Hancdbc47f2018-01-12 16:21:07 -08001534 if (is_inter_block(mbmi)) {
1535 int block[MAX_MB_PLANE] = { 0 };
1536 const struct macroblockd_plane *const y_pd = &xd->plane[0];
1537 const BLOCK_SIZE plane_bsize = get_plane_block_size(mbmi->sb_type, y_pd);
Jingning Han42a0fb32016-10-31 10:43:31 -07001538 const int num_4x4_w =
1539 block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
1540 const int num_4x4_h =
1541 block_size_high[plane_bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001542 int row, col;
Angie Chiangd4022822016-11-02 18:30:25 -07001543 TOKEN_STATS token_stats;
Angie Chiangd02001d2016-11-06 15:31:49 -08001544 init_token_stats(&token_stats);
Angie Chiangd4022822016-11-02 18:30:25 -07001545
Jingning Hancdbc47f2018-01-12 16:21:07 -08001546 const BLOCK_SIZE max_unit_bsize = get_plane_block_size(BLOCK_64X64, y_pd);
Jingning Hanc2b797f2017-07-19 09:37:11 -07001547 int mu_blocks_wide =
1548 block_size_wide[max_unit_bsize] >> tx_size_wide_log2[0];
1549 int mu_blocks_high =
1550 block_size_high[max_unit_bsize] >> tx_size_high_log2[0];
1551
1552 mu_blocks_wide = AOMMIN(num_4x4_w, mu_blocks_wide);
1553 mu_blocks_high = AOMMIN(num_4x4_h, mu_blocks_high);
1554
Jingning Hancdbc47f2018-01-12 16:21:07 -08001555 for (row = 0; row < num_4x4_h; row += mu_blocks_high) {
1556 for (col = 0; col < num_4x4_w; col += mu_blocks_wide) {
1557 for (plane = 0; plane < num_planes && is_inter_block(mbmi); ++plane) {
1558 const struct macroblockd_plane *const pd = &xd->plane[plane];
1559 if (!is_chroma_reference(mi_row, mi_col, mbmi->sb_type,
1560 pd->subsampling_x, pd->subsampling_y)) {
Jingning Hancdbc47f2018-01-12 16:21:07 -08001561 continue;
1562 }
Jingning Hane5e8f4d2017-11-20 20:11:04 -08001563 write_inter_txb_coeff(cm, x, mbmi, w, tok, tok_end, &token_stats,
Jingning Hancdbc47f2018-01-12 16:21:07 -08001564 row, col, &block[plane], plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001565 }
1566 }
Angie Chiangd02001d2016-11-06 15:31:49 -08001567#if CONFIG_RD_DEBUG
Angie Chiang3963d632016-11-10 18:41:40 -08001568 if (mbmi->sb_type >= BLOCK_8X8 &&
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05001569 rd_token_stats_mismatch(&mbmi->rd_stats, &token_stats, plane)) {
Angie Chiangd02001d2016-11-06 15:31:49 -08001570 dump_mode_info(m);
1571 assert(0);
1572 }
Jingning Hanfe45b212016-11-22 10:30:23 -08001573#endif // CONFIG_RD_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07001574 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001575 }
1576 }
1577}
1578
// Writes everything for one block: its mode info, any palette color-index
// map tokens, the transform-size signalling (or the corresponding context
// update when sizes are not coded), and finally the coefficient tokens.
static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
                          aom_writer *w, const TOKENEXTRA **tok,
                          const TOKENEXTRA *const tok_end, int mi_row,
                          int mi_col) {
  write_mbmi_b(cpi, tile, w, mi_row, mi_col);

  AV1_COMMON *cm = &cpi->common;
  MACROBLOCKD *xd = &cpi->td.mb.e_mbd;
  MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
  // Palette applies to at most the two first planes (luma + joint chroma).
  for (int plane = 0; plane < AOMMIN(2, av1_num_planes(cm)); ++plane) {
    const uint8_t palette_size_plane =
        mbmi->palette_mode_info.palette_size[plane];
    assert(!mbmi->skip_mode || !palette_size_plane);
    if (palette_size_plane > 0) {
      assert(mbmi->use_intrabc == 0);
      assert(av1_allow_palette(cm->allow_screen_content_tools, mbmi->sb_type));
      int rows, cols;
      av1_get_block_dimensions(mbmi->sb_type, plane, xd, NULL, NULL, &rows,
                               &cols);
      assert(*tok < tok_end);
      // The color-index map was tokenized earlier; pack it here.
      pack_map_tokens(w, tok, palette_size_plane, rows * cols);
    }
  }

  // Transform size is signalled only under TX_MODE_SELECT for block sizes
  // that carry it, and never for skipped inter blocks or lossless segments.
  BLOCK_SIZE bsize = mbmi->sb_type;
  int is_inter_tx = is_inter_block(mbmi) || is_intrabc_block(mbmi);
  int skip = mbmi->skip;
  int segment_id = mbmi->segment_id;
  if (cm->tx_mode == TX_MODE_SELECT && block_signals_txsize(bsize) &&
      !(is_inter_tx && skip) && !xd->lossless[segment_id]) {
    if (is_inter_tx) {  // This implies skip flag is 0.
      // Inter blocks use recursive variable tx-size signalling per max-size
      // transform unit.
      const TX_SIZE max_tx_size = get_vartx_max_txsize(xd, bsize, 0);
      const int txbh = tx_size_high_unit[max_tx_size];
      const int txbw = tx_size_wide_unit[max_tx_size];
      const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
      const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
      int idx, idy;
      for (idy = 0; idy < height; idy += txbh)
        for (idx = 0; idx < width; idx += txbw)
          write_tx_size_vartx(cm, xd, mbmi, max_tx_size, 0, idy, idx, w);
    } else {
      write_selected_tx_size(cm, xd, w);
      set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, 0, xd);
    }
  } else {
    // No tx size coded: still update the above/left transform contexts so
    // neighboring blocks see consistent state.
    set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h,
                  skip && is_inter_block(mbmi), xd);
  }

  write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
}
1630
Yaowu Xuf883b422016-08-30 14:01:10 -07001631static void write_partition(const AV1_COMMON *const cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001632 const MACROBLOCKD *const xd, int hbs, int mi_row,
1633 int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize,
Yaowu Xuf883b422016-08-30 14:01:10 -07001634 aom_writer *w) {
Alex Converse55c6bde2017-01-12 15:55:31 -08001635 const int is_partition_point = bsize >= BLOCK_8X8;
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00001636
Jingning Hanbf9c6b72016-12-14 14:50:45 -08001637 if (!is_partition_point) return;
1638
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001639 const int has_rows = (mi_row + hbs) < cm->mi_rows;
1640 const int has_cols = (mi_col + hbs) < cm->mi_cols;
1641 const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
1642 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1643
1644 if (!has_rows && !has_cols) {
1645 assert(p == PARTITION_SPLIT);
1646 return;
1647 }
1648
Yaowu Xuc27fc142016-08-22 16:08:15 -07001649 if (has_rows && has_cols) {
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001650 aom_write_symbol(w, p, ec_ctx->partition_cdf[ctx],
1651 partition_cdf_length(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001652 } else if (!has_rows && has_cols) {
1653 assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001654 assert(bsize > BLOCK_8X8);
1655 aom_cdf_prob cdf[2];
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001656 partition_gather_vert_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001657 aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001658 } else {
1659 assert(has_rows && !has_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001660 assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001661 assert(bsize > BLOCK_8X8);
1662 aom_cdf_prob cdf[2];
Rupert Swarbrickeb123932017-11-22 15:20:47 +00001663 partition_gather_horz_alike(cdf, ec_ctx->partition_cdf[ctx], bsize);
Stanislav Vitvitskyy8711cf52017-08-18 15:17:57 -07001664 aom_write_cdf(w, p == PARTITION_SPLIT, cdf, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001665 }
1666}
1667
// Recursively writes the mode info for one superblock-subtree of a tile:
// first any loop-restoration unit coefficients whose top-left corner falls in
// this block, then the partition symbol, then the per-block mode data (or the
// sub-partitions, recursively). Bit order here must match the decoder exactly.
static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile,
                           aom_writer *const w, const TOKENEXTRA **tok,
                           const TOKENEXTRA *const tok_end, int mi_row,
                           int mi_col, BLOCK_SIZE bsize) {
  const AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  const int hbs = mi_size_wide[bsize] / 2;           // half-block step in MI units
  const int quarter_step = mi_size_wide[bsize] / 4;  // step for HORZ_4 / VERT_4
  int i;
  const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
  const BLOCK_SIZE subsize = get_subsize(bsize, partition);

  // Blocks that start outside the frame are not coded.
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  // Signal coefficients for every restoration unit anchored in this block.
  const int num_planes = av1_num_planes(cm);
  for (int plane = 0; plane < num_planes; ++plane) {
    int rcol0, rcol1, rrow0, rrow1, tile_tl_idx;
    if (av1_loop_restoration_corners_in_sb(cm, plane, mi_row, mi_col, bsize,
                                           &rcol0, &rcol1, &rrow0, &rrow1,
                                           &tile_tl_idx)) {
      const int rstride = cm->rst_info[plane].horz_units_per_tile;
      for (int rrow = rrow0; rrow < rrow1; ++rrow) {
        for (int rcol = rcol0; rcol < rcol1; ++rcol) {
          const int runit_idx = tile_tl_idx + rcol + rrow * rstride;
          const RestorationUnitInfo *rui =
              &cm->rst_info[plane].unit_info[runit_idx];
          loop_restoration_write_sb_coeffs(cm, xd, rui, w, plane,
                                           cpi->td.counts);
        }
      }
    }
  }

  write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
  switch (partition) {
    case PARTITION_NONE:
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
      break;
    case PARTITION_HORZ:
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
      // The lower half is only coded when it is inside the frame.
      if (mi_row + hbs < cm->mi_rows)
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
      break;
    case PARTITION_VERT:
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
      // The right half is only coded when it is inside the frame.
      if (mi_col + hbs < cm->mi_cols)
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
      break;
    case PARTITION_SPLIT:
      // Recurse into the four quadrants in raster order.
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs, subsize);
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col, subsize);
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs,
                     subsize);
      break;
    case PARTITION_HORZ_A:
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
      break;
    case PARTITION_HORZ_B:
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
      break;
    case PARTITION_VERT_A:
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
      break;
    case PARTITION_VERT_B:
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
      write_modes_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
      break;
    case PARTITION_HORZ_4:
      // Four horizontal strips; stop early at the frame boundary (the first
      // strip is always coded).
      for (i = 0; i < 4; ++i) {
        int this_mi_row = mi_row + i * quarter_step;
        if (i > 0 && this_mi_row >= cm->mi_rows) break;

        write_modes_b(cpi, tile, w, tok, tok_end, this_mi_row, mi_col);
      }
      break;
    case PARTITION_VERT_4:
      // Four vertical strips; stop early at the frame boundary.
      for (i = 0; i < 4; ++i) {
        int this_mi_col = mi_col + i * quarter_step;
        if (i > 0 && this_mi_col >= cm->mi_cols) break;

        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, this_mi_col);
      }
      break;
    default: assert(0);
  }

  // update partition context
  update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
}
1765
// Writes all mode info for one tile: resets the above/left contexts and the
// delta-q / delta-lf predictors, then walks the tile superblock by superblock.
static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile,
                        aom_writer *const w, const TOKENEXTRA **tok,
                        const TOKENEXTRA *const tok_end) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  const int mi_row_start = tile->mi_row_start;
  const int mi_row_end = tile->mi_row_end;
  const int mi_col_start = tile->mi_col_start;
  const int mi_col_end = tile->mi_col_end;
  int mi_row, mi_col;

  av1_zero_above_context(cm, mi_col_start, mi_col_end);
  if (cpi->common.delta_q_present_flag) {
    // Delta-q is coded relative to the previous value; seed with base_qindex.
    xd->prev_qindex = cpi->common.base_qindex;
#if CONFIG_EXT_DELTA_Q
    if (cpi->common.delta_lf_present_flag) {
      // Monochrome streams skip the two chroma loop-filter levels.
      const int frame_lf_count =
          av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
      for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id)
        xd->prev_delta_lf[lf_id] = 0;
      xd->prev_delta_lf_from_base = 0;
    }
#endif  // CONFIG_EXT_DELTA_Q
  }

  // Iterate over the tile in superblock raster order.
  for (mi_row = mi_row_start; mi_row < mi_row_end;
       mi_row += cm->seq_params.mib_size) {
    av1_zero_left_context(xd);

    for (mi_col = mi_col_start; mi_col < mi_col_end;
         mi_col += cm->seq_params.mib_size) {
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col,
                     cm->seq_params.sb_size);
    }
  }
}
1802
// Writes the frame-level loop-restoration header: a two-bit restoration type
// per plane, the luma restoration unit size, and (for chroma) whether chroma
// units are subsampled relative to luma.
static void encode_restoration_mode(AV1_COMMON *cm,
                                    struct aom_write_bit_buffer *wb) {
  assert(!cm->all_lossless);
  // No restoration header when intra block copy disables in-loop filters.
  if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
  const int num_planes = av1_num_planes(cm);
  int all_none = 1, chroma_none = 1;
  for (int p = 0; p < num_planes; ++p) {
    RestorationInfo *rsi = &cm->rst_info[p];
    if (rsi->frame_restoration_type != RESTORE_NONE) {
      all_none = 0;
      chroma_none &= p == 0;  // stays 1 only while all non-NONE planes are luma
    }
    // Two-bit code per plane: (0,0)=NONE, (1,0)=WIENER, (1,1)=SGRPROJ,
    // (0,1)=SWITCHABLE.
    switch (rsi->frame_restoration_type) {
      case RESTORE_NONE:
        aom_wb_write_bit(wb, 0);
        aom_wb_write_bit(wb, 0);
        break;
      case RESTORE_WIENER:
        aom_wb_write_bit(wb, 1);
        aom_wb_write_bit(wb, 0);
        break;
      case RESTORE_SGRPROJ:
        aom_wb_write_bit(wb, 1);
        aom_wb_write_bit(wb, 1);
        break;
      case RESTORE_SWITCHABLE:
        aom_wb_write_bit(wb, 0);
        aom_wb_write_bit(wb, 1);
        break;
      default: assert(0);
    }
  }
  if (!all_none) {
    assert(cm->seq_params.sb_size == BLOCK_64X64 ||
           cm->seq_params.sb_size == BLOCK_128X128);
    const int sb_size = cm->seq_params.sb_size == BLOCK_128X128 ? 128 : 64;

    RestorationInfo *rsi = &cm->rst_info[0];

    assert(rsi->restoration_unit_size >= sb_size);
    assert(RESTORATION_UNITSIZE_MAX == 256);

    // Unit size is signalled as up to two ">size" flags: the first only when
    // the superblock is 64x64 (128x128 superblocks imply a >=128 unit).
    if (sb_size == 64) {
      aom_wb_write_bit(wb, rsi->restoration_unit_size > 64);
    }
    if (rsi->restoration_unit_size > 64) {
      aom_wb_write_bit(wb, rsi->restoration_unit_size > 128);
    }
  }

  if (num_planes > 1) {
    int s = AOMMIN(cm->subsampling_x, cm->subsampling_y);
    if (s && !chroma_none) {
      // One bit: chroma units equal to luma, or scaled down by subsampling.
      aom_wb_write_bit(wb, cm->rst_info[1].restoration_unit_size !=
                               cm->rst_info[0].restoration_unit_size);
      assert(cm->rst_info[1].restoration_unit_size ==
                 cm->rst_info[0].restoration_unit_size ||
             cm->rst_info[1].restoration_unit_size ==
                 (cm->rst_info[0].restoration_unit_size >> s));
      assert(cm->rst_info[2].restoration_unit_size ==
             cm->rst_info[1].restoration_unit_size);
    } else if (!s) {
      // 4:4:4 — chroma unit size must already match luma; nothing to signal.
      assert(cm->rst_info[1].restoration_unit_size ==
             cm->rst_info[0].restoration_unit_size);
      assert(cm->rst_info[2].restoration_unit_size ==
             cm->rst_info[1].restoration_unit_size);
    }
  }
}
1872
// Writes one Wiener filter (vertical then horizontal taps), each tap coded as
// a subexponential refinement of the corresponding tap in *ref_wiener_info.
// For the reduced chroma window (wiener_win != WIENER_WIN) the outermost tap
// is implicitly zero and not coded. On return *ref_wiener_info is updated to
// the just-written filter so it serves as the predictor for the next unit.
static void write_wiener_filter(int wiener_win, const WienerInfo *wiener_info,
                                WienerInfo *ref_wiener_info, aom_writer *wb) {
  if (wiener_win == WIENER_WIN)
    aom_write_primitive_refsubexpfin(
        wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
        WIENER_FILT_TAP0_SUBEXP_K,
        ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV,
        wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV);
  else
    assert(wiener_info->vfilter[0] == 0 &&
           wiener_info->vfilter[WIENER_WIN - 1] == 0);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
      WIENER_FILT_TAP1_SUBEXP_K,
      ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV,
      wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
      WIENER_FILT_TAP2_SUBEXP_K,
      ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV,
      wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV);
  // Horizontal taps, same coding scheme as the vertical ones above.
  if (wiener_win == WIENER_WIN)
    aom_write_primitive_refsubexpfin(
        wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
        WIENER_FILT_TAP0_SUBEXP_K,
        ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV,
        wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV);
  else
    assert(wiener_info->hfilter[0] == 0 &&
           wiener_info->hfilter[WIENER_WIN - 1] == 0);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
      WIENER_FILT_TAP1_SUBEXP_K,
      ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV,
      wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV);
  aom_write_primitive_refsubexpfin(
      wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
      WIENER_FILT_TAP2_SUBEXP_K,
      ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV,
      wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV);
  memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
}
1915
#if CONFIG_SKIP_SGR
// Writes one self-guided restoration filter: the parameter set index `ep`,
// then the xqd projection coefficients as subexponential refinements of
// *ref_sgrproj_info. A coefficient whose corresponding radius (r0/r1) is zero
// is skipped. *ref_sgrproj_info is updated to serve as the next predictor.
static void write_sgrproj_filter(const SgrprojInfo *sgrproj_info,
                                 SgrprojInfo *ref_sgrproj_info,
                                 aom_writer *wb) {
  aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS);
  const sgr_params_type *params = &sgr_params[sgrproj_info->ep];

  if (params->r0 == 0) {
    // First projection is unused: only xqd[1] is coded.
    assert(sgrproj_info->xqd[0] == 0);
    aom_write_primitive_refsubexpfin(
        wb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
        ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
        sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1);
  } else if (params->r1 == 0) {
    // Second projection is unused: only xqd[0] is coded.
    aom_write_primitive_refsubexpfin(
        wb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
        ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
        sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0);
  } else {
    // Both projections active: code xqd[0] then xqd[1].
    aom_write_primitive_refsubexpfin(
        wb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1, SGRPROJ_PRJ_SUBEXP_K,
        ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
        sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0);
    aom_write_primitive_refsubexpfin(
        wb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1, SGRPROJ_PRJ_SUBEXP_K,
        ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
        sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1);
  }

  memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
}
#else   // CONFIG_SKIP_SGR
// Legacy variant: both xqd coefficients are always coded.
static void write_sgrproj_filter(const SgrprojInfo *sgrproj_info,
                                 SgrprojInfo *ref_sgrproj_info,
                                 aom_writer *wb) {
  aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS);
  aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1,
                                   SGRPROJ_PRJ_SUBEXP_K,
                                   ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
                                   sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0);
  aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1,
                                   SGRPROJ_PRJ_SUBEXP_K,
                                   ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
                                   sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1);
  memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
}
#endif  // CONFIG_SKIP_SGR
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001963
// Writes the restoration type (when not implied by the frame-level type) and
// filter coefficients for one restoration unit of one plane, updating the
// encoder-side symbol counts. The per-plane wiener/sgrproj predictors stored
// in xd are advanced by the write_*_filter helpers.
static void loop_restoration_write_sb_coeffs(const AV1_COMMON *const cm,
                                             MACROBLOCKD *xd,
                                             const RestorationUnitInfo *rui,
                                             aom_writer *const w, int plane,
                                             FRAME_COUNTS *counts) {
  const RestorationInfo *rsi = cm->rst_info + plane;
  RestorationType frame_rtype = rsi->frame_restoration_type;
  if (frame_rtype == RESTORE_NONE) return;

  assert(!cm->all_lossless);

  // Chroma planes use the reduced Wiener window.
  const int wiener_win = (plane > 0) ? WIENER_WIN_CHROMA : WIENER_WIN;
  WienerInfo *wiener_info = xd->wiener_info + plane;
  SgrprojInfo *sgrproj_info = xd->sgrproj_info + plane;
  RestorationType unit_rtype = rui->restoration_type;

  if (frame_rtype == RESTORE_SWITCHABLE) {
    // Per-unit type symbol, then the coefficients for the chosen type.
    aom_write_symbol(w, unit_rtype, xd->tile_ctx->switchable_restore_cdf,
                     RESTORE_SWITCHABLE_TYPES);
    ++counts->switchable_restore[unit_rtype];
    switch (unit_rtype) {
      case RESTORE_WIENER:
        write_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, w);
        break;
      case RESTORE_SGRPROJ:
        write_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, w);
        break;
      default: assert(unit_rtype == RESTORE_NONE); break;
    }
  } else if (frame_rtype == RESTORE_WIENER) {
    // Frame type fixes the filter kind; only an on/off symbol per unit.
    aom_write_symbol(w, unit_rtype != RESTORE_NONE,
                     xd->tile_ctx->wiener_restore_cdf, 2);
    ++counts->wiener_restore[unit_rtype != RESTORE_NONE];
    if (unit_rtype != RESTORE_NONE) {
      write_wiener_filter(wiener_win, &rui->wiener_info, wiener_info, w);
    }
  } else if (frame_rtype == RESTORE_SGRPROJ) {
    aom_write_symbol(w, unit_rtype != RESTORE_NONE,
                     xd->tile_ctx->sgrproj_restore_cdf, 2);
    ++counts->sgrproj_restore[unit_rtype != RESTORE_NONE];
    if (unit_rtype != RESTORE_NONE) {
      write_sgrproj_filter(&rui->sgrproj_info, sgrproj_info, w);
    }
  }
}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002009
// Writes the loop-filter section of the frame header: per-edge luma levels,
// optional chroma levels, sharpness, and the mode/ref delta update lists.
// Side effect: when a delta is written, last_{ref,mode}_deltas is updated so
// subsequent frames signal only changes.
static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
  assert(!cm->all_lossless);
  // No loop-filter header when intra block copy disables in-loop filters.
  if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
  const int num_planes = av1_num_planes(cm);
  int i;
  struct loopfilter *lf = &cm->lf;

  // Encode the loop filter level and type
  aom_wb_write_literal(wb, lf->filter_level[0], 6);
  aom_wb_write_literal(wb, lf->filter_level[1], 6);
  if (num_planes > 1) {
    // Chroma levels are only coded when luma filtering is enabled.
    if (lf->filter_level[0] || lf->filter_level[1]) {
      aom_wb_write_literal(wb, lf->filter_level_u, 6);
      aom_wb_write_literal(wb, lf->filter_level_v, 6);
    }
  }
  aom_wb_write_literal(wb, lf->sharpness_level, 3);

  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
  aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);

  if (lf->mode_ref_delta_enabled) {
    aom_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
      // Each delta gets a "changed" bit; the value follows only when changed.
      for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
        const int delta = lf->ref_deltas[i];
        const int changed = delta != lf->last_ref_deltas[i];
        aom_wb_write_bit(wb, changed);
        if (changed) {
          lf->last_ref_deltas[i] = delta;
          aom_wb_write_inv_signed_literal(wb, delta, 6);
        }
      }

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
        const int delta = lf->mode_deltas[i];
        const int changed = delta != lf->last_mode_deltas[i];
        aom_wb_write_bit(wb, changed);
        if (changed) {
          lf->last_mode_deltas[i] = delta;
          aom_wb_write_inv_signed_literal(wb, delta, 6);
        }
      }
    }
  }
}
2057
// Writes the CDEF section of the frame header: damping, the strength-index
// bit count, and one (luma[, chroma]) strength pair per coded strength.
static void encode_cdef(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
  // No CDEF header when intra block copy disables in-loop filters.
  if (cm->allow_intrabc && NO_FILTER_FOR_IBC) return;
  const int num_planes = av1_num_planes(cm);
  int i;
  // Damping is coded as an offset from its minimum value of 3.
  aom_wb_write_literal(wb, cm->cdef_pri_damping - 3, 2);
  assert(cm->cdef_pri_damping == cm->cdef_sec_damping);
  aom_wb_write_literal(wb, cm->cdef_bits, 2);
  for (i = 0; i < cm->nb_cdef_strengths; i++) {
    aom_wb_write_literal(wb, cm->cdef_strengths[i], CDEF_STRENGTH_BITS);
    if (num_planes > 1)
      aom_wb_write_literal(wb, cm->cdef_uv_strengths[i], CDEF_STRENGTH_BITS);
  }
}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002071
// Signals an optional quantizer delta: a one-bit presence flag, followed by
// the value as a 6-bit inverse-signed literal only when the delta is nonzero.
static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) {
  const int present = (delta_q != 0);
  aom_wb_write_bit(wb, present);
  if (present) aom_wb_write_inv_signed_literal(wb, delta_q, 6);
}
2080
// Writes the quantization section of the frame header: base q index, the DC/AC
// delta-q values (chroma deltas only for non-monochrome streams), and the
// quantization-matrix parameters when qmatrix coding is enabled.
static void encode_quantization(const AV1_COMMON *const cm,
                                struct aom_write_bit_buffer *wb) {
  const int num_planes = av1_num_planes(cm);

  aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  if (num_planes > 1) {
    // V deltas are only coded when they differ from the U deltas.
    int diff_uv_delta = (cm->u_dc_delta_q != cm->v_dc_delta_q) ||
                        (cm->u_ac_delta_q != cm->v_ac_delta_q);
    if (cm->separate_uv_delta_q) aom_wb_write_bit(wb, diff_uv_delta);
    write_delta_q(wb, cm->u_dc_delta_q);
    write_delta_q(wb, cm->u_ac_delta_q);
    if (diff_uv_delta) {
      write_delta_q(wb, cm->v_dc_delta_q);
      write_delta_q(wb, cm->v_ac_delta_q);
    }
  }
  aom_wb_write_bit(wb, cm->using_qmatrix);
  if (cm->using_qmatrix) {
#if CONFIG_AOM_QM_EXT
    // Per-plane QM levels; qm_v is implied equal to qm_u unless the stream
    // uses separate UV delta-q.
    aom_wb_write_literal(wb, cm->qm_y, QM_LEVEL_BITS);
    aom_wb_write_literal(wb, cm->qm_u, QM_LEVEL_BITS);
    if (!cm->separate_uv_delta_q)
      assert(cm->qm_u == cm->qm_v);
    else
      aom_wb_write_literal(wb, cm->qm_v, QM_LEVEL_BITS);
#else
    aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS);
    aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS);
#endif
  }
}
2113
// Writes the segmentation section of the frame header: enable flag, map/data
// update flags, and the active feature data per segment. On intra-only or
// error-resilient frames the update flags are implied and not coded.
// Side effect: may call av1_choose_segmap_coding_method and, under the
// spatial-segmentation config, updates seg->preskip_segid bookkeeping.
static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
                                struct aom_write_bit_buffer *wb) {
  int i, j;
  struct segmentation *seg = &cm->seg;

  aom_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled) return;

  // Write update flags
  if (frame_is_intra_only(cm) || cm->error_resilient_mode) {
    // Map and data updates are mandatory here, so nothing is signalled.
    assert(seg->update_map == 1);
    seg->temporal_update = 0;
    assert(seg->update_data == 1);
  } else {
    aom_wb_write_bit(wb, seg->update_map);
    if (seg->update_map) {
      // Select the coding strategy (temporal or spatial)
      av1_choose_segmap_coding_method(cm, xd);
      aom_wb_write_bit(wb, seg->temporal_update);
    }
    aom_wb_write_bit(wb, seg->update_data);
  }

#if !CONFIG_SEGMENT_PRED_LAST && CONFIG_SPATIAL_SEGMENTATION
  seg->preskip_segid = 0;
#endif

  // Segmentation data
  if (seg->update_data) {
    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        // One active bit per (segment, feature); data follows when active.
        const int active = segfeature_active(seg, i, j);
        aom_wb_write_bit(wb, active);
        if (active) {
#if !CONFIG_SEGMENT_PRED_LAST && CONFIG_SPATIAL_SEGMENTATION
          seg->preskip_segid |= j >= SEG_LVL_REF_FRAME;
          seg->last_active_segid = i;
#endif
          const int data_max = av1_seg_feature_data_max(j);
          const int data_min = -data_max;
          const int ubits = get_unsigned_bits(data_max);
          // Clamp to the legal range before writing.
          const int data = clamp(get_segdata(seg, i, j), data_min, data_max);

          if (av1_is_segfeature_signed(j)) {
            aom_wb_write_inv_signed_literal(wb, data, ubits);
          } else {
            aom_wb_write_literal(wb, data, ubits);
          }
        }
      }
    }
  }
}
2167
Thomas Daedef636d5c2017-06-29 13:48:27 -07002168static void write_tx_mode(AV1_COMMON *cm, TX_MODE *mode,
Yue Cheneeacc4c2017-01-17 17:29:17 -08002169 struct aom_write_bit_buffer *wb) {
Thomas Daedef636d5c2017-06-29 13:48:27 -07002170 if (cm->all_lossless) {
Yue Cheneeacc4c2017-01-17 17:29:17 -08002171 *mode = ONLY_4X4;
2172 return;
2173 }
Debargha Mukherjee923b73d2017-10-31 18:11:34 -07002174 aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002175}
2176
Angie Chiang5678ad92016-11-21 09:38:40 -08002177static void write_frame_interp_filter(InterpFilter filter,
2178 struct aom_write_bit_buffer *wb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002179 aom_wb_write_bit(wb, filter == SWITCHABLE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002180 if (filter != SWITCHABLE)
Angie Chiang6305abe2016-10-24 12:24:44 -07002181 aom_wb_write_literal(wb, filter, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002182}
2183
Yaowu Xuf883b422016-08-30 14:01:10 -07002184static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002185 if (cm->interp_filter == SWITCHABLE) {
2186 // Check to see if only one of the filters is actually used
2187 int count[SWITCHABLE_FILTERS];
2188 int i, j, c = 0;
2189 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2190 count[i] = 0;
2191 for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
2192 count[i] += counts->switchable_interp[j][i];
2193 c += (count[i] > 0);
2194 }
2195 if (c == 1) {
2196 // Only one filter is used. So set the filter at frame level
2197 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2198 if (count[i]) {
Sarah Parker4c10a3c2017-04-10 19:37:59 -07002199 if (i == EIGHTTAP_REGULAR || WARP_WM_NEIGHBORS_WITH_OBMC)
Debargha Mukherjee604d8462017-04-06 15:27:00 -07002200 cm->interp_filter = i;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002201 break;
2202 }
2203 }
2204 }
2205 }
2206}
2207
Dominic Symesdb5d66f2017-08-18 18:11:34 +02002208#if CONFIG_MAX_TILE
2209
// Same coding scheme as write_uniform, but emitted into the uncompressed
// header bit buffer: a value in [0, n) is written with either l-1 or l bits,
// where l = ceil(log2(n)), so the shorter codewords cover the first
// (1 << l) - n values.
static void wb_write_uniform(struct aom_write_bit_buffer *wb, int n, int v) {
  const int bits = get_unsigned_bits(n);
  if (bits == 0) return;
  const int num_short = (1 << bits) - n;  // values that fit in bits-1 bits
  if (v < num_short) {
    aom_wb_write_literal(wb, v, bits - 1);
    return;
  }
  const int rem = v - num_short;
  aom_wb_write_literal(wb, num_short + (rem >> 1), bits - 1);
  aom_wb_write_literal(wb, rem & 1, 1);
}
2222
// Writes the CONFIG_MAX_TILE tile layout: a uniform-spacing flag, then either
// unary-coded log2 tile column/row counts (uniform case) or the explicit
// per-tile widths/heights in superblock units (non-uniform case).
static void write_tile_info_max_tile(const AV1_COMMON *const cm,
                                     struct aom_write_bit_buffer *wb) {
  // Frame dimensions rounded up to whole superblocks, in SB units.
  int width_mi = ALIGN_POWER_OF_TWO(cm->mi_cols, cm->seq_params.mib_size_log2);
  int height_mi = ALIGN_POWER_OF_TWO(cm->mi_rows, cm->seq_params.mib_size_log2);
  int width_sb = width_mi >> cm->seq_params.mib_size_log2;
  int height_sb = height_mi >> cm->seq_params.mib_size_log2;
  int size_sb, i;

  aom_wb_write_bit(wb, cm->uniform_tile_spacing_flag);

  if (cm->uniform_tile_spacing_flag) {
    // Uniform spaced tiles with power-of-two number of rows and columns
    // tile columns: unary code of log2_tile_cols above the minimum, with a
    // terminating 0 unless already at the maximum.
    int ones = cm->log2_tile_cols - cm->min_log2_tile_cols;
    while (ones--) {
      aom_wb_write_bit(wb, 1);
    }
    if (cm->log2_tile_cols < cm->max_log2_tile_cols) {
      aom_wb_write_bit(wb, 0);
    }

    // rows
    ones = cm->log2_tile_rows - cm->min_log2_tile_rows;
    while (ones--) {
      aom_wb_write_bit(wb, 1);
    }
    if (cm->log2_tile_rows < cm->max_log2_tile_rows) {
      aom_wb_write_bit(wb, 0);
    }
  } else {
    // Explicit tiles with configurable tile widths and heights
    // columns: each size is uniform-coded against the remaining width.
    for (i = 0; i < cm->tile_cols; i++) {
      size_sb = cm->tile_col_start_sb[i + 1] - cm->tile_col_start_sb[i];
      wb_write_uniform(wb, AOMMIN(width_sb, cm->max_tile_width_sb),
                       size_sb - 1);
      width_sb -= size_sb;
    }
    assert(width_sb == 0);

    // rows
    for (i = 0; i < cm->tile_rows; i++) {
      size_sb = cm->tile_row_start_sb[i + 1] - cm->tile_row_start_sb[i];
      wb_write_uniform(wb, AOMMIN(height_sb, cm->max_tile_height_sb),
                       size_sb - 1);
      height_sb -= size_sb;
    }
    assert(height_sb == 0);
  }
}
2273#endif
2274
// Writes the tile layout portion of the uncompressed header.
//
// In large-scale-tile mode the tile width/height (in superblock units) are
// coded directly; otherwise either the CONFIG_MAX_TILE syntax or the legacy
// log2-based tile syntax is used. Loop-filter-across-tiles flags follow when
// that experiment is enabled. Finally the writer state is snapshotted into
// *saved_wb and placeholder tile-size-byte fields are written, so the caller
// can revisit these fields once actual tile sizes are known.
static void write_tile_info(const AV1_COMMON *const cm,
                            struct aom_write_bit_buffer *saved_wb,
                            struct aom_write_bit_buffer *wb) {
  if (cm->large_scale_tile) {
    // Tile dimensions in superblock units, rounded up.
    const int tile_width =
        ALIGN_POWER_OF_TWO(cm->tile_width, cm->seq_params.mib_size_log2) >>
        cm->seq_params.mib_size_log2;
    const int tile_height =
        ALIGN_POWER_OF_TWO(cm->tile_height, cm->seq_params.mib_size_log2) >>
        cm->seq_params.mib_size_log2;

    assert(tile_width > 0);
    assert(tile_height > 0);

    // Write the tile sizes (5 bits suffice with 128x128 superblocks, since
    // fewer superblocks fit in the maximum tile dimension).
    if (cm->seq_params.sb_size == BLOCK_128X128) {
      assert(tile_width <= 32);
      assert(tile_height <= 32);
      aom_wb_write_literal(wb, tile_width - 1, 5);
      aom_wb_write_literal(wb, tile_height - 1, 5);
    } else {
      assert(tile_width <= 64);
      assert(tile_height <= 64);
      aom_wb_write_literal(wb, tile_width - 1, 6);
      aom_wb_write_literal(wb, tile_height - 1, 6);
    }
  } else {
#if CONFIG_MAX_TILE
    write_tile_info_max_tile(cm, wb);
#else
    int min_log2_tile_cols, max_log2_tile_cols, ones;
    av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

    // columns: unary-coded offset from the minimum log2 column count,
    // terminated by a 0 bit unless already at the maximum
    ones = cm->log2_tile_cols - min_log2_tile_cols;
    while (ones--) aom_wb_write_bit(wb, 1);

    if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0);

    // rows: log2_tile_rows in {0, 1, 2} coded with up to two bits
    aom_wb_write_bit(wb, cm->log2_tile_rows != 0);
    if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1);
#endif
  }

#if CONFIG_LOOPFILTERING_ACROSS_TILES
#if CONFIG_LOOPFILTERING_ACROSS_TILES_EXT
  // Vertical/horizontal flags are only meaningful with >1 column/row.
  if (cm->tile_cols > 1) {
    aom_wb_write_bit(wb, cm->loop_filter_across_tiles_v_enabled);
  }
  if (cm->tile_rows > 1) {
    aom_wb_write_bit(wb, cm->loop_filter_across_tiles_h_enabled);
  }
#else
  if (cm->tile_cols * cm->tile_rows > 1)
    aom_wb_write_bit(wb, cm->loop_filter_across_tiles_enabled);
#endif  // CONFIG_LOOPFILTERING_ACROSS_TILES_EXT
#endif  // CONFIG_LOOPFILTERING_ACROSS_TILES

  // Snapshot the writer position; the literals below can be revisited
  // through saved_wb by the caller.
  *saved_wb = *wb;
  if (cm->large_scale_tile) {
    if (cm->tile_rows * cm->tile_cols > 1) {
      // Note that the last item in the uncompressed header is the data
      // describing tile configuration.
      // Number of bytes in tile column size - 1
      aom_wb_write_literal(wb, 0, 2);
      // Number of bytes in tile size - 1
      aom_wb_write_literal(wb, 0, 2);
    }
    return;
  }
  if (cm->tile_rows * cm->tile_cols > 1) {
    // Number of bytes in tile size - 1
    aom_wb_write_literal(wb, 3, 2);
  }
}
2351
Zoe Liu8dd1c982017-09-11 10:14:35 -07002352#if USE_GF16_MULTI_LAYER
2353static int get_refresh_mask_gf16(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002354 int refresh_mask = 0;
2355
Zoe Liu8dd1c982017-09-11 10:14:35 -07002356 if (cpi->refresh_last_frame || cpi->refresh_golden_frame ||
2357 cpi->refresh_bwd_ref_frame || cpi->refresh_alt2_ref_frame ||
2358 cpi->refresh_alt_ref_frame) {
2359 assert(cpi->refresh_fb_idx >= 0 && cpi->refresh_fb_idx < REF_FRAMES);
2360 refresh_mask |= (1 << cpi->refresh_fb_idx);
2361 }
2362
2363 return refresh_mask;
2364}
2365#endif // USE_GF16_MULTI_LAYER
Zoe Liu8dd1c982017-09-11 10:14:35 -07002366
2367static int get_refresh_mask(AV1_COMP *cpi) {
Yi Luo2e6a9ab2017-09-15 08:13:59 -07002368 int refresh_mask = 0;
Zoe Liu8dd1c982017-09-11 10:14:35 -07002369#if USE_GF16_MULTI_LAYER
2370 if (cpi->rc.baseline_gf_interval == 16) return get_refresh_mask_gf16(cpi);
2371#endif // USE_GF16_MULTI_LAYER
2372
Yaowu Xuc27fc142016-08-22 16:08:15 -07002373 // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
2374 // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
2375 // the 3 LAST reference frames will be updated accordingly, i.e.:
2376 // (1) The original virtual index for LAST3_FRAME will become the new virtual
2377 // index for LAST_FRAME; and
2378 // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
2379 // shifted and become the new virtual indexes for LAST2_FRAME and
2380 // LAST3_FRAME.
2381 refresh_mask |=
2382 (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]);
Zoe Liue9b15e22017-07-19 15:53:01 -07002383
Zoe Liue9b15e22017-07-19 15:53:01 -07002384 refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
2385 refresh_mask |= (cpi->refresh_alt2_ref_frame << cpi->alt2_fb_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002386
Yaowu Xuf883b422016-08-30 14:01:10 -07002387 if (av1_preserve_existing_gf(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002388 // We have decided to preserve the previously existing golden frame as our
2389 // new ARF frame. However, in the short term we leave it in the GF slot and,
2390 // if we're updating the GF with the current decoded frame, we save it
2391 // instead to the ARF slot.
Yaowu Xuf883b422016-08-30 14:01:10 -07002392 // Later, in the function av1_encoder.c:av1_update_reference_frames() we
Yaowu Xuc27fc142016-08-22 16:08:15 -07002393 // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
2394 // there so that it can be done outside of the recode loop.
2395 // Note: This is highly specific to the use of ARF as a forward reference,
2396 // and this needs to be generalized as other uses are implemented
2397 // (like RTC/temporal scalability).
2398 return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx);
2399 } else {
Zoe Liue9b15e22017-07-19 15:53:01 -07002400 const int arf_idx = cpi->alt_fb_idx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002401 return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
2402 (cpi->refresh_alt_ref_frame << arf_idx);
2403 }
2404}
2405
Yaowu Xuc27fc142016-08-22 16:08:15 -07002406static INLINE int find_identical_tile(
2407 const int tile_row, const int tile_col,
2408 TileBufferEnc (*const tile_buffers)[1024]) {
2409 const MV32 candidate_offset[1] = { { 1, 0 } };
2410 const uint8_t *const cur_tile_data =
2411 tile_buffers[tile_row][tile_col].data + 4;
Jingning Han99ffce62017-04-25 15:48:41 -07002412 const size_t cur_tile_size = tile_buffers[tile_row][tile_col].size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002413
2414 int i;
2415
2416 if (tile_row == 0) return 0;
2417
2418 // (TODO: yunqingwang) For now, only above tile is checked and used.
2419 // More candidates such as left tile can be added later.
2420 for (i = 0; i < 1; i++) {
2421 int row_offset = candidate_offset[0].row;
2422 int col_offset = candidate_offset[0].col;
2423 int row = tile_row - row_offset;
2424 int col = tile_col - col_offset;
2425 uint8_t tile_hdr;
2426 const uint8_t *tile_data;
2427 TileBufferEnc *candidate;
2428
2429 if (row < 0 || col < 0) continue;
2430
2431 tile_hdr = *(tile_buffers[row][col].data);
2432
2433 // Read out tcm bit
2434 if ((tile_hdr >> 7) == 1) {
2435 // The candidate is a copy tile itself
2436 row_offset += tile_hdr & 0x7f;
2437 row = tile_row - row_offset;
2438 }
2439
2440 candidate = &tile_buffers[row][col];
2441
2442 if (row_offset >= 128 || candidate->size != cur_tile_size) continue;
2443
2444 tile_data = candidate->data + 4;
2445
2446 if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;
2447
2448 // Identical tile found
2449 assert(row_offset > 0);
2450 return row_offset;
2451 }
2452
2453 // No identical tile found
2454 return 0;
2455}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002456
Yaowu Xuf883b422016-08-30 14:01:10 -07002457static void write_render_size(const AV1_COMMON *cm,
2458 struct aom_write_bit_buffer *wb) {
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002459 const int scaling_active = !av1_resize_unscaled(cm);
Yaowu Xuf883b422016-08-30 14:01:10 -07002460 aom_wb_write_bit(wb, scaling_active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002461 if (scaling_active) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002462 aom_wb_write_literal(wb, cm->render_width - 1, 16);
2463 aom_wb_write_literal(wb, cm->render_height - 1, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002464 }
2465}
2466
Fergus Simpsone7508412017-03-14 18:14:09 -07002467static void write_superres_scale(const AV1_COMMON *const cm,
2468 struct aom_write_bit_buffer *wb) {
Fergus Simpsone7508412017-03-14 18:14:09 -07002469 // First bit is whether to to scale or not
Urvang Joshide71d142017-10-05 12:12:15 -07002470 if (cm->superres_scale_denominator == SCALE_NUMERATOR) {
Fergus Simpsone7508412017-03-14 18:14:09 -07002471 aom_wb_write_bit(wb, 0); // no scaling
2472 } else {
2473 aom_wb_write_bit(wb, 1); // scaling, write scale factor
Urvang Joshi83010182017-10-27 12:36:02 -07002474 assert(cm->superres_scale_denominator >= SUPERRES_SCALE_DENOMINATOR_MIN);
2475 assert(cm->superres_scale_denominator <
2476 SUPERRES_SCALE_DENOMINATOR_MIN + (1 << SUPERRES_SCALE_BITS));
Fergus Simpsone7508412017-03-14 18:14:09 -07002477 aom_wb_write_literal(
Urvang Joshide71d142017-10-05 12:12:15 -07002478 wb, cm->superres_scale_denominator - SUPERRES_SCALE_DENOMINATOR_MIN,
Fergus Simpsone7508412017-03-14 18:14:09 -07002479 SUPERRES_SCALE_BITS);
2480 }
2481}
Fergus Simpsone7508412017-03-14 18:14:09 -07002482
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002483static void write_frame_size(const AV1_COMMON *cm, int frame_size_override,
Tom Finegan8ab2bba2018-02-28 07:36:28 -08002484 struct aom_write_bit_buffer *wb) {
David Barker22171312017-11-20 11:26:04 +00002485 const int coded_width = cm->superres_upscaled_width - 1;
2486 const int coded_height = cm->superres_upscaled_height - 1;
David Barker22171312017-11-20 11:26:04 +00002487
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002488 if (frame_size_override) {
2489 const SequenceHeader *seq_params = &cm->seq_params;
2490 int num_bits_width = seq_params->num_bits_width;
2491 int num_bits_height = seq_params->num_bits_height;
David Barker22171312017-11-20 11:26:04 +00002492 aom_wb_write_literal(wb, coded_width, num_bits_width);
2493 aom_wb_write_literal(wb, coded_height, num_bits_height);
Arild Fuldseth (arilfuld)7193f022017-10-30 12:24:57 +01002494 }
David Barker22171312017-11-20 11:26:04 +00002495
David Barker22171312017-11-20 11:26:04 +00002496 write_superres_scale(cm, wb);
Fergus Simpsond2bcbb52017-05-22 23:15:05 -07002497 write_render_size(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002498}
2499
// Writes the frame size by reference when possible: for each inter reference
// in LAST..ALTREF order, one bit signals whether the current (superres
// upscaled) size and render size both match that reference. Coding stops at
// the first match (only the superres scale follows); if no reference
// matches, the full frame size syntax is written instead.
static void write_frame_size_with_refs(AV1_COMP *cpi,
                                       struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  int found = 0;

  MV_REFERENCE_FRAME ref_frame;
  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);

    // When cfg is NULL, `found` keeps its previous value, which is
    // necessarily 0 here (a 1 would have broken out of the loop below).
    if (cfg != NULL) {
      found = cm->superres_upscaled_width == cfg->y_crop_width &&
              cm->superres_upscaled_height == cfg->y_crop_height;
      found &= cm->render_width == cfg->render_width &&
               cm->render_height == cfg->render_height;
    }
    aom_wb_write_bit(wb, found);
    if (found) {
      // Size is inherited from the reference; only superres scale is coded.
      write_superres_scale(cm, wb);
      break;
    }
  }

  if (!found) {
    int frame_size_override = 1;  // Always equal to 1 in this function
    write_frame_size(cm, frame_size_override, wb);
  }
}
2527
Yaowu Xuc27fc142016-08-22 16:08:15 -07002528static void write_profile(BITSTREAM_PROFILE profile,
Yaowu Xuf883b422016-08-30 14:01:10 -07002529 struct aom_write_bit_buffer *wb) {
Debargha Mukherjeef9a50ea2018-01-09 22:28:20 -08002530 assert(profile >= PROFILE_0 && profile < MAX_PROFILES);
2531 aom_wb_write_literal(wb, profile, 2);
2532}
2533
2534static void write_bitdepth(AV1_COMMON *const cm,
2535 struct aom_write_bit_buffer *wb) {
2536 // Profile 0/1: [0] for 8 bit, [1] 10-bit
2537 // Profile 2: [0] for 8 bit, [10] 10-bit, [11] - 12-bit
2538 aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_8 ? 0 : 1);
2539 if (cm->profile == PROFILE_2 && cm->bit_depth != AOM_BITS_8) {
2540 aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002541 }
2542}
2543
// Writes the color configuration: bit depth, monochrome flag, CICP color
// description (primaries / transfer characteristics / matrix coefficients),
// color range, chroma subsampling and chroma sample position, constrained by
// the current profile.
static void write_bitdepth_colorspace_sampling(
    AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
  write_bitdepth(cm, wb);
  const int is_monochrome = cm->seq_params.monochrome;
  // monochrome bit (profile 1 cannot be monochrome, so nothing is coded)
  if (cm->profile != PROFILE_1)
    aom_wb_write_bit(wb, is_monochrome);
  else
    assert(!is_monochrome);
  if (cm->color_primaries == AOM_CICP_CP_UNSPECIFIED &&
      cm->transfer_characteristics == AOM_CICP_TC_UNSPECIFIED &&
      cm->matrix_coefficients == AOM_CICP_MC_UNSPECIFIED) {
    aom_wb_write_bit(wb, 0);  // No color description present
  } else {
    aom_wb_write_bit(wb, 1);  // Color description present
    aom_wb_write_literal(wb, cm->color_primaries, 8);
    aom_wb_write_literal(wb, cm->transfer_characteristics, 8);
    aom_wb_write_literal(wb, cm->matrix_coefficients, 8);
  }
  // Monochrome streams carry no subsampling/range information.
  if (is_monochrome) return;
  if (cm->color_primaries == AOM_CICP_CP_BT_709 &&
      cm->transfer_characteristics == AOM_CICP_TC_SRGB &&
      cm->matrix_coefficients ==
          AOM_CICP_MC_IDENTITY) {  // it would be better to remove this
                                   // dependency too
    // sRGB/identity implies full-range 4:4:4, so nothing more is coded.
    assert(cm->subsampling_x == 0 && cm->subsampling_y == 0);
    assert(cm->profile == PROFILE_1 ||
           (cm->profile == PROFILE_2 && cm->bit_depth == AOM_BITS_12));
  } else {
    // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
    aom_wb_write_bit(wb, cm->color_range);
    if (cm->profile == PROFILE_0) {
      // 420 only
      assert(cm->subsampling_x == 1 && cm->subsampling_y == 1);
    } else if (cm->profile == PROFILE_1) {
      // 444 only
      assert(cm->subsampling_x == 0 && cm->subsampling_y == 0);
    } else if (cm->profile == PROFILE_2) {
      if (cm->bit_depth == AOM_BITS_12) {
        // 420, 444 or 422
        aom_wb_write_bit(wb, cm->subsampling_x);
        if (cm->subsampling_x == 0) {
          assert(cm->subsampling_y == 0 &&
                 "4:4:0 subsampling not allowed in AV1");
        } else {
          aom_wb_write_bit(wb, cm->subsampling_y);
        }
      } else {
        // 422 only
        assert(cm->subsampling_x == 1 && cm->subsampling_y == 0);
      }
    }
    if (cm->subsampling_x == 1 && cm->subsampling_y == 1) {
      aom_wb_write_literal(wb, cm->chroma_sample_position, 2);
    }
  }
  aom_wb_write_bit(wb, cm->separate_uv_delta_q);
}
2602
Andrey Norkin28e9ce22018-01-08 10:11:21 -08002603static void write_timing_info_header(AV1_COMMON *const cm,
2604 struct aom_write_bit_buffer *wb) {
2605 aom_wb_write_bit(wb, cm->timing_info_present); // timing info present flag
2606
2607 if (cm->timing_info_present) {
2608 aom_wb_write_unsigned_literal(wb, cm->num_units_in_tick,
2609 32); // Number of units in tick
2610 aom_wb_write_unsigned_literal(wb, cm->time_scale, 32); // Time scale
2611 aom_wb_write_bit(wb,
2612 cm->equal_picture_interval); // Equal picture interval bit
2613 if (cm->equal_picture_interval) {
2614 aom_wb_write_uvlc(wb,
2615 cm->num_ticks_per_picture - 1); // ticks per picture
2616 }
2617 }
2618}
Andrey Norkin28e9ce22018-01-08 10:11:21 -08002619
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002620#if CONFIG_FILM_GRAIN
Dominic Symesd4929012018-01-31 17:32:01 +01002621static void write_film_grain_params(AV1_COMP *cpi,
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002622 struct aom_write_bit_buffer *wb) {
Dominic Symesd4929012018-01-31 17:32:01 +01002623 AV1_COMMON *const cm = &cpi->common;
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002624 aom_film_grain_t *pars = &cm->film_grain_params;
2625
Dominic Symesd4929012018-01-31 17:32:01 +01002626 cm->cur_frame->film_grain_params = *pars;
2627
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002628 aom_wb_write_bit(wb, pars->apply_grain);
2629 if (!pars->apply_grain) return;
2630
2631 aom_wb_write_literal(wb, pars->random_seed, 16);
2632
2633 pars->random_seed += 3245; // For film grain test vectors purposes
2634 if (!pars->random_seed) // Random seed should not be zero
2635 pars->random_seed += 1735;
Andrey Norkin879488f2018-02-28 15:30:26 -08002636 if (cm->frame_type == INTER_FRAME)
2637 aom_wb_write_bit(wb, pars->update_parameters);
2638 else
2639 pars->update_parameters = 1;
Dominic Symesd4929012018-01-31 17:32:01 +01002640#if CONFIG_FILM_GRAIN_SHOWEX
2641 if (!pars->update_parameters) {
2642 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
2643 int ref_frame, ref_idx, buf_idx;
2644 for (ref_frame = LAST_FRAME; ref_frame < TOTAL_REFS_PER_FRAME;
2645 ref_frame++) {
2646 ref_idx = get_ref_frame_map_idx(cpi, ref_frame);
Dominic Symes4d375682018-02-28 17:26:04 +01002647 assert(ref_idx != INVALID_IDX);
Dominic Symesd4929012018-01-31 17:32:01 +01002648 buf_idx = cm->ref_frame_map[ref_idx];
2649 if (frame_bufs[buf_idx].film_grain_params_present &&
2650 memcmp(pars, &frame_bufs[buf_idx].film_grain_params, sizeof(*pars))) {
2651 break;
2652 }
2653 }
2654 assert(ref_frame < TOTAL_REFS_PER_FRAME);
2655 aom_wb_write_literal(wb, ref_idx, 3);
2656 return;
2657 }
2658#else
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002659 if (!pars->update_parameters) return;
Dominic Symesd4929012018-01-31 17:32:01 +01002660#endif
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002661
2662 // Scaling functions parameters
2663
2664 aom_wb_write_literal(wb, pars->num_y_points, 4); // max 14
2665 for (int i = 0; i < pars->num_y_points; i++) {
2666 aom_wb_write_literal(wb, pars->scaling_points_y[i][0], 8);
2667 aom_wb_write_literal(wb, pars->scaling_points_y[i][1], 8);
2668 }
2669
Andrey Norkin20be5452018-02-20 17:46:13 -08002670 if (!cm->seq_params.monochrome)
2671 aom_wb_write_bit(wb, pars->chroma_scaling_from_luma);
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002672
Andrey Norkin20be5452018-02-20 17:46:13 -08002673 if (cm->seq_params.monochrome || pars->chroma_scaling_from_luma) {
2674 pars->num_cb_points = 0;
2675 pars->num_cr_points = 0;
Andrey Norkin0c294fa2018-02-16 18:32:12 -08002676 } else {
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002677 aom_wb_write_literal(wb, pars->num_cb_points, 4); // max 10
2678 for (int i = 0; i < pars->num_cb_points; i++) {
2679 aom_wb_write_literal(wb, pars->scaling_points_cb[i][0], 8);
2680 aom_wb_write_literal(wb, pars->scaling_points_cb[i][1], 8);
2681 }
2682
2683 aom_wb_write_literal(wb, pars->num_cr_points, 4); // max 10
2684 for (int i = 0; i < pars->num_cr_points; i++) {
2685 aom_wb_write_literal(wb, pars->scaling_points_cr[i][0], 8);
2686 aom_wb_write_literal(wb, pars->scaling_points_cr[i][1], 8);
2687 }
2688 }
2689
2690 aom_wb_write_literal(wb, pars->scaling_shift - 8, 2); // 8 + value
2691
2692 // AR coefficients
2693 // Only sent if the corresponsing scaling function has
2694 // more than 0 points
2695
2696 aom_wb_write_literal(wb, pars->ar_coeff_lag, 2);
2697
2698 int num_pos_luma = 2 * pars->ar_coeff_lag * (pars->ar_coeff_lag + 1);
Andrey Norkin20be5452018-02-20 17:46:13 -08002699 int num_pos_chroma = num_pos_luma;
2700 if (pars->num_y_points > 0) ++num_pos_chroma;
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002701
2702 if (pars->num_y_points)
2703 for (int i = 0; i < num_pos_luma; i++)
2704 aom_wb_write_literal(wb, pars->ar_coeffs_y[i] + 128, 8);
2705
2706 if (pars->num_cb_points || pars->chroma_scaling_from_luma)
2707 for (int i = 0; i < num_pos_chroma; i++)
2708 aom_wb_write_literal(wb, pars->ar_coeffs_cb[i] + 128, 8);
2709
2710 if (pars->num_cr_points || pars->chroma_scaling_from_luma)
2711 for (int i = 0; i < num_pos_chroma; i++)
2712 aom_wb_write_literal(wb, pars->ar_coeffs_cr[i] + 128, 8);
2713
2714 aom_wb_write_literal(wb, pars->ar_coeff_shift - 6, 2); // 8 + value
2715
Andrey Norkina840cde2018-02-16 15:39:50 -08002716 aom_wb_write_literal(wb, pars->grain_scale_shift, 2);
2717
Andrey Norkin6f1c2f72018-01-15 20:08:52 -08002718 if (pars->num_cb_points) {
2719 aom_wb_write_literal(wb, pars->cb_mult, 8);
2720 aom_wb_write_literal(wb, pars->cb_luma_mult, 8);
2721 aom_wb_write_literal(wb, pars->cb_offset, 9);
2722 }
2723
2724 if (pars->num_cr_points) {
2725 aom_wb_write_literal(wb, pars->cr_mult, 8);
2726 aom_wb_write_literal(wb, pars->cr_luma_mult, 8);
2727 aom_wb_write_literal(wb, pars->cr_offset, 9);
2728 }
2729
2730 aom_wb_write_bit(wb, pars->overlap_flag);
2731
2732 aom_wb_write_bit(wb, pars->clip_to_restricted_range);
2733}
2734#endif // CONFIG_FILM_GRAIN
2735
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002736static void write_sb_size(SequenceHeader *seq_params,
2737 struct aom_write_bit_buffer *wb) {
2738 (void)seq_params;
2739 (void)wb;
2740 assert(seq_params->mib_size == mi_size_wide[seq_params->sb_size]);
2741 assert(seq_params->mib_size == 1 << seq_params->mib_size_log2);
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002742 assert(seq_params->sb_size == BLOCK_128X128 ||
2743 seq_params->sb_size == BLOCK_64X64);
2744 aom_wb_write_bit(wb, seq_params->sb_size == BLOCK_128X128 ? 1 : 0);
Imdad Sardharwalla4ec84ab2018-02-06 12:20:18 +00002745}
2746
// Writes the sequence header: maximum frame dimensions, frame-id signalling
// parameters, superblock size, and sequence-level tool enable flags.
// NOTE: as a side effect this also fills in the derived fields of
// cm->seq_params (num_bits_*, max_frame_*, frame id lengths and presence).
void write_sequence_header(AV1_COMP *cpi, struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  SequenceHeader *seq_params = &cm->seq_params;

  // Frame dimension fields are coded with a fixed 16 bits each; the maximum
  // dimensions come from the encoder config, unless explicitly forced.
  int num_bits_width = 16;
  int num_bits_height = 16;
  int max_frame_width = cpi->oxcf.forced_max_frame_width
                            ? cpi->oxcf.forced_max_frame_width
                            : cpi->oxcf.width;
  int max_frame_height = cpi->oxcf.forced_max_frame_height
                             ? cpi->oxcf.forced_max_frame_height
                             : cpi->oxcf.height;

  seq_params->num_bits_width = num_bits_width;
  seq_params->num_bits_height = num_bits_height;
  seq_params->max_frame_width = max_frame_width;
  seq_params->max_frame_height = max_frame_height;

  aom_wb_write_literal(wb, num_bits_width - 1, 4);
  aom_wb_write_literal(wb, num_bits_height - 1, 4);
  aom_wb_write_literal(wb, max_frame_width - 1, num_bits_width);
  aom_wb_write_literal(wb, max_frame_height - 1, num_bits_height);

  /* Placeholder for actually writing to the bitstream */
  seq_params->frame_id_numbers_present_flag =
      cm->large_scale_tile ? 0 : cm->error_resilient_mode;
  seq_params->frame_id_length = FRAME_ID_LENGTH;
  seq_params->delta_frame_id_length = DELTA_FRAME_ID_LENGTH;

  aom_wb_write_bit(wb, seq_params->frame_id_numbers_present_flag);
  if (seq_params->frame_id_numbers_present_flag) {
    // We must always have delta_frame_id_length < frame_id_length,
    // in order for a frame to be referenced with a unique delta.
    // Avoid wasting bits by using a coding that enforces this restriction.
    aom_wb_write_literal(wb, seq_params->delta_frame_id_length - 2, 4);
    aom_wb_write_literal(
        wb, seq_params->frame_id_length - seq_params->delta_frame_id_length - 1,
        3);
  }

  write_sb_size(seq_params, wb);

  aom_wb_write_bit(wb, seq_params->enable_dual_filter);

  aom_wb_write_bit(wb, seq_params->enable_order_hint);

  // Joint compound weighting is only meaningful when order hints exist.
  if (seq_params->enable_order_hint)
    aom_wb_write_bit(wb, seq_params->enable_jnt_comp);

  // force_screen_content_tools == 2 means "decided per frame"; otherwise the
  // forced value itself is coded after a 0 bit.
  if (seq_params->force_screen_content_tools == 2) {
    aom_wb_write_bit(wb, 1);
  } else {
    aom_wb_write_bit(wb, 0);
    aom_wb_write_bit(wb, seq_params->force_screen_content_tools);
  }

#if CONFIG_AMVR
  // Same select-or-force scheme for integer MV, only coded when screen
  // content tools can be on.
  if (seq_params->force_screen_content_tools > 0) {
    if (seq_params->force_integer_mv == 2) {
      aom_wb_write_bit(wb, 1);
    } else {
      aom_wb_write_bit(wb, 0);
      aom_wb_write_bit(wb, seq_params->force_integer_mv);
    }
  } else {
    assert(seq_params->force_integer_mv == 2);
  }
#endif

#if CONFIG_EXPLICIT_ORDER_HINT
  if (seq_params->enable_order_hint)
    aom_wb_write_literal(wb, seq_params->order_hint_bits_minus1, 3);
#endif
}
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01002821
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002822static void write_compound_tools(const AV1_COMMON *cm,
2823 struct aom_write_bit_buffer *wb) {
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002824 if (!frame_is_intra_only(cm) && cm->reference_mode != COMPOUND_REFERENCE) {
2825 aom_wb_write_bit(wb, cm->allow_interintra_compound);
2826 } else {
2827 assert(cm->allow_interintra_compound == 0);
2828 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002829 if (!frame_is_intra_only(cm) && cm->reference_mode != SINGLE_REFERENCE) {
2830 aom_wb_write_bit(wb, cm->allow_masked_compound);
2831 } else {
2832 assert(cm->allow_masked_compound == 0);
2833 }
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002834}
Debargha Mukherjee9e2c7a62017-05-23 21:18:42 -07002835
// Codes one global motion model relative to the corresponding model of the
// reference (ref_params), using signed subexponential residual coding for
// each parameter. The model type is signalled first, and only the
// parameters required by that type follow.
static void write_global_motion_params(const WarpedMotionParams *params,
                                       const WarpedMotionParams *ref_params,
                                       struct aom_write_bit_buffer *wb,
                                       int allow_hp) {
  const TransformationType type = params->wmtype;

  aom_wb_write_bit(wb, type != IDENTITY);
  if (type != IDENTITY) {
#if GLOBAL_TRANS_TYPES > 4
    aom_wb_write_literal(wb, type - 1, GLOBAL_TYPE_BITS);
#else
    // With at most 4 types, the type is coded with one or two bits.
    aom_wb_write_bit(wb, type == ROTZOOM);
    if (type != ROTZOOM) aom_wb_write_bit(wb, type == TRANSLATION);
#endif  // GLOBAL_TRANS_TYPES > 4
  }

  if (type >= ROTZOOM) {
    // wmmat[2] is coded as an offset from 1 (in GM_ALPHA precision);
    // wmmat[3] is coded directly.
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
            (1 << GM_ALPHA_PREC_BITS),
        (params->wmmat[2] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF),
        (params->wmmat[3] >> GM_ALPHA_PREC_DIFF));
  }

  if (type >= AFFINE) {
    // Second row of the affine matrix; wmmat[5] offset from 1 like wmmat[2].
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF),
        (params->wmmat[4] >> GM_ALPHA_PREC_DIFF));
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
        (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
            (1 << GM_ALPHA_PREC_BITS),
        (params->wmmat[5] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
  }

  if (type >= TRANSLATION) {
    // Pure translations use a reduced precision/bit budget that also depends
    // on whether high-precision MVs are allowed.
    const int trans_bits = (type == TRANSLATION)
                               ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
                               : GM_ABS_TRANS_BITS;
    const int trans_prec_diff = (type == TRANSLATION)
                                    ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
                                    : GM_TRANS_PREC_DIFF;
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
        (ref_params->wmmat[0] >> trans_prec_diff),
        (params->wmmat[0] >> trans_prec_diff));
    aom_wb_write_signed_primitive_refsubexpfin(
        wb, (1 << trans_bits) + 1, SUBEXPFIN_K,
        (ref_params->wmmat[1] >> trans_prec_diff),
        (params->wmmat[1] >> trans_prec_diff));
  }
}
2893
// Writes the global motion model for every inter reference frame. Each model
// is coded relative to the same reference's model from the previous frame,
// or relative to the default (identity) model in error-resilient mode or
// when no previous frame exists.
static void write_global_motion(AV1_COMP *cpi,
                                struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  int frame;
  for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
    const WarpedMotionParams *ref_params =
        (cm->error_resilient_mode || cm->prev_frame == NULL)
            ? &default_warp_params
            : &cm->prev_frame->global_motion[frame];
    write_global_motion_params(&cm->global_motion[frame], ref_params, wb,
                               cm->allow_high_precision_mv);
    // TODO(sarahparker, debargha): The logic in the commented out code below
    // does not work currently and causes mismatches when resize is on.
    // Fix it before turning the optimization back on.
    /*
    YV12_BUFFER_CONFIG *ref_buf = get_ref_frame_buffer(cpi, frame);
    if (cpi->source->y_crop_width == ref_buf->y_crop_width &&
        cpi->source->y_crop_height == ref_buf->y_crop_height) {
      write_global_motion_params(&cm->global_motion[frame],
                                 &cm->prev_frame->global_motion[frame], wb,
                                 cm->allow_high_precision_mv);
    } else {
      assert(cm->global_motion[frame].wmtype == IDENTITY &&
             "Invalid warp type for frames of different resolutions");
    }
    */
    /*
    printf("Frame %d/%d: Enc Ref %d: %d %d %d %d\n",
           cm->current_video_frame, cm->show_frame, frame,
           cm->global_motion[frame].wmmat[0],
           cm->global_motion[frame].wmmat[1], cm->global_motion[frame].wmmat[2],
           cm->global_motion[frame].wmmat[3]);
    */
  }
}
Sarah Parker3e579a62017-08-23 16:53:20 -07002929
// New function based on HLS R18
// Writes the uncompressed frame header into 'wb'. The exact order of the
// aom_wb_write_* calls below defines the bitstream syntax and must be kept
// in sync with the decoder's header parsing. 'saved_wb' is forwarded to
// write_tile_info() so the tile sizes can be patched in later.
// Side effects: updates several fields of cm/cpi (frame_type,
// is_reference_frame, refresh masks, fb_of_context_type, ...).
static void write_uncompressed_header_obu(AV1_COMP *cpi,
                                          struct aom_write_bit_buffer *saved_wb,
                                          struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;

  // NOTE: By default all coded frames to be used as a reference
  cm->is_reference_frame = 1;

  if (cm->show_existing_frame) {
    // Short header: only identify the previously-coded frame to display.
    RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
    const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];

    if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
      aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                         "Buffer %d does not contain a reconstructed frame",
                         frame_to_show);
    }
    ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);

    aom_wb_write_bit(wb, 1);  // show_existing_frame
    aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);

    if (cm->seq_params.frame_id_numbers_present_flag) {
      int frame_id_len = cm->seq_params.frame_id_length;
      int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
      aom_wb_write_literal(wb, display_frame_id, frame_id_len);
    }

#if CONFIG_FILM_GRAIN && !CONFIG_FILM_GRAIN_SHOWEX
    if (cm->film_grain_params_present && cm->show_frame) {
      // Key frames must code full grain parameters; temporarily force
      // update_parameters on, then restore the flag afterwards.
      int flip_back_update_parameters_flag = 0;
      if (cm->frame_type == KEY_FRAME &&
          cm->film_grain_params.update_parameters == 0) {
        cm->film_grain_params.update_parameters = 1;
        flip_back_update_parameters_flag = 1;
      }
      write_film_grain_params(cpi, wb);

      if (flip_back_update_parameters_flag)
        cm->film_grain_params.update_parameters = 0;
    }
#endif

#if CONFIG_FWD_KF
    if (cm->reset_decoder_state &&
        frame_bufs[frame_to_show].frame_type != KEY_FRAME) {
      aom_internal_error(
          &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
          "show_existing_frame to reset state on KEY_FRAME only");
    }
#endif  // CONFIG_FWD_KF

    // Nothing else is coded for a show_existing_frame header.
    return;
  } else {
    aom_wb_write_bit(wb, 0);  // show_existing_frame
  }

  cm->frame_type = cm->intra_only ? INTRA_ONLY_FRAME : cm->frame_type;
  aom_wb_write_literal(wb, cm->frame_type, 2);

  if (cm->intra_only) cm->frame_type = INTRA_ONLY_FRAME;

  aom_wb_write_bit(wb, cm->show_frame);
  aom_wb_write_bit(wb, cm->error_resilient_mode);

  aom_wb_write_bit(wb, cm->enable_intra_edge_filter);
  aom_wb_write_bit(wb, cm->allow_filter_intra);

#if CONFIG_CDF_UPDATE_MODE
  aom_wb_write_bit(wb, cm->disable_cdf_update);
#endif  // CONFIG_CDF_UPDATE_MODE

  // force_screen_content_tools == 2 means "decided per frame"; otherwise the
  // per-frame value must match the sequence-level forced value.
  if (cm->seq_params.force_screen_content_tools == 2) {
    aom_wb_write_bit(wb, cm->allow_screen_content_tools);
  } else {
    assert(cm->allow_screen_content_tools ==
           cm->seq_params.force_screen_content_tools);
  }

#if CONFIG_AMVR
  if (cm->allow_screen_content_tools) {
    if (cm->seq_params.force_integer_mv == 2) {
      aom_wb_write_bit(wb, cm->cur_frame_force_integer_mv);
    } else {
      assert(cm->cur_frame_force_integer_mv == cm->seq_params.force_integer_mv);
    }
  } else {
    assert(cm->cur_frame_force_integer_mv == 0);
  }
#endif  // CONFIG_AMVR

  cm->invalid_delta_frame_id_minus1 = 0;
  if (cm->seq_params.frame_id_numbers_present_flag) {
    int frame_id_len = cm->seq_params.frame_id_length;
    aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
  }

  if (cm->width > cm->seq_params.max_frame_width ||
      cm->height > cm->seq_params.max_frame_height) {
    aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                       "Frame dimensions are larger than the maximum values");
  }
  // Frame size is signaled explicitly only when it differs from the
  // sequence-level maximum.
  int frame_size_override_flag =
      (cm->width != cm->seq_params.max_frame_width ||
       cm->height != cm->seq_params.max_frame_height);
  aom_wb_write_bit(wb, frame_size_override_flag);

#if CONFIG_FRAME_REFS_SIGNALING
  cm->frame_refs_short_signaling = 0;
#endif  // CONFIG_FRAME_REFS_SIGNALING

#if CONFIG_EXPLICIT_ORDER_HINT
  aom_wb_write_literal(wb, cm->frame_offset,
                       cm->seq_params.order_hint_bits_minus1 + 1);
#else
  if (cm->show_frame == 0) {
    // Not-shown frames carry an offset derived from the ARF/BRF source
    // offsets in the GF group, clamped to the signalable range.
    int arf_offset = AOMMIN(
        (MAX_GF_INTERVAL - 1),
        cpi->twopass.gf_group.arf_src_offset[cpi->twopass.gf_group.index]);
    int brf_offset =
        cpi->twopass.gf_group.brf_src_offset[cpi->twopass.gf_group.index];

    arf_offset = AOMMIN((MAX_GF_INTERVAL - 1), arf_offset + brf_offset);
    aom_wb_write_literal(wb, arf_offset, FRAME_OFFSET_BITS);
  }
#endif

  if (!cm->error_resilient_mode && !frame_is_intra_only(cm)) {
    aom_wb_write_literal(wb, cm->primary_ref_frame, PRIMARY_REF_BITS);
  }

  if (cm->frame_type == KEY_FRAME) {
    write_frame_size(cm, frame_size_override_flag, wb);
    assert(av1_superres_unscaled(cm) ||
           !(cm->allow_intrabc && NO_FILTER_FOR_IBC));
    if (cm->allow_screen_content_tools &&
        (av1_superres_unscaled(cm) || !NO_FILTER_FOR_IBC))
      aom_wb_write_bit(wb, cm->allow_intrabc);
    // all eight fbs are refreshed, pick one that will live long enough
    cm->fb_of_context_type[REGULAR_FRAME] = 0;
  } else {
#if CONFIG_EXPLICIT_ORDER_HINT
    // Write all ref frame order hints if error_resilient_mode == 1
    if (cm->error_resilient_mode && cm->seq_params.enable_order_hint) {
      RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
      for (int ref_idx = 0; ref_idx < REF_FRAMES; ref_idx++) {
        // Get buffer index
        const int buf_idx = cm->ref_frame_map[ref_idx];
        assert(buf_idx >= 0 && buf_idx < FRAME_BUFFERS);

        // Write order hint to bit stream
        aom_wb_write_literal(wb, frame_bufs[buf_idx].cur_frame_offset,
                             cm->seq_params.order_hint_bits_minus1 + 1);
      }
    }
#endif  // CONFIG_EXPLICIT_ORDER_HINT

    if (cm->frame_type == INTRA_ONLY_FRAME) {
      cpi->refresh_frame_mask = get_refresh_mask(cpi);
      int updated_fb = -1;
      for (int i = 0; i < REF_FRAMES; i++) {
        // If more than one frame is refreshed, it doesn't matter which one
        // we pick, so pick the first.
        if (cpi->refresh_frame_mask & (1 << i)) {
          updated_fb = i;
          break;
        }
      }
      assert(updated_fb >= 0);
      cm->fb_of_context_type[cm->frame_context_idx] = updated_fb;
      if (cm->intra_only) {
        aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
        write_frame_size(cm, frame_size_override_flag, wb);
        assert(av1_superres_unscaled(cm) ||
               !(cm->allow_intrabc && NO_FILTER_FOR_IBC));
        if (cm->allow_screen_content_tools &&
            (av1_superres_unscaled(cm) || !NO_FILTER_FOR_IBC))
          aom_wb_write_bit(wb, cm->allow_intrabc);
      }
    } else if (cm->frame_type == INTER_FRAME || cm->frame_type == S_FRAME) {
      MV_REFERENCE_FRAME ref_frame;

      if (cm->frame_type == INTER_FRAME) {
        cpi->refresh_frame_mask = get_refresh_mask(cpi);
        aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
      }

      int updated_fb = -1;
      for (int i = 0; i < REF_FRAMES; i++) {
        // If more than one frame is refreshed, it doesn't matter which one
        // we pick, so pick the first.
        if (cpi->refresh_frame_mask & (1 << i)) {
          updated_fb = i;
          break;
        }
      }
      // large scale tile sometimes won't refresh any fbs
      if (updated_fb >= 0) {
        cm->fb_of_context_type[cm->frame_context_idx] = updated_fb;
      }

      if (!cpi->refresh_frame_mask) {
        // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
        // will not be used as a reference
        cm->is_reference_frame = 0;
      }

#if CONFIG_FRAME_REFS_SIGNALING
      assert(cm->frame_refs_short_signaling == 0);
      // NOTE: Error resilient mode turns off frame_refs_short_signaling
      // automatically.
      if (cm->seq_params.enable_order_hint)
        aom_wb_write_bit(wb, cm->frame_refs_short_signaling);
      else
        assert(cm->frame_refs_short_signaling == 0);

      if (cm->frame_refs_short_signaling) {
        // Short signaling codes only LAST and GOLDEN; the decoder derives
        // the remaining references from order hints.
        assert(get_ref_frame_map_idx(cpi, LAST_FRAME) != INVALID_IDX);
        aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, LAST_FRAME),
                             REF_FRAMES_LOG2);
        assert(get_ref_frame_map_idx(cpi, GOLDEN_FRAME) != INVALID_IDX);
        aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, GOLDEN_FRAME),
                             REF_FRAMES_LOG2);
      }
#endif  // CONFIG_FRAME_REFS_SIGNALING

      for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
        assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
#if CONFIG_FRAME_REFS_SIGNALING
        if (!cm->frame_refs_short_signaling)
#endif  // CONFIG_FRAME_REFS_SIGNALING
          aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
                               REF_FRAMES_LOG2);
        if (cm->frame_type == S_FRAME) {
          assert(cm->ref_frame_sign_bias[ref_frame] == 0);
        }

        if (cm->seq_params.frame_id_numbers_present_flag) {
          // Signal the reference's frame id as a wrapped delta from the
          // current frame id, minus one.
          int i = get_ref_frame_map_idx(cpi, ref_frame);
          int frame_id_len = cm->seq_params.frame_id_length;
          int diff_len = cm->seq_params.delta_frame_id_length;
          int delta_frame_id_minus1 =
              ((cm->current_frame_id - cm->ref_frame_id[i] +
                (1 << frame_id_len)) %
               (1 << frame_id_len)) -
              1;
          if (delta_frame_id_minus1 < 0 ||
              delta_frame_id_minus1 >= (1 << diff_len))
            cm->invalid_delta_frame_id_minus1 = 1;
          aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
        }
      }

      if (cm->error_resilient_mode == 0 && frame_size_override_flag) {
        write_frame_size_with_refs(cpi, wb);
      } else {
        write_frame_size(cm, frame_size_override_flag, wb);
      }

#if CONFIG_AMVR
      if (cm->cur_frame_force_integer_mv) {
        cm->allow_high_precision_mv = 0;
      } else {
        aom_wb_write_bit(wb, cm->allow_high_precision_mv);
      }
#else
      aom_wb_write_bit(wb, cm->allow_high_precision_mv);
#endif
      fix_interp_filter(cm, cpi->td.counts);
      write_frame_interp_filter(cm->interp_filter, wb);
      aom_wb_write_bit(wb, cm->switchable_motion_mode);
      if (frame_might_use_prev_frame_mvs(cm) &&
          cm->seq_params.enable_order_hint) {
        aom_wb_write_bit(wb, cm->use_ref_frame_mvs);
      }
    }
  }

  if (cm->seq_params.frame_id_numbers_present_flag) {
    cm->refresh_mask =
        cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
  }

  // Large-scale-tile streams never do backward context adaptation.
  const int might_bwd_adapt = !(cm->large_scale_tile);
  if (might_bwd_adapt) {
    aom_wb_write_bit(
        wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_DISABLED);
  }

#if CONFIG_TILE_INFO_FIRST
  write_tile_info(cm, saved_wb, wb);
#endif  // CONFIG_TILE_INFO_FIRST
  encode_quantization(cm, wb);
  encode_segmentation(cm, xd, wb);
  {
    // Delta-Q signaling. Without CONFIG_EXT_DELTA_Q, per-block delta-Q is
    // mutually exclusive with segment-level quantizers.
    int delta_q_allowed = 1;
#if !CONFIG_EXT_DELTA_Q
    int i;
    struct segmentation *const seg = &cm->seg;
    int segment_quantizer_active = 0;
    for (i = 0; i < MAX_SEGMENTS; i++) {
      if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
        segment_quantizer_active = 1;
      }
    }
    delta_q_allowed = !segment_quantizer_active;
#endif

    if (cm->delta_q_present_flag)
      assert(delta_q_allowed == 1 && cm->base_qindex > 0);
    if (delta_q_allowed == 1 && cm->base_qindex > 0) {
      aom_wb_write_bit(wb, cm->delta_q_present_flag);
      if (cm->delta_q_present_flag) {
        aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
        xd->prev_qindex = cm->base_qindex;
#if CONFIG_EXT_DELTA_Q
        if (cm->allow_intrabc && NO_FILTER_FOR_IBC)
          assert(cm->delta_lf_present_flag == 0);
        else
          aom_wb_write_bit(wb, cm->delta_lf_present_flag);
        if (cm->delta_lf_present_flag) {
          aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_lf_res) - 1, 2);
          xd->prev_delta_lf_from_base = 0;
          aom_wb_write_bit(wb, cm->delta_lf_multi);
          // Chroma loop-filter deltas exist only when there are chroma planes.
          const int frame_lf_count =
              av1_num_planes(cm) > 1 ? FRAME_LF_COUNT : FRAME_LF_COUNT - 2;
          for (int lf_id = 0; lf_id < frame_lf_count; ++lf_id)
            xd->prev_delta_lf[lf_id] = 0;
        }
#endif  // CONFIG_EXT_DELTA_Q
      }
    }
  }
  if (!cm->all_lossless) {
    encode_loopfilter(cm, wb);
    encode_cdef(cm, wb);
    encode_restoration_mode(cm, wb);
  }

  write_tx_mode(cm, &cm->tx_mode, wb);

  if (cpi->allow_comp_inter_inter) {
    const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;

    aom_wb_write_bit(wb, use_hybrid_pred);
  }

  if (cm->is_skip_mode_allowed) aom_wb_write_bit(wb, cm->skip_mode_flag);

  write_compound_tools(cm, wb);

  aom_wb_write_bit(wb, cm->reduced_tx_set_used);

  if (!frame_is_intra_only(cm)) write_global_motion(cpi, wb);

#if CONFIG_FILM_GRAIN_SHOWEX
  if (!cm->show_frame) {
    aom_wb_write_bit(wb, cm->showable_frame);
  }
#endif
#if CONFIG_FILM_GRAIN
#if CONFIG_FILM_GRAIN_SHOWEX
  if (cm->film_grain_params_present && (cm->show_frame || cm->showable_frame)) {
#else
  if (cm->film_grain_params_present && cm->show_frame) {
#endif
    // Non-inter frames must code full grain parameters; temporarily force
    // update_parameters on, then restore the flag afterwards.
    int flip_back_update_parameters_flag = 0;
    if (cm->frame_type != INTER_FRAME &&
        cm->film_grain_params.update_parameters == 0) {
      cm->film_grain_params.update_parameters = 1;
      flip_back_update_parameters_flag = 1;
    }
    write_film_grain_params(cpi, wb);

    if (flip_back_update_parameters_flag)
      cm->film_grain_params.update_parameters = 0;
  }
#endif

#if !CONFIG_TILE_INFO_FIRST
  write_tile_info(cm, saved_wb, wb);
#endif  // !CONFIG_TILE_INFO_FIRST
}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003315
// Returns the smallest number of bytes (1..4) able to hold 'size' while
// keeping the top 'spare_msbs' bits of the widest (4-byte) field free for
// other use, or -1 if 'size' cannot be represented under that constraint.
static int choose_size_bytes(uint32_t size, int spare_msbs) {
  // Reject values that would spill into the reserved most-significant bits.
  if (spare_msbs > 0 && (size >> (32 - spare_msbs)) != 0) return -1;

  // Normalise: consider the value as it would sit in the full 32-bit field.
  const uint32_t shifted = size << spare_msbs;

  // Grow the byte count until the remaining high bytes are all zero.
  int bytes = 1;
  while (bytes < 4 && (shifted >> (8 * bytes)) != 0) ++bytes;
  return bytes;
}
3335
// Stores 'val' little-endian into 'dst' using exactly 'sz' bytes (1..4).
static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) {
  switch (sz) {
    case 4: mem_put_le32(dst, val); break;
    case 3: mem_put_le24(dst, val); break;
    case 2: mem_put_le16(dst, val); break;
    case 1: dst[0] = (uint8_t)(val & 0xff); break;
    default: assert(0 && "Invalid size"); break;
  }
}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003345
// Compacts the tile data in 'dst' in place: the encoder always emits 4-byte
// tile (and tile-column) size fields, and this pass shrinks them to the
// minimum widths chosen by choose_size_bytes(). The chosen widths are
// returned through *tile_size_bytes / *tile_col_size_bytes, and the new
// (smaller or equal) total payload size is returned.
static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
                       const uint32_t data_size, const uint32_t max_tile_size,
                       const uint32_t max_tile_col_size,
                       int *const tile_size_bytes,
                       int *const tile_col_size_bytes) {
  // Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
  int tsb;
  int tcsb;

  if (cm->large_scale_tile) {
    // The top bit in the tile size field indicates tile copy mode, so we
    // have 1 less bit to code the tile size
    tsb = choose_size_bytes(max_tile_size, 1);
    tcsb = choose_size_bytes(max_tile_col_size, 0);
  } else {
    tsb = choose_size_bytes(max_tile_size, 0);
    tcsb = 4;  // This is ignored
    (void)max_tile_col_size;
  }

  assert(tsb > 0);
  assert(tcsb > 0);

  *tile_size_bytes = tsb;
  *tile_col_size_bytes = tcsb;
  // Already at full width: nothing to rewrite.
  if (tsb == 4 && tcsb == 4) return data_size;

  // Read cursor (rpos) always stays ahead of the write cursor (wpos), so the
  // in-place compaction never overwrites unread data.
  uint32_t wpos = 0;
  uint32_t rpos = 0;

  if (cm->large_scale_tile) {
    int tile_row;
    int tile_col;

    for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
      // All but the last column has a column header
      if (tile_col < cm->tile_cols - 1) {
        uint32_t tile_col_size = mem_get_le32(dst + rpos);
        rpos += 4;

        // Adjust the tile column size by the number of bytes removed
        // from the tile size fields.
        tile_col_size -= (4 - tsb) * cm->tile_rows;

        mem_put_varsize(dst + wpos, tcsb, tile_col_size);
        wpos += tcsb;
      }

      for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
        // All, including the last row has a header
        uint32_t tile_header = mem_get_le32(dst + rpos);
        rpos += 4;

        // If this is a copy tile, we need to shift the MSB to the
        // top bit of the new width, and there is no data to copy.
        if (tile_header >> 31 != 0) {
          if (tsb < 4) tile_header >>= 32 - 8 * tsb;
          mem_put_varsize(dst + wpos, tsb, tile_header);
          wpos += tsb;
        } else {
          mem_put_varsize(dst + wpos, tsb, tile_header);
          wpos += tsb;

          // tile_header holds the tile's payload size here.
          memmove(dst + wpos, dst + rpos, tile_header);
          rpos += tile_header;
          wpos += tile_header;
        }
      }
    }

    assert(rpos > wpos);
    assert(rpos == data_size);

    return wpos;
  }
  // Normal (non large-scale-tile) layout: a flat sequence of tiles, each but
  // the last preceded by a 4-byte size field.
  const int n_tiles = cm->tile_cols * cm->tile_rows;
  int n;

  for (n = 0; n < n_tiles; n++) {
    int tile_size;

    if (n == n_tiles - 1) {
      // The last tile has no size field; it extends to the end of the data.
      tile_size = data_size - rpos;
    } else {
      tile_size = mem_get_le32(dst + rpos);
      rpos += 4;
      mem_put_varsize(dst + wpos, tsb, tile_size);
      wpos += tsb;
    }

    memmove(dst + wpos, dst + rpos, tile_size);

    rpos += tile_size;
    wpos += tile_size;
  }

  assert(rpos > wpos);
  assert(rpos == data_size);

  return wpos;
}
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003447
Soo-Chul Han38427e82017-09-27 15:06:13 -04003448uint32_t write_obu_header(OBU_TYPE obu_type, int obu_extension,
3449 uint8_t *const dst) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003450 struct aom_write_bit_buffer wb = { dst, 0 };
3451 uint32_t size = 0;
3452
Tom Fineganf9273812018-03-14 09:49:45 -07003453 aom_wb_write_literal(&wb, 0, 1); // forbidden bit.
Soo-Chul Han38427e82017-09-27 15:06:13 -04003454 aom_wb_write_literal(&wb, (int)obu_type, 4);
Vignesh Venkatasubramanian726f7952018-03-08 15:03:35 -08003455#if CONFIG_OBU_SIZE_AFTER_HEADER
3456 aom_wb_write_literal(&wb, obu_extension ? 1 : 0, 1);
3457 aom_wb_write_literal(&wb, 1, 1); // obu_has_payload_length_field
3458 aom_wb_write_literal(&wb, 0, 1); // reserved
3459#else
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003460 aom_wb_write_literal(&wb, 0, 2);
3461 aom_wb_write_literal(&wb, obu_extension ? 1 : 0, 1);
Vignesh Venkatasubramanian726f7952018-03-08 15:03:35 -08003462#endif // CONFIG_OBU_SIZE_AFTER_HEADER
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003463 if (obu_extension) {
3464 aom_wb_write_literal(&wb, obu_extension & 0xFF, 8);
3465 }
3466
3467 size = aom_wb_bytes_written(&wb);
3468 return size;
3469}
3470
// Returns how many bytes the LEB128-coded obu_size field will occupy.
// Depending on the bitstream layout, obu_size covers either the payload
// alone (size field after the header) or the header plus payload.
size_t get_uleb_obu_size_in_bytes(uint32_t obu_header_size,
                                  uint32_t obu_payload_size) {
#if CONFIG_OBU_SIZE_AFTER_HEADER
  (void)obu_header_size;
  return aom_uleb_size_in_bytes(obu_payload_size);
#else
  return aom_uleb_size_in_bytes(obu_header_size + obu_payload_size);
#endif  // CONFIG_OBU_SIZE_AFTER_HEADER
}
3481
3482int write_uleb_obu_size(uint32_t obu_header_size, uint32_t obu_payload_size,
3483 uint8_t *dest) {
3484#if CONFIG_OBU_SIZE_AFTER_HEADER
3485 const uint32_t obu_size = obu_payload_size;
3486 const uint32_t offset = obu_header_size;
3487#else
3488 const uint32_t obu_size = obu_header_size + obu_payload_size;
3489 const uint32_t offset = 0;
3490#endif // CONFIG_OBU_SIZE_AFTER_HEADER
Tom Finegan41150ad2018-01-23 11:42:55 -08003491 size_t coded_obu_size = 0;
3492
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08003493 if (aom_uleb_encode(obu_size, sizeof(obu_size), dest + offset,
3494 &coded_obu_size) != 0) {
Tom Finegan41150ad2018-01-23 11:42:55 -08003495 return AOM_CODEC_ERROR;
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08003496 }
Tom Finegan41150ad2018-01-23 11:42:55 -08003497
3498 return AOM_CODEC_OK;
3499}
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08003500
// Opens a gap in 'data' for the LEB128 length field by sliding the affected
// bytes forward (memmove handles the overlap). Returns the size of the gap,
// i.e. the number of bytes the length field will occupy.
static size_t obu_memmove(uint32_t obu_header_size, uint32_t obu_payload_size,
                          uint8_t *data) {
  const size_t length_field_size =
      get_uleb_obu_size_in_bytes(obu_header_size, obu_payload_size);
#if CONFIG_OBU_SIZE_AFTER_HEADER
  // The header stays in place; only the payload shifts, to just past the
  // point where the length field will be written.
  memmove(data + obu_header_size + length_field_size, data + obu_header_size,
          obu_payload_size);
#else
  // The entire OBU (header + payload) shifts forward by the field size.
  memmove(data + length_field_size, data, obu_header_size + obu_payload_size);
#endif  // CONFIG_OBU_SIZE_AFTER_HEADER
  return length_field_size;
}
Tom Finegan41150ad2018-01-23 11:42:55 -08003518
#if CONFIG_TRAILING_BITS
// Terminates the bit buffer with a '1' stop bit followed by zero padding up
// to the next byte boundary.
static void add_trailing_bits(struct aom_write_bit_buffer *wb) {
  if (!aom_wb_is_byte_aligned(wb)) {
    // Mid-byte: emit only the stop bit; assumes the remaining bits of the
    // current byte are already 0s.
    aom_wb_write_bit(wb, 1);
  } else {
    // Already aligned: emit a whole byte, 0x80 = stop bit + seven 0 bits.
    aom_wb_write_literal(wb, 0x80, 8);
  }
}
#endif
3529
// Writes the sequence header OBU payload (profile, level placeholder,
// sequence parameters, color config, timing info, optional film-grain
// presence flag, trailing bits) into 'dst' and returns its size in bytes.
static uint32_t write_sequence_header_obu(AV1_COMP *cpi, uint8_t *const dst
#if CONFIG_SCALABILITY
                                          ,
                                          uint8_t enhancement_layers_cnt) {
#else
) {
#endif
  AV1_COMMON *const cm = &cpi->common;
  struct aom_write_bit_buffer wb = { dst, 0 };
  uint32_t size = 0;

  write_profile(cm->profile, &wb);

  // 4-bit level field; hard-coded to 0 here.
  aom_wb_write_literal(&wb, 0, 4);
#if CONFIG_SCALABILITY
  // Number of enhancement layers, followed by a 4-bit level per layer
  // (all hard-coded to 0 here).
  aom_wb_write_literal(&wb, enhancement_layers_cnt, 2);
  int i;
  for (i = 1; i <= enhancement_layers_cnt; i++) {
    aom_wb_write_literal(&wb, 0, 4);
  }
#endif

  write_sequence_header(cpi, &wb);

  // color_config
  write_bitdepth_colorspace_sampling(cm, &wb);

  // timing_info
  write_timing_info_header(cm, &wb);

#if CONFIG_FILM_GRAIN
  aom_wb_write_bit(&wb, cm->film_grain_params_present);
#endif

#if CONFIG_TRAILING_BITS
  add_trailing_bits(&wb);
#endif

  size = aom_wb_bytes_written(&wb);
  return size;
}
3571
Jingning Handa11e692017-12-19 08:45:08 -08003572static uint32_t write_frame_header_obu(AV1_COMP *cpi,
Jingning Handa11e692017-12-19 08:45:08 -08003573 struct aom_write_bit_buffer *saved_wb,
Jingning Handa11e692017-12-19 08:45:08 -08003574 uint8_t *const dst) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003575 AV1_COMMON *const cm = &cpi->common;
3576 struct aom_write_bit_buffer wb = { dst, 0 };
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003577 uint32_t total_size = 0;
Yunqing Wange7142e12018-01-17 11:20:12 -08003578 uint32_t uncompressed_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003579
Yaowu Xudd6ef6c2018-03-02 16:43:52 -08003580 write_uncompressed_header_obu(cpi, saved_wb, &wb);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003581
3582 if (cm->show_existing_frame) {
3583 total_size = aom_wb_bytes_written(&wb);
3584 return total_size;
3585 }
3586
Cyril Concolato2dab2752018-02-26 14:25:47 -08003587#if CONFIG_TRAILING_BITS
3588 add_trailing_bits(&wb);
3589#endif
3590
Yunqing Wange7142e12018-01-17 11:20:12 -08003591 uncompressed_hdr_size = aom_wb_bytes_written(&wb);
3592 total_size = uncompressed_hdr_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003593 return total_size;
3594}
3595
3596static uint32_t write_tile_group_header(uint8_t *const dst, int startTile,
Vignesh Venkatasubramanian2a06b412018-03-01 15:18:06 -08003597 int endTile, int tiles_log2,
3598 int tile_start_and_end_present_flag) {
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003599 struct aom_write_bit_buffer wb = { dst, 0 };
3600 uint32_t size = 0;
3601
Vignesh Venkatasubramanian2a06b412018-03-01 15:18:06 -08003602 if (!tiles_log2) return size;
3603
3604 aom_wb_write_bit(&wb, tile_start_and_end_present_flag);
3605
3606 if (tile_start_and_end_present_flag) {
3607 aom_wb_write_literal(&wb, startTile, tiles_log2);
3608 aom_wb_write_literal(&wb, endTile, tiles_log2);
3609 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003610
3611 size = aom_wb_bytes_written(&wb);
3612 return size;
3613}
3614
// Bookkeeping for a Frame Header OBU already written to the output buffer,
// so that error-resilient tile groups can insert a duplicate copy of it
// (see write_tiles_in_tg_obus / av1_pack_bitstream).
typedef struct {
  uint8_t *frame_header;  // Start of the frame header OBU in the output.
  size_t obu_header_byte_offset;  // Offset of the OBU header within the OBU:
                                  // 0, or the size of the preceding length
                                  // field, depending on the OBU size layout.
  size_t total_length;  // OBU header + length field + payload, in bytes.
} FrameHeaderInfo;
3620
// Encodes every tile and wraps them into tile group OBUs at dst, returning
// the total number of bytes written.  Two layouts are produced:
//   * large_scale_tile: a single OBU containing per-column headers and
//     fixed 4-byte tile size fields, later compacted by remux_tiles().
//   * normal: cm->num_tg tile groups, each an OBU (or a single OBU_FRAME
//     when CONFIG_OBU_FRAME and only one group), with 4-byte sizes before
//     every tile except the last of its group.
// Outputs: *max_tile_size / *max_tile_col_size (for header compaction),
// cm->largest_tile_id, and overwrites in saved_wb for the size-byte fields.
static uint32_t write_tiles_in_tg_obus(AV1_COMP *const cpi, uint8_t *const dst,
                                       unsigned int *max_tile_size,
                                       unsigned int *max_tile_col_size,
                                       struct aom_write_bit_buffer *saved_wb,
                                       uint8_t obu_extension_header,
                                       const FrameHeaderInfo *fh_info) {
  AV1_COMMON *const cm = &cpi->common;
  aom_writer mode_bc;
  int tile_row, tile_col;
  TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
  TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
  uint32_t total_size = 0;
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;
  unsigned int tile_size = 0;
  const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
  // Fixed size tile groups for the moment
  const int num_tg_hdrs = cm->num_tg;
  // Tiles per group, rounded up so the last group absorbs the remainder.
  const int tg_size =
      (cm->large_scale_tile)
          ? 1
          : (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
  int tile_count = 0;
  int curr_tg_data_size = 0;
  uint8_t *data = dst;
  int new_tg = 1;
  const int have_tiles = tile_cols * tile_rows > 1;
  int first_tg = 1;

  cm->largest_tile_id = 0;
  *max_tile_size = 0;
  *max_tile_col_size = 0;

  if (cm->large_scale_tile) {
#if CONFIG_OBU_FRAME
    // For large_scale_tile case, we always have only one tile group, so it can
    // be written as an OBU_FRAME.
    const OBU_TYPE obu_type = OBU_FRAME;
#else
    const OBU_TYPE obu_type = OBU_TILE_GROUP;
#endif  // CONFIG_OBU_FRAME
    const uint32_t tg_hdr_size = write_obu_header(obu_type, 0, data);
    data += tg_hdr_size;

#if CONFIG_OBU_FRAME
    // OBU_FRAME carries the frame header inline, ahead of the tile data.
    const uint32_t frame_header_size =
        write_frame_header_obu(cpi, saved_wb, data);
#else
    const uint32_t frame_header_size = 0;
#endif  // CONFIG_OBU_FRAME
    data += frame_header_size;
    total_size += frame_header_size;

    int tile_size_bytes = 0;
    int tile_col_size_bytes = 0;

    // Column-major traversal: each tile column gets a 4-byte column size
    // header (except the last column), then its tiles top to bottom.
    for (tile_col = 0; tile_col < tile_cols; tile_col++) {
      TileInfo tile_info;
      const int is_last_col = (tile_col == tile_cols - 1);
      const uint32_t col_offset = total_size;

      av1_tile_set_col(&tile_info, cm, tile_col);

      // The last column does not have a column header
      if (!is_last_col) total_size += 4;

      for (tile_row = 0; tile_row < tile_rows; tile_row++) {
        TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
        const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
        const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
        const int data_offset = have_tiles ? 4 : 0;
        const int tile_idx = tile_row * tile_cols + tile_col;
        TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
        av1_tile_set_row(&tile_info, cm, tile_row);

        buf->data = dst + total_size + tg_hdr_size;

        // If CONFIG_EXT_TILE = 1, every tile in the row has a header,
        // even for the last one, unless no tiling is used at all.
        total_size += data_offset;
        // Initialise tile context from the frame context
        this_tile->tctx = *cm->fc;
        cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
        // Large-scale tiles are decoded independently, so CDF updates are
        // disabled in that mode.
        mode_bc.allow_update_cdf = !cm->large_scale_tile;
#if CONFIG_CDF_UPDATE_MODE
        mode_bc.allow_update_cdf =
            mode_bc.allow_update_cdf && !cm->disable_cdf_update;
#endif  // CONFIG_CDF_UPDATE_MODE
        aom_start_encode(&mode_bc, buf->data + data_offset);
        write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
        assert(tok == tok_end);
        aom_stop_encode(&mode_bc);
        tile_size = mode_bc.pos;
        buf->size = tile_size;

        // Record the maximum tile size we see, so we can compact headers later.
        if (tile_size > *max_tile_size) {
          *max_tile_size = tile_size;
          cm->largest_tile_id = tile_cols * tile_row + tile_col;
        }

        if (have_tiles) {
          // tile header: size of this tile, or copy offset
          uint32_t tile_header = tile_size;
          const int tile_copy_mode =
              ((AOMMAX(cm->tile_width, cm->tile_height) << MI_SIZE_LOG2) <= 256)
                  ? 1
                  : 0;

          // If tile_copy_mode = 1, check if this tile is a copy tile.
          // Very low chances to have copy tiles on the key frames, so don't
          // search on key frames to reduce unnecessary search.
          if (cm->frame_type != KEY_FRAME && tile_copy_mode) {
            const int identical_tile_offset =
                find_identical_tile(tile_row, tile_col, tile_buffers);

            if (identical_tile_offset > 0) {
              // Copy tile: emit only the header word, with the high bit set
              // and the offset in the top byte; no tile payload is written.
              tile_size = 0;
              tile_header = identical_tile_offset | 0x80;
              tile_header <<= 24;
            }
          }

          mem_put_le32(buf->data, tile_header);
        }

        total_size += tile_size;
      }

      if (!is_last_col) {
        uint32_t col_size = total_size - col_offset - 4;
        mem_put_le32(dst + col_offset + tg_hdr_size, col_size);

        // Record the maximum tile column size we see.
        *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
      }
    }

    if (have_tiles) {
      // Shrink the provisional 4-byte size fields down to the minimum number
      // of bytes actually needed (tile_size_bytes / tile_col_size_bytes).
      total_size = remux_tiles(cm, data, total_size - frame_header_size,
                               *max_tile_size, *max_tile_col_size,
                               &tile_size_bytes, &tile_col_size_bytes);
      total_size += frame_header_size;
    }

    // In EXT_TILE case, only use 1 tile group. Follow the obu syntax, write
    // current tile group size before tile data(include tile column header).
    // Tile group size doesn't include the bytes storing tg size.
    total_size += tg_hdr_size;
    const uint32_t obu_payload_size = total_size - tg_hdr_size;
    const size_t length_field_size =
        obu_memmove(tg_hdr_size, obu_payload_size, dst);
    if (write_uleb_obu_size(tg_hdr_size, obu_payload_size, dst) !=
        AOM_CODEC_OK) {
      assert(0);
    }
    total_size += (uint32_t)length_field_size;
#if CONFIG_OBU_FRAME
    // The in-OBU frame header moved by length_field_size bytes; keep the
    // saved overwrite position in sync.
    saved_wb->bit_buffer += length_field_size;
#endif  // CONFIG_OBU_FRAME

    // Now fill in the gaps in the uncompressed header.
    if (have_tiles) {
      assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
      aom_wb_overwrite_literal(saved_wb, tile_col_size_bytes - 1, 2);

      assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
      aom_wb_overwrite_literal(saved_wb, tile_size_bytes - 1, 2);
    }
    return (uint32_t)total_size;
  }

  // Normal (non-large-scale) path: row-major traversal, grouping tiles into
  // tile group OBUs of tg_size tiles each.
  uint32_t obu_header_size = 0;
  for (tile_row = 0; tile_row < tile_rows; tile_row++) {
    TileInfo tile_info;
    const int is_last_row = (tile_row == tile_rows - 1);
    av1_tile_set_row(&tile_info, cm, tile_row);

    for (tile_col = 0; tile_col < tile_cols; tile_col++) {
      const int tile_idx = tile_row * tile_cols + tile_col;
      TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
      TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
      const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
      const int is_last_col = (tile_col == tile_cols - 1);
      const int is_last_tile = is_last_col && is_last_row;
      int is_last_tile_in_tg = 0;
#if CONFIG_TRAILING_BITS
      int nb_bits = 0;
#endif

      if (new_tg) {
        data = dst + total_size;

        // A new tile group begins at this tile. Write the obu header and
        // tile group header
#if CONFIG_OBU_FRAME
        const OBU_TYPE obu_type =
            (num_tg_hdrs == 1) ? OBU_FRAME : OBU_TILE_GROUP;
#else
        const OBU_TYPE obu_type = OBU_TILE_GROUP;
#endif
        curr_tg_data_size =
            write_obu_header(obu_type, obu_extension_header, data);
        obu_header_size = curr_tg_data_size;

#if CONFIG_OBU_FRAME
        if (num_tg_hdrs == 1) {
          // A lone tile group becomes an OBU_FRAME: frame header is inlined.
          curr_tg_data_size +=
              write_frame_header_obu(cpi, saved_wb, data + curr_tg_data_size);
        }
#endif
        curr_tg_data_size += write_tile_group_header(
            data + curr_tg_data_size, tile_idx,
            AOMMIN(tile_idx + tg_size - 1, tile_cols * tile_rows - 1),
            n_log2_tiles, cm->num_tg > 1);
        total_size += curr_tg_data_size;
        new_tg = 0;
        tile_count = 0;
      }
      tile_count++;
      av1_tile_set_col(&tile_info, cm, tile_col);

      if (tile_count == tg_size || tile_idx == (tile_cols * tile_rows - 1)) {
        is_last_tile_in_tg = 1;
        new_tg = 1;
      } else {
        is_last_tile_in_tg = 0;
      }

      buf->data = dst + total_size;

      // The last tile of the tile group does not have a header.
      if (!is_last_tile_in_tg) total_size += 4;

      // Initialise tile context from the frame context
      this_tile->tctx = *cm->fc;
      cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
      mode_bc.allow_update_cdf = 1;
#if CONFIG_CDF_UPDATE_MODE
      mode_bc.allow_update_cdf =
          mode_bc.allow_update_cdf && !cm->disable_cdf_update;
#endif  // CONFIG_CDF_UPDATE_MODE
      const int num_planes = av1_num_planes(cm);
      av1_reset_loop_restoration(&cpi->td.mb.e_mbd, num_planes);

      aom_start_encode(&mode_bc, dst + total_size);
      write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
#if CONFIG_TRAILING_BITS
      nb_bits = aom_stop_encode(&mode_bc);
#else
      aom_stop_encode(&mode_bc);
#endif
      tile_size = mode_bc.pos;
      assert(tile_size > 0);

#if CONFIG_TRAILING_BITS
      // similar to add_trailing_bits, but specific to end of last tile
      if (is_last_tile) {
        if (nb_bits % 8 == 0) {
          // the arithmetic encoder ended on a byte boundary
          // adding a 0b10000000 byte
          *(dst + total_size + tile_size) = 0x80;
          tile_size += 1;
        } else {
          // arithmetic encoder left several 0 bits
          // changing the first 0 bit to 1
          int bit_offset = 7 - nb_bits % 8;
          *(dst + total_size + tile_size) |= 1 << bit_offset;
        }
      }
#endif

      curr_tg_data_size += (tile_size + (is_last_tile_in_tg ? 0 : 4));
      buf->size = tile_size;
      if (tile_size > *max_tile_size) {
        cm->largest_tile_id = tile_cols * tile_row + tile_col;
      }
      // NOTE(review): the last tile is excluded from *max_tile_size here,
      // while largest_tile_id above is updated unconditionally — presumably
      // intentional (last tile has no size header); confirm against decoder.
      if (!is_last_tile) {
        *max_tile_size = AOMMAX(*max_tile_size, tile_size);
      }

      if (!is_last_tile_in_tg) {
        // size of this tile
        mem_put_le32(buf->data, tile_size);
      } else {
        // write current tile group size
        const uint32_t obu_payload_size = curr_tg_data_size - obu_header_size;
        const size_t length_field_size =
            obu_memmove(obu_header_size, obu_payload_size, data);
        if (write_uleb_obu_size(obu_header_size, obu_payload_size, data) !=
            AOM_CODEC_OK) {
          assert(0);
        }
        curr_tg_data_size += (int)length_field_size;
        total_size += (uint32_t)length_field_size;

        // In error-resilient mode every tile group after the first is
        // preceded by a duplicate of the frame header OBU.
        if (!first_tg && cm->error_resilient_mode) {
          // Make room for a duplicate Frame Header OBU.
          memmove(data + fh_info->total_length, data, curr_tg_data_size);

          // Insert a copy of the Frame Header OBU.
          memcpy(data, fh_info->frame_header, fh_info->total_length);

#if CONFIG_OBU_REDUNDANT_FRAME_HEADER
          // Rewrite the OBU header to change the OBU type to Redundant Frame
          // Header.
          write_obu_header(OBU_REDUNDANT_FRAME_HEADER, obu_extension_header,
                           &data[fh_info->obu_header_byte_offset]);
#endif  // CONFIG_OBU_REDUNDANT_FRAME_HEADER

          data += fh_info->total_length;

          curr_tg_data_size += fh_info->total_length;
          total_size += fh_info->total_length;
        }
        first_tg = 0;
      }

      total_size += tile_size;
    }
  }
  return (uint32_t)total_size;
}
3945
Tom Finegane4099e32018-01-23 12:01:51 -08003946int av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003947 uint8_t *data = dst;
Yunqing Wangeeb08a92017-07-07 21:25:18 -07003948 uint32_t data_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003949 unsigned int max_tile_size;
3950 unsigned int max_tile_col_size;
Soo-Chul Hanf8589862018-01-24 03:13:14 +00003951 AV1_COMMON *const cm = &cpi->common;
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08003952 uint32_t obu_header_size = 0;
3953 uint32_t obu_payload_size = 0;
Tom Fineganf9273812018-03-14 09:49:45 -07003954 FrameHeaderInfo fh_info = { NULL, 0, 0 };
Soo-Chul Hanf8589862018-01-24 03:13:14 +00003955#if CONFIG_SCALABILITY
3956 const uint8_t enhancement_layers_cnt = cm->enhancement_layers_cnt;
3957 const uint8_t obu_extension_header =
3958 cm->temporal_layer_id << 5 | cm->enhancement_layer_id << 3 | 0;
3959#else
3960 uint8_t obu_extension_header = 0;
3961#endif // CONFIG_SCALABILITY
Soo-Chul Hanf8589862018-01-24 03:13:14 +00003962
Angie Chiangb11aedf2017-03-10 17:31:46 -08003963#if CONFIG_BITSTREAM_DEBUG
3964 bitstream_queue_reset_write();
3965#endif
3966
Soo-Chul Han38427e82017-09-27 15:06:13 -04003967 // The TD is now written outside the frame encode loop
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003968
3969 // write sequence header obu if KEY_FRAME, preceded by 4-byte size
3970 if (cm->frame_type == KEY_FRAME) {
Tom Finegan9d9ec1f2018-03-14 17:55:18 -07003971 obu_header_size = write_obu_header(OBU_SEQUENCE_HEADER, 0, data);
Soo-Chul Hanf8589862018-01-24 03:13:14 +00003972
Soo-Chul Hanf8589862018-01-24 03:13:14 +00003973#if CONFIG_SCALABILITY
Tom Finegan9d9ec1f2018-03-14 17:55:18 -07003974 obu_payload_size = write_sequence_header_obu(cpi, data + obu_header_size,
3975 enhancement_layers_cnt);
Soo-Chul Hanf8589862018-01-24 03:13:14 +00003976#else
Tom Finegan9d9ec1f2018-03-14 17:55:18 -07003977 obu_payload_size = write_sequence_header_obu(cpi, data + obu_header_size);
Soo-Chul Hanf8589862018-01-24 03:13:14 +00003978#endif // CONFIG_SCALABILITY
Tom Finegan41150ad2018-01-23 11:42:55 -08003979
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08003980 const size_t length_field_size =
3981 obu_memmove(obu_header_size, obu_payload_size, data);
3982 if (write_uleb_obu_size(obu_header_size, obu_payload_size, data) !=
Tom Finegan9d9ec1f2018-03-14 17:55:18 -07003983 AOM_CODEC_OK) {
Tom Finegan41150ad2018-01-23 11:42:55 -08003984 return AOM_CODEC_ERROR;
Tom Finegan9d9ec1f2018-03-14 17:55:18 -07003985 }
Tom Finegan41150ad2018-01-23 11:42:55 -08003986
Vignesh Venkatasubramanianea0257d2018-02-28 14:43:34 -08003987 data += obu_header_size + obu_payload_size + length_field_size;
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04003988 }
3989
Vignesh Venkatasubramanianb2ce34e2018-03-05 16:57:40 -08003990#if CONFIG_OBU_FRAME
3991 const int write_frame_header = (cm->num_tg > 1 || cm->show_existing_frame);
3992#else
3993 const int write_frame_header = 1;
3994#endif // CONFIG_OBU_FRAME
Jingning Handa11e692017-12-19 08:45:08 -08003995
Vignesh Venkatasubramanianb2ce34e2018-03-05 16:57:40 -08003996 struct aom_write_bit_buffer saved_wb;
3997 if (write_frame_header) {
3998 // Write Frame Header OBU.
3999 fh_info.frame_header = data;
Tom Finegan9d9ec1f2018-03-14 17:55:18 -07004000 obu_header_size =
4001 write_obu_header(OBU_FRAME_HEADER, obu_extension_header, data);
4002 obu_payload_size =
4003 write_frame_header_obu(cpi, &saved_wb, data + obu_header_size);
Tom Finegan41150ad2018-01-23 11:42:55 -08004004
Vignesh Venkatasubramanianb2ce34e2018-03-05 16:57:40 -08004005 const size_t length_field_size =
4006 obu_memmove(obu_header_size, obu_payload_size, data);
4007 if (write_uleb_obu_size(obu_header_size, obu_payload_size, data) !=
4008 AOM_CODEC_OK) {
4009 return AOM_CODEC_ERROR;
4010 }
Tom Finegan41150ad2018-01-23 11:42:55 -08004011
Tom Fineganf9273812018-03-14 09:49:45 -07004012#if CONFIG_OBU_REDUNDANT_FRAME_HEADER
4013#if CONFIG_OBU_SIZE_AFTER_HEADER
4014 fh_info.obu_header_byte_offset = 0;
4015#else
4016 fh_info.obu_header_byte_offset = length_field_size;
4017#endif // CONFIG_OBU_SIZE_AFTER_HEADER
4018#endif // CONFIG_OBU_REDUNDANT_FRAME_HEADER
4019
Vignesh Venkatasubramanianb2ce34e2018-03-05 16:57:40 -08004020 fh_info.total_length =
4021 obu_header_size + obu_payload_size + length_field_size;
4022 data += fh_info.total_length;
Tom Finegan07afef62018-03-07 12:07:53 -08004023
Vignesh Venkatasubramanianb2ce34e2018-03-05 16:57:40 -08004024 // Since length_field_size is determined adaptively after frame header
4025 // encoding, saved_wb must be adjusted accordingly.
4026 saved_wb.bit_buffer += length_field_size;
4027 }
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004028
Yunqing Wang8cb64b82018-02-27 15:05:12 -08004029#define EXT_TILE_DEBUG 0
4030#if EXT_TILE_DEBUG
4031 {
4032 char fn[20] = "./fh";
4033 fn[4] = cm->current_video_frame / 100 + '0';
4034 fn[5] = (cm->current_video_frame % 100) / 10 + '0';
4035 fn[6] = (cm->current_video_frame % 10) + '0';
4036 fn[7] = '\0';
4037 av1_print_uncompressed_frame_header(data - obu_size, obu_size, fn);
4038 }
4039#endif // EXT_TILE_DEBUG
4040#undef EXT_TILE_DEBUG
4041
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004042 if (cm->show_existing_frame) {
4043 data_size = 0;
4044 } else {
4045 // Each tile group obu will be preceded by 4-byte size of the tile group
4046 // obu
Tom Fineganf2d40f62018-02-01 11:52:49 -08004047 data_size =
4048 write_tiles_in_tg_obus(cpi, data, &max_tile_size, &max_tile_col_size,
Tom Finegan07afef62018-03-07 12:07:53 -08004049 &saved_wb, obu_extension_header, &fh_info);
Soo-Chul Han65c00ae2017-09-07 13:12:35 -04004050 }
Yaowu Xud29ea972018-02-22 09:50:58 -08004051 data += data_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004052 *size = data - dst;
Tom Finegane4099e32018-01-23 12:01:51 -08004053 return AOM_CODEC_OK;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004054}