blob: 61526524c16b66aeb3a67f4b8976dfb90acb3a21 [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <assert.h>
13#include <limits.h>
14#include <stdio.h>
15
Yaowu Xuf883b422016-08-30 14:01:10 -070016#include "aom/aom_encoder.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070017#include "aom_dsp/bitwriter_buffer.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070018#include "aom_dsp/aom_dsp_common.h"
19#include "aom_mem/aom_mem.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070020#include "aom_ports/mem_ops.h"
21#include "aom_ports/system_state.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070022#if CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070023#include "aom_util/debug_util.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070024#endif // CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070025
26#if CONFIG_CLPF
27#include "av1/common/clpf.h"
28#endif
29#if CONFIG_DERING
30#include "av1/common/dering.h"
31#endif // CONFIG_DERING
32#include "av1/common/entropy.h"
33#include "av1/common/entropymode.h"
34#include "av1/common/entropymv.h"
35#include "av1/common/mvref_common.h"
Thomas Daviesf6936102016-09-05 16:51:31 +010036#include "av1/common/odintrin.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070037#include "av1/common/pred_common.h"
38#include "av1/common/reconinter.h"
hui su45dc5972016-12-08 17:42:50 -080039#if CONFIG_EXT_INTRA
40#include "av1/common/reconintra.h"
41#endif // CONFIG_EXT_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -070042#include "av1/common/seg_common.h"
43#include "av1/common/tile_common.h"
44
45#if CONFIG_ANS
Alex Converse1ac1ae72016-09-17 15:11:16 -070046#include "aom_dsp/buf_ans.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070047#endif // CONFIG_ANS
48#include "av1/encoder/bitstream.h"
49#include "av1/encoder/cost.h"
50#include "av1/encoder/encodemv.h"
51#include "av1/encoder/mcomp.h"
52#include "av1/encoder/segmentation.h"
53#include "av1/encoder/subexp.h"
54#include "av1/encoder/tokenize.h"
Yushin Cho77bba8d2016-11-04 16:36:56 -070055#if CONFIG_PVQ
56#include "av1/encoder/pvq_encoder.h"
57#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -070058
Nathan E. Egge3c056792016-05-20 08:58:44 -040059static struct av1_token intra_mode_encodings[INTRA_MODES];
60static struct av1_token switchable_interp_encodings[SWITCHABLE_FILTERS];
Yaowu Xuc27fc142016-08-22 16:08:15 -070061#if CONFIG_EXT_PARTITION_TYPES
Yaowu Xuf883b422016-08-30 14:01:10 -070062static const struct av1_token ext_partition_encodings[EXT_PARTITION_TYPES] = {
Yaowu Xuc27fc142016-08-22 16:08:15 -070063 { 0, 1 }, { 4, 3 }, { 12, 4 }, { 7, 3 },
64 { 10, 4 }, { 11, 4 }, { 26, 5 }, { 27, 5 }
65};
66#endif
Nathan E. Egge3c056792016-05-20 08:58:44 -040067static struct av1_token partition_encodings[PARTITION_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -070068#if !CONFIG_REF_MV
Nathan E. Egge3c056792016-05-20 08:58:44 -040069static struct av1_token inter_mode_encodings[INTER_MODES];
Yaowu Xuc27fc142016-08-22 16:08:15 -070070#endif
71#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -070072static const struct av1_token
Yaowu Xuc27fc142016-08-22 16:08:15 -070073 inter_compound_mode_encodings[INTER_COMPOUND_MODES] = {
74 { 2, 2 }, { 50, 6 }, { 51, 6 }, { 24, 5 }, { 52, 6 },
75 { 53, 6 }, { 54, 6 }, { 55, 6 }, { 0, 1 }, { 7, 3 }
76 };
77#endif // CONFIG_EXT_INTER
Urvang Joshib100db72016-10-12 16:28:56 -070078#if CONFIG_PALETTE
Urvang Joshi0b325972016-10-24 14:06:43 -070079static struct av1_token palette_size_encodings[PALETTE_MAX_SIZE - 1];
80static struct av1_token palette_color_encodings[PALETTE_MAX_SIZE - 1]
81 [PALETTE_MAX_SIZE];
Urvang Joshib100db72016-10-12 16:28:56 -070082#endif // CONFIG_PALETTE
Jingning Hanaae72a62016-10-25 15:35:29 -070083static const struct av1_token tx_size_encodings[MAX_TX_DEPTH][TX_SIZES] = {
Yaowu Xuc27fc142016-08-22 16:08:15 -070084 { { 0, 1 }, { 1, 1 } }, // Max tx_size is 8X8
85 { { 0, 1 }, { 2, 2 }, { 3, 2 } }, // Max tx_size is 16X16
86 { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 7, 3 } }, // Max tx_size is 32X32
Debargha Mukherjee25ed5302016-11-22 12:13:41 -080087#if CONFIG_TX64X64
88 { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 14, 4 }, { 15, 4 } }, // Max tx_size 64X64
89#endif // CONFIG_TX64X64
Yaowu Xuc27fc142016-08-22 16:08:15 -070090};
91
hui su5db97432016-10-14 16:10:14 -070092#if CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
Yaowu Xuf883b422016-08-30 14:01:10 -070093static INLINE void write_uniform(aom_writer *w, int n, int v) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070094 int l = get_unsigned_bits(n);
95 int m = (1 << l) - n;
96 if (l == 0) return;
97 if (v < m) {
Yaowu Xuf883b422016-08-30 14:01:10 -070098 aom_write_literal(w, v, l - 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -070099 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -0700100 aom_write_literal(w, m + ((v - m) >> 1), l - 1);
101 aom_write_literal(w, (v - m) & 1, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700102 }
103}
hui su5db97432016-10-14 16:10:14 -0700104#endif // CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -0700105
106#if CONFIG_EXT_TX
Yaowu Xuf883b422016-08-30 14:01:10 -0700107static struct av1_token ext_tx_inter_encodings[EXT_TX_SETS_INTER][TX_TYPES];
108static struct av1_token ext_tx_intra_encodings[EXT_TX_SETS_INTRA][TX_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700109#else
Yaowu Xuf883b422016-08-30 14:01:10 -0700110static struct av1_token ext_tx_encodings[TX_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700111#endif // CONFIG_EXT_TX
112#if CONFIG_GLOBAL_MOTION
David Barkercf3d0b02016-11-10 10:14:49 +0000113static struct av1_token global_motion_types_encodings[GLOBAL_TRANS_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700114#endif // CONFIG_GLOBAL_MOTION
115#if CONFIG_EXT_INTRA
hui sueda3d762016-12-06 16:58:23 -0800116#if CONFIG_INTRA_INTERP
Yaowu Xuf883b422016-08-30 14:01:10 -0700117static struct av1_token intra_filter_encodings[INTRA_FILTERS];
hui sueda3d762016-12-06 16:58:23 -0800118#endif // CONFIG_INTRA_INTERP
Yaowu Xuc27fc142016-08-22 16:08:15 -0700119#endif // CONFIG_EXT_INTRA
120#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -0700121static struct av1_token interintra_mode_encodings[INTERINTRA_MODES];
Sarah Parker6fddd182016-11-10 20:57:20 -0800122static struct av1_token compound_type_encodings[COMPOUND_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700123#endif // CONFIG_EXT_INTER
Yue Chencb60b182016-10-13 15:18:22 -0700124#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
125static struct av1_token motion_mode_encodings[MOTION_MODES];
126#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -0700127#if CONFIG_LOOP_RESTORATION
128static struct av1_token switchable_restore_encodings[RESTORE_SWITCHABLE_TYPES];
129#endif // CONFIG_LOOP_RESTORATION
Thomas Davies80188d12016-10-26 16:08:35 -0700130static void write_uncompressed_header(AV1_COMP *cpi,
131 struct aom_write_bit_buffer *wb);
132static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700133
Yaowu Xuf883b422016-08-30 14:01:10 -0700134void av1_encode_token_init(void) {
Urvang Joshi0b325972016-10-24 14:06:43 -0700135#if CONFIG_EXT_TX || CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -0700136 int s;
Urvang Joshi0b325972016-10-24 14:06:43 -0700137#endif // CONFIG_EXT_TX || CONFIG_PALETTE
138#if CONFIG_EXT_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -0700139 for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700140 av1_tokens_from_tree(ext_tx_inter_encodings[s], av1_ext_tx_inter_tree[s]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700141 }
142 for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700143 av1_tokens_from_tree(ext_tx_intra_encodings[s], av1_ext_tx_intra_tree[s]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700144 }
145#else
Yaowu Xuf883b422016-08-30 14:01:10 -0700146 av1_tokens_from_tree(ext_tx_encodings, av1_ext_tx_tree);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700147#endif // CONFIG_EXT_TX
Nathan E. Egge3c056792016-05-20 08:58:44 -0400148 av1_tokens_from_tree(intra_mode_encodings, av1_intra_mode_tree);
149 av1_tokens_from_tree(switchable_interp_encodings, av1_switchable_interp_tree);
150 av1_tokens_from_tree(partition_encodings, av1_partition_tree);
151#if !CONFIG_REF_MV
152 av1_tokens_from_tree(inter_mode_encodings, av1_inter_mode_tree);
153#endif
154
Urvang Joshi0b325972016-10-24 14:06:43 -0700155#if CONFIG_PALETTE
156 av1_tokens_from_tree(palette_size_encodings, av1_palette_size_tree);
157 for (s = 0; s < PALETTE_MAX_SIZE - 1; ++s) {
158 av1_tokens_from_tree(palette_color_encodings[s], av1_palette_color_tree[s]);
159 }
160#endif // CONFIG_PALETTE
161
Yaowu Xuc27fc142016-08-22 16:08:15 -0700162#if CONFIG_EXT_INTRA
hui sueda3d762016-12-06 16:58:23 -0800163#if CONFIG_INTRA_INTERP
Yaowu Xuf883b422016-08-30 14:01:10 -0700164 av1_tokens_from_tree(intra_filter_encodings, av1_intra_filter_tree);
hui sueda3d762016-12-06 16:58:23 -0800165#endif // CONFIG_INTRA_INTERP
Yaowu Xuc27fc142016-08-22 16:08:15 -0700166#endif // CONFIG_EXT_INTRA
167#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -0700168 av1_tokens_from_tree(interintra_mode_encodings, av1_interintra_mode_tree);
Sarah Parker6fddd182016-11-10 20:57:20 -0800169 av1_tokens_from_tree(compound_type_encodings, av1_compound_type_tree);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700170#endif // CONFIG_EXT_INTER
Yue Chencb60b182016-10-13 15:18:22 -0700171#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
172 av1_tokens_from_tree(motion_mode_encodings, av1_motion_mode_tree);
173#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -0700174#if CONFIG_GLOBAL_MOTION
Yaowu Xuf883b422016-08-30 14:01:10 -0700175 av1_tokens_from_tree(global_motion_types_encodings,
176 av1_global_motion_types_tree);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700177#endif // CONFIG_GLOBAL_MOTION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -0700178#if CONFIG_LOOP_RESTORATION
179 av1_tokens_from_tree(switchable_restore_encodings,
180 av1_switchable_restore_tree);
181#endif // CONFIG_LOOP_RESTORATION
Nathan E. Egge4947c292016-04-26 11:37:06 -0400182
Nathan E. Eggedfa33f22016-11-16 09:44:26 -0500183#if CONFIG_EC_MULTISYMBOL
Nathan E. Egge4947c292016-04-26 11:37:06 -0400184 /* This hack is necessary when CONFIG_EXT_INTERP is enabled because the five
185 SWITCHABLE_FILTERS are not consecutive, e.g., 0, 1, 2, 3, 4, when doing
186 an in-order traversal of the av1_switchable_interp_tree structure. */
187 av1_indices_from_tree(av1_switchable_interp_ind, av1_switchable_interp_inv,
188 SWITCHABLE_FILTERS, av1_switchable_interp_tree);
Debargha Mukherjee8f209a82016-10-12 10:47:01 -0700189/* This hack is necessary because the four TX_TYPES are not consecutive,
190 e.g., 0, 1, 2, 3, when doing an in-order traversal of the av1_ext_tx_tree
191 structure. */
David Barkerf5419322016-11-10 12:04:21 +0000192#if !CONFIG_EXT_TX
Nathan E. Egge7c5b4c12016-04-26 12:31:14 -0400193 av1_indices_from_tree(av1_ext_tx_ind, av1_ext_tx_inv, TX_TYPES,
194 av1_ext_tx_tree);
David Barkerf5419322016-11-10 12:04:21 +0000195#endif
Nathan E. Egge3ef926e2016-09-07 18:20:41 -0400196 av1_indices_from_tree(av1_intra_mode_ind, av1_intra_mode_inv, INTRA_MODES,
197 av1_intra_mode_tree);
Nathan E. Egge6ec4d102016-09-08 10:41:20 -0400198 av1_indices_from_tree(av1_inter_mode_ind, av1_inter_mode_inv, INTER_MODES,
199 av1_inter_mode_tree);
Nathan E. Egge4947c292016-04-26 11:37:06 -0400200#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700201}
202
Nathan E. Egge380cb1a2016-09-08 10:13:42 -0400203#if !CONFIG_DAALA_EC
Yaowu Xuf883b422016-08-30 14:01:10 -0700204static void write_intra_mode(aom_writer *w, PREDICTION_MODE mode,
205 const aom_prob *probs) {
206 av1_write_token(w, av1_intra_mode_tree, probs, &intra_mode_encodings[mode]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700207}
Nathan E. Egge380cb1a2016-09-08 10:13:42 -0400208#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700209
210#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -0700211static void write_interintra_mode(aom_writer *w, INTERINTRA_MODE mode,
212 const aom_prob *probs) {
213 av1_write_token(w, av1_interintra_mode_tree, probs,
214 &interintra_mode_encodings[mode]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700215}
216#endif // CONFIG_EXT_INTER
217
Yaowu Xuf883b422016-08-30 14:01:10 -0700218static void write_inter_mode(AV1_COMMON *cm, aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700219 PREDICTION_MODE mode,
220#if CONFIG_REF_MV && CONFIG_EXT_INTER
221 int is_compound,
222#endif // CONFIG_REF_MV && CONFIG_EXT_INTER
223 const int16_t mode_ctx) {
224#if CONFIG_REF_MV
225 const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
Yaowu Xuf883b422016-08-30 14:01:10 -0700226 const aom_prob newmv_prob = cm->fc->newmv_prob[newmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700227#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -0700228 aom_write(w, mode != NEWMV && mode != NEWFROMNEARMV, newmv_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700229
230 if (!is_compound && (mode == NEWMV || mode == NEWFROMNEARMV))
Yaowu Xuf883b422016-08-30 14:01:10 -0700231 aom_write(w, mode == NEWFROMNEARMV, cm->fc->new2mv_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700232
233 if (mode != NEWMV && mode != NEWFROMNEARMV) {
234#else
Yaowu Xuf883b422016-08-30 14:01:10 -0700235 aom_write(w, mode != NEWMV, newmv_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700236
237 if (mode != NEWMV) {
238#endif // CONFIG_EXT_INTER
239 const int16_t zeromv_ctx = (mode_ctx >> ZEROMV_OFFSET) & ZEROMV_CTX_MASK;
Yaowu Xuf883b422016-08-30 14:01:10 -0700240 const aom_prob zeromv_prob = cm->fc->zeromv_prob[zeromv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700241
242 if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) {
243 assert(mode == ZEROMV);
244 return;
245 }
246
Yaowu Xuf883b422016-08-30 14:01:10 -0700247 aom_write(w, mode != ZEROMV, zeromv_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700248
249 if (mode != ZEROMV) {
250 int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
Yaowu Xuf883b422016-08-30 14:01:10 -0700251 aom_prob refmv_prob;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700252
253 if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
254 if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
255 if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;
256
257 refmv_prob = cm->fc->refmv_prob[refmv_ctx];
Yaowu Xuf883b422016-08-30 14:01:10 -0700258 aom_write(w, mode != NEARESTMV, refmv_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700259 }
260 }
261#else
Yaowu Xuc27fc142016-08-22 16:08:15 -0700262 assert(is_inter_mode(mode));
Nathan E. Eggea59b23d2016-11-16 09:44:26 -0500263#if CONFIG_EC_MULTISYMBOL
Nathan E. Egge6ec4d102016-09-08 10:41:20 -0400264 aom_write_symbol(w, av1_inter_mode_ind[INTER_OFFSET(mode)],
265 cm->fc->inter_mode_cdf[mode_ctx], INTER_MODES);
266#else
267 {
268 const aom_prob *const inter_probs = cm->fc->inter_mode_probs[mode_ctx];
269 av1_write_token(w, av1_inter_mode_tree, inter_probs,
270 &inter_mode_encodings[INTER_OFFSET(mode)]);
271 }
272#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700273#endif
274}
275
276#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -0700277static void write_drl_idx(const AV1_COMMON *cm, const MB_MODE_INFO *mbmi,
278 const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
279 uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700280
281 assert(mbmi->ref_mv_idx < 3);
282
283 if (mbmi->mode == NEWMV) {
284 int idx;
285 for (idx = 0; idx < 2; ++idx) {
286 if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
287 uint8_t drl_ctx =
Yaowu Xuf883b422016-08-30 14:01:10 -0700288 av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
289 aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700290
Yaowu Xuf883b422016-08-30 14:01:10 -0700291 aom_write(w, mbmi->ref_mv_idx != idx, drl_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700292 if (mbmi->ref_mv_idx == idx) return;
293 }
294 }
295 return;
296 }
297
298 if (mbmi->mode == NEARMV) {
299 int idx;
300 // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
301 for (idx = 1; idx < 3; ++idx) {
302 if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
303 uint8_t drl_ctx =
Yaowu Xuf883b422016-08-30 14:01:10 -0700304 av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
305 aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700306
Yaowu Xuf883b422016-08-30 14:01:10 -0700307 aom_write(w, mbmi->ref_mv_idx != (idx - 1), drl_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700308 if (mbmi->ref_mv_idx == (idx - 1)) return;
309 }
310 }
311 return;
312 }
313}
314#endif
315
316#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -0700317static void write_inter_compound_mode(AV1_COMMON *cm, aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700318 PREDICTION_MODE mode,
319 const int16_t mode_ctx) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700320 const aom_prob *const inter_compound_probs =
Yaowu Xuc27fc142016-08-22 16:08:15 -0700321 cm->fc->inter_compound_mode_probs[mode_ctx];
322
323 assert(is_inter_compound_mode(mode));
Yaowu Xuf883b422016-08-30 14:01:10 -0700324 av1_write_token(w, av1_inter_compound_mode_tree, inter_compound_probs,
325 &inter_compound_mode_encodings[INTER_COMPOUND_OFFSET(mode)]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700326}
327#endif // CONFIG_EXT_INTER
328
// Write data (assumed 0 <= data <= max) using exactly as many bits as are
// needed to represent max.
static void encode_unsigned_max(struct aom_write_bit_buffer *wb, int data,
                                int max) {
  aom_wb_write_literal(wb, data, get_unsigned_bits(max));
}
333
Yaowu Xuf883b422016-08-30 14:01:10 -0700334static void prob_diff_update(const aom_tree_index *tree,
335 aom_prob probs[/*n - 1*/],
Yaowu Xuc27fc142016-08-22 16:08:15 -0700336 const unsigned int counts[/*n - 1*/], int n,
Thomas Davies80188d12016-10-26 16:08:35 -0700337 int probwt, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700338 int i;
339 unsigned int branch_ct[32][2];
340
341 // Assuming max number of probabilities <= 32
342 assert(n <= 32);
343
Yaowu Xuf883b422016-08-30 14:01:10 -0700344 av1_tree_probs_from_distribution(tree, branch_ct, counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700345 for (i = 0; i < n - 1; ++i)
Thomas Davies80188d12016-10-26 16:08:35 -0700346 av1_cond_prob_diff_update(w, &probs[i], branch_ct[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700347}
Yaowu Xu17fd2f22016-11-17 18:23:28 -0800348#if CONFIG_EXT_INTER || CONFIG_EXT_TX || !CONFIG_EC_ADAPT
Yaowu Xuf883b422016-08-30 14:01:10 -0700349static int prob_diff_update_savings(const aom_tree_index *tree,
350 aom_prob probs[/*n - 1*/],
Thomas Davies80188d12016-10-26 16:08:35 -0700351 const unsigned int counts[/*n - 1*/], int n,
352 int probwt) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700353 int i;
354 unsigned int branch_ct[32][2];
355 int savings = 0;
356
357 // Assuming max number of probabilities <= 32
358 assert(n <= 32);
Yaowu Xuf883b422016-08-30 14:01:10 -0700359 av1_tree_probs_from_distribution(tree, branch_ct, counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700360 for (i = 0; i < n - 1; ++i) {
Thomas Davies80188d12016-10-26 16:08:35 -0700361 savings +=
362 av1_cond_prob_diff_update_savings(&probs[i], branch_ct[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700363 }
364 return savings;
365}
Yaowu Xu17fd2f22016-11-17 18:23:28 -0800366#endif // CONFIG_EXT_INTER || CONFIG_EXT_TX || !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -0700367
368#if CONFIG_VAR_TX
Yaowu Xuf883b422016-08-30 14:01:10 -0700369static void write_tx_size_vartx(const AV1_COMMON *cm, const MACROBLOCKD *xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700370 const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
Jingning Han94d5bfc2016-10-21 10:14:36 -0700371 int depth, int blk_row, int blk_col,
372 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700373 const int tx_row = blk_row >> 1;
374 const int tx_col = blk_col >> 1;
Jingning Hanf65b8702016-10-31 12:13:20 -0700375 const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
376 const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);
377
Yaowu Xuc27fc142016-08-22 16:08:15 -0700378 int ctx = txfm_partition_context(xd->above_txfm_context + tx_col,
Jingning Hanc8b89362016-11-01 10:28:53 -0700379 xd->left_txfm_context + tx_row,
380 mbmi->sb_type, tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700381
Yaowu Xuc27fc142016-08-22 16:08:15 -0700382 if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;
383
Jingning Han571189c2016-10-24 10:38:43 -0700384 if (depth == MAX_VARTX_DEPTH) {
Jingning Han94d5bfc2016-10-21 10:14:36 -0700385 txfm_partition_update(xd->above_txfm_context + tx_col,
386 xd->left_txfm_context + tx_row, tx_size);
387 return;
388 }
389
Yaowu Xuc27fc142016-08-22 16:08:15 -0700390 if (tx_size == mbmi->inter_tx_size[tx_row][tx_col]) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700391 aom_write(w, 0, cm->fc->txfm_partition_prob[ctx]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700392 txfm_partition_update(xd->above_txfm_context + tx_col,
393 xd->left_txfm_context + tx_row, tx_size);
394 } else {
Jingning Hanf64062f2016-11-02 16:22:18 -0700395 const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
396 const int bsl = tx_size_wide_unit[sub_txs];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700397 int i;
Jingning Hanf64062f2016-11-02 16:22:18 -0700398
Yaowu Xuf883b422016-08-30 14:01:10 -0700399 aom_write(w, 1, cm->fc->txfm_partition_prob[ctx]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700400
401 if (tx_size == TX_8X8) {
402 txfm_partition_update(xd->above_txfm_context + tx_col,
403 xd->left_txfm_context + tx_row, TX_4X4);
404 return;
405 }
406
407 assert(bsl > 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700408 for (i = 0; i < 4; ++i) {
Jingning Hanf64062f2016-11-02 16:22:18 -0700409 int offsetr = blk_row + (i >> 1) * bsl;
410 int offsetc = blk_col + (i & 0x01) * bsl;
411 write_tx_size_vartx(cm, xd, mbmi, sub_txs, depth + 1, offsetr, offsetc,
412 w);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700413 }
414 }
415}
416
Yaowu Xuf883b422016-08-30 14:01:10 -0700417static void update_txfm_partition_probs(AV1_COMMON *cm, aom_writer *w,
Thomas Davies80188d12016-10-26 16:08:35 -0700418 FRAME_COUNTS *counts, int probwt) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700419 int k;
420 for (k = 0; k < TXFM_PARTITION_CONTEXTS; ++k)
Yaowu Xuf883b422016-08-30 14:01:10 -0700421 av1_cond_prob_diff_update(w, &cm->fc->txfm_partition_prob[k],
Thomas Davies80188d12016-10-26 16:08:35 -0700422 counts->txfm_partition[k], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700423}
424#endif
425
Yaowu Xuf883b422016-08-30 14:01:10 -0700426static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
427 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700428 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
429 const BLOCK_SIZE bsize = mbmi->sb_type;
430 // For sub8x8 blocks the tx_size symbol does not need to be sent
431 if (bsize >= BLOCK_8X8) {
432 const TX_SIZE tx_size = mbmi->tx_size;
433 const int is_inter = is_inter_block(mbmi);
434 const int tx_size_ctx = get_tx_size_context(xd);
435 const int tx_size_cat = is_inter ? inter_tx_size_cat_lookup[bsize]
436 : intra_tx_size_cat_lookup[bsize];
437 const TX_SIZE coded_tx_size = txsize_sqr_up_map[tx_size];
Jingning Han4e1737a2016-10-25 16:05:02 -0700438 const int depth = tx_size_to_depth(coded_tx_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700439
440#if CONFIG_EXT_TX && CONFIG_RECT_TX
Yue Chen49587a72016-09-28 17:09:47 -0700441 assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700442 assert(
443 IMPLIES(is_rect_tx(tx_size), tx_size == max_txsize_rect_lookup[bsize]));
444#endif // CONFIG_EXT_TX && CONFIG_RECT_TX
445
Yaowu Xuf883b422016-08-30 14:01:10 -0700446 av1_write_token(w, av1_tx_size_tree[tx_size_cat],
447 cm->fc->tx_size_probs[tx_size_cat][tx_size_ctx],
Jingning Han4e1737a2016-10-25 16:05:02 -0700448 &tx_size_encodings[tx_size_cat][depth]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700449 }
450}
451
452#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -0700453static void update_inter_mode_probs(AV1_COMMON *cm, aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700454 FRAME_COUNTS *counts) {
455 int i;
Thomas Davies80188d12016-10-26 16:08:35 -0700456#if CONFIG_TILE_GROUPS
457 const int probwt = cm->num_tg;
458#else
459 const int probwt = 1;
460#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700461 for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
Thomas Davies80188d12016-10-26 16:08:35 -0700462 av1_cond_prob_diff_update(w, &cm->fc->newmv_prob[i], counts->newmv_mode[i],
463 probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700464 for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -0700465 av1_cond_prob_diff_update(w, &cm->fc->zeromv_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -0700466 counts->zeromv_mode[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700467 for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
Thomas Davies80188d12016-10-26 16:08:35 -0700468 av1_cond_prob_diff_update(w, &cm->fc->refmv_prob[i], counts->refmv_mode[i],
469 probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700470 for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
Thomas Davies80188d12016-10-26 16:08:35 -0700471 av1_cond_prob_diff_update(w, &cm->fc->drl_prob[i], counts->drl_mode[i],
472 probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700473#if CONFIG_EXT_INTER
Thomas Davies80188d12016-10-26 16:08:35 -0700474 av1_cond_prob_diff_update(w, &cm->fc->new2mv_prob, counts->new2mv_mode,
475 probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700476#endif // CONFIG_EXT_INTER
477}
478#endif
479
480#if CONFIG_EXT_INTER
Thomas Davies80188d12016-10-26 16:08:35 -0700481static void update_inter_compound_mode_probs(AV1_COMMON *cm, int probwt,
482 aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700483 const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
484 av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700485 int i;
486 int savings = 0;
487 int do_update = 0;
488 for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
489 savings += prob_diff_update_savings(
Yaowu Xuf883b422016-08-30 14:01:10 -0700490 av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
Thomas Davies80188d12016-10-26 16:08:35 -0700491 cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES, probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700492 }
493 do_update = savings > savings_thresh;
Yaowu Xuf883b422016-08-30 14:01:10 -0700494 aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700495 if (do_update) {
496 for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
497 prob_diff_update(
Yaowu Xuf883b422016-08-30 14:01:10 -0700498 av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
Thomas Davies80188d12016-10-26 16:08:35 -0700499 cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES, probwt, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700500 }
501 }
502}
503#endif // CONFIG_EXT_INTER
504
Yaowu Xuf883b422016-08-30 14:01:10 -0700505static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
506 int segment_id, const MODE_INFO *mi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700507 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
508 return 1;
509 } else {
510 const int skip = mi->mbmi.skip;
Yaowu Xuf883b422016-08-30 14:01:10 -0700511 aom_write(w, skip, av1_get_skip_prob(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700512 return skip;
513 }
514}
515
Yue Chen69f18e12016-09-08 14:48:15 -0700516#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
517static void write_motion_mode(const AV1_COMMON *cm, const MB_MODE_INFO *mbmi,
518 aom_writer *w) {
519 MOTION_MODE last_motion_mode_allowed = motion_mode_allowed(mbmi);
520
521 if (last_motion_mode_allowed == SIMPLE_TRANSLATION) return;
522#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
523 if (last_motion_mode_allowed == OBMC_CAUSAL) {
524 aom_write(w, mbmi->motion_mode == OBMC_CAUSAL,
525 cm->fc->obmc_prob[mbmi->sb_type]);
526 } else {
527#endif // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
528 av1_write_token(w, av1_motion_mode_tree,
529 cm->fc->motion_mode_prob[mbmi->sb_type],
530 &motion_mode_encodings[mbmi->motion_mode]);
531#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
532 }
533#endif // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
534}
535#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
536
Arild Fuldseth07441162016-08-15 15:07:52 +0200537#if CONFIG_DELTA_Q
Thomas Daviesf6936102016-09-05 16:51:31 +0100538static void write_delta_qindex(const AV1_COMMON *cm, int delta_qindex,
539 aom_writer *w) {
Arild Fuldseth07441162016-08-15 15:07:52 +0200540 int sign = delta_qindex < 0;
541 int abs = sign ? -delta_qindex : delta_qindex;
Thomas Daviesf6936102016-09-05 16:51:31 +0100542 int rem_bits, thr, i = 0;
543 int smallval = abs < DELTA_Q_SMALL ? 1 : 0;
544
545 while (i < DELTA_Q_SMALL && i <= abs) {
546 int bit = (i < abs);
547 aom_write(w, bit, cm->fc->delta_q_prob[i]);
548 i++;
549 }
550
551 if (!smallval) {
552 rem_bits = OD_ILOG_NZ(abs - 1) - 1;
553 thr = (1 << rem_bits) + 1;
554 aom_write_literal(w, rem_bits, 3);
555 aom_write_literal(w, abs - thr, rem_bits);
Arild Fuldseth07441162016-08-15 15:07:52 +0200556 }
557 if (abs > 0) {
558 aom_write_bit(w, sign);
559 }
560}
Thomas Daviesf6936102016-09-05 16:51:31 +0100561
562static void update_delta_q_probs(AV1_COMMON *cm, aom_writer *w,
563 FRAME_COUNTS *counts) {
564 int k;
Jingning Hanbe44c5f2016-09-30 11:35:22 -0700565#if CONFIG_TILE_GROUPS
566 const int probwt = cm->num_tg;
567#else
568 const int probwt = 1;
569#endif
Thomas Daviesf6936102016-09-05 16:51:31 +0100570 for (k = 0; k < DELTA_Q_CONTEXTS; ++k) {
Jingning Hanbe44c5f2016-09-30 11:35:22 -0700571 av1_cond_prob_diff_update(w, &cm->fc->delta_q_prob[k], counts->delta_q[k],
572 probwt);
Thomas Daviesf6936102016-09-05 16:51:31 +0100573 }
574}
Arild Fuldseth07441162016-08-15 15:07:52 +0200575#endif
576
Yaowu Xuf883b422016-08-30 14:01:10 -0700577static void update_skip_probs(AV1_COMMON *cm, aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700578 FRAME_COUNTS *counts) {
579 int k;
Thomas Davies80188d12016-10-26 16:08:35 -0700580#if CONFIG_TILE_GROUPS
581 const int probwt = cm->num_tg;
582#else
583 const int probwt = 1;
584#endif
585 for (k = 0; k < SKIP_CONTEXTS; ++k) {
586 av1_cond_prob_diff_update(w, &cm->fc->skip_probs[k], counts->skip[k],
587 probwt);
588 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700589}
590
Nathan E. Eggebaaaa162016-10-24 09:50:52 -0400591#if !CONFIG_EC_ADAPT
Yaowu Xuf883b422016-08-30 14:01:10 -0700592static void update_switchable_interp_probs(AV1_COMMON *cm, aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700593 FRAME_COUNTS *counts) {
594 int j;
Nathan E. Egge4947c292016-04-26 11:37:06 -0400595 for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
Thomas Davies80188d12016-10-26 16:08:35 -0700596#if CONFIG_TILE_GROUPS
597 const int probwt = cm->num_tg;
598#else
599 const int probwt = 1;
600#endif
601 prob_diff_update(
602 av1_switchable_interp_tree, cm->fc->switchable_interp_prob[j],
603 counts->switchable_interp[j], SWITCHABLE_FILTERS, probwt, w);
Nathan E. Egge4947c292016-04-26 11:37:06 -0400604 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700605}
Thomas Davies6519beb2016-10-19 14:46:07 +0100606#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700607
608#if CONFIG_EXT_TX
// Decide and signal whether to update the extended-transform-type
// probabilities for this frame.  Inter and intra transform sets are
// handled independently: for each set a single "do_update" bit is
// written first, set only when the accumulated rate savings from
// updating every probability in the set exceeds the cost of signalling
// the updates themselves.
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;
  int s;
#if CONFIG_TILE_GROUPS
  // Updates are repeated per tile group, so scale the estimated cost.
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  // Inter transform sets; set 0 is skipped (loop starts at s = 1).
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    int savings = 0;
    int do_update = 0;
    // First pass: measure the savings over all eligible transform sizes.
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_inter_ext_tx_for_txsize[s][i]) continue;
      savings += prob_diff_update_savings(
          av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
          cm->counts.inter_ext_tx[s][i], num_ext_tx_set_inter[s], probwt);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      // Second pass: actually emit the updates for the same sizes.
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_inter_ext_tx_for_txsize[s][i]) continue;
        prob_diff_update(
            av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
            cm->counts.inter_ext_tx[s][i], num_ext_tx_set_inter[s], probwt, w);
      }
    }
  }

  // Intra transform sets: probabilities are additionally conditioned on
  // the intra prediction mode (j).
  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    int savings = 0;
    int do_update = 0;
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_intra_ext_tx_for_txsize[s][i]) continue;
      for (j = 0; j < INTRA_MODES; ++j)
        savings += prob_diff_update_savings(
            av1_ext_tx_intra_tree[s], cm->fc->intra_ext_tx_prob[s][i][j],
            cm->counts.intra_ext_tx[s][i][j], num_ext_tx_set_intra[s], probwt);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_intra_ext_tx_for_txsize[s][i]) continue;
        for (j = 0; j < INTRA_MODES; ++j)
          prob_diff_update(av1_ext_tx_intra_tree[s],
                           cm->fc->intra_ext_tx_prob[s][i][j],
                           cm->counts.intra_ext_tx[s][i][j],
                           num_ext_tx_set_intra[s], probwt, w);
      }
    }
  }
}
664
665#else
Nathan E. Eggebaaaa162016-10-24 09:50:52 -0400666#if !CONFIG_EC_ADAPT
// Non-EXT_TX variant: a single transform-type tree (TX_TYPES entries)
// per transform size, with separate intra (mode-conditioned) and inter
// tables.  Each table group is preceded by one "do_update" bit, written
// only when the measured rate savings beat the signalling threshold.
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;

  int savings = 0;
  int do_update = 0;
#if CONFIG_TILE_GROUPS
  // Updates are repeated per tile group, so scale the estimated cost.
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  // Intra transform-type probabilities, indexed [tx_size][intra_mode].
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    for (j = 0; j < TX_TYPES; ++j)
      savings += prob_diff_update_savings(
          av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
          cm->counts.intra_ext_tx[i][j], TX_TYPES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      for (j = 0; j < TX_TYPES; ++j) {
        prob_diff_update(av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
                         cm->counts.intra_ext_tx[i][j], TX_TYPES, probwt, w);
      }
    }
  }

  // Inter transform-type probabilities, one table per transform size.
  savings = 0;
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    savings +=
        prob_diff_update_savings(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                                 cm->counts.inter_ext_tx[i], TX_TYPES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      prob_diff_update(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                       cm->counts.inter_ext_tx[i], TX_TYPES, probwt, w);
    }
  }
}
711#endif // CONFIG_EXT_TX
Thomas9ac55082016-09-23 18:04:17 +0100712#endif
Urvang Joshib100db72016-10-12 16:28:56 -0700713#if CONFIG_PALETTE
Yaowu Xuf883b422016-08-30 14:01:10 -0700714static void pack_palette_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700715 int num) {
716 int i;
717 const TOKENEXTRA *p = *tp;
718
719 for (i = 0; i < num; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700720 av1_write_token(w, av1_palette_color_tree[n - 2], p->context_tree,
721 &palette_color_encodings[n - 2][p->token]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700722 ++p;
723 }
724
725 *tp = p;
726}
Urvang Joshib100db72016-10-12 16:28:56 -0700727#endif // CONFIG_PALETTE
Yushin Cho77bba8d2016-11-04 16:36:56 -0700728#if !CONFIG_PVQ
Yaowu Xuc27fc142016-08-22 16:08:15 -0700729#if CONFIG_SUPERTX
// Conditionally update the supertx probabilities.  A single "do_update"
// bit covers the whole table; it is set only when the total savings
// across all partition contexts and transform sizes (TX_8X8 and larger)
// exceed the cost of signalling the updates.
static void update_supertx_probs(AV1_COMMON *cm, int probwt, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;
  int savings = 0;
  int do_update = 0;
  // First pass: estimate the total savings without writing anything.
  for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
    for (j = TX_8X8; j < TX_SIZES; ++j) {
      savings += av1_cond_prob_diff_update_savings(
          &cm->fc->supertx_prob[i][j], cm->counts.supertx[i][j], probwt);
    }
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    // Second pass: emit the conditional updates over the same range.
    for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
      for (j = TX_8X8; j < TX_SIZES; ++j) {
        av1_cond_prob_diff_update(w, &cm->fc->supertx_prob[i][j],
                                  cm->counts.supertx[i][j], probwt);
      }
    }
  }
}
753#endif // CONFIG_SUPERTX
754
// Serialize one block's coefficient tokens into the bitstream.  Walks
// the token array from *tp until `stop` or an EOSB_TOKEN sentinel
// (with CONFIG_VAR_TX, also until EOB or seg_eob tokens have been
// consumed), writing the EOB/ZERO/value decisions and any extra-bit
// magnitude/sign bits for each token.  On return *tp points past the
// last token consumed.  The exact write order here defines the
// bitstream syntax; do not reorder.
static void pack_mb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop,
                           aom_bit_depth_t bit_depth, const TX_SIZE tx_size,
                           TOKEN_STATS *token_stats) {
  const TOKENEXTRA *p = *tp;
#if CONFIG_VAR_TX
  int count = 0;
  // Maximum number of coefficients for this transform size.
  const int seg_eob = tx_size_2d[tx_size];
#endif
#if CONFIG_AOM_HIGHBITDEPTH
  // Extra-bit tables differ per bit depth (8/10/12 bit).
  const av1_extra_bit *const extra_bits_table =
      (bit_depth == AOM_BITS_12)
          ? av1_extra_bits_high12
          : (bit_depth == AOM_BITS_10) ? av1_extra_bits_high10 : av1_extra_bits;
#else
  const av1_extra_bit *const extra_bits_table = av1_extra_bits;
  (void)bit_depth;
#endif  // CONFIG_AOM_HIGHBITDEPTH

  while (p < stop && p->token != EOSB_TOKEN) {
    const int token = p->token;
    aom_tree_index index = 0;
#if !CONFIG_EC_MULTISYMBOL
    const struct av1_token *const coef_encoding = &av1_coef_encodings[token];
    int coef_value = coef_encoding->value;
    int coef_length = coef_encoding->len;
#endif  // !CONFIG_EC_MULTISYMBOL
    const av1_extra_bit *const extra_bits = &extra_bits_table[token];

#if CONFIG_EC_MULTISYMBOL
    /* skip one or two nodes */
    // EOB bit is only coded when the token position allows an EOB.
    if (!p->skip_eob_node)
      aom_write_record(w, token != EOB_TOKEN, p->context_tree[0], token_stats);

    if (token != EOB_TOKEN) {
      aom_write_record(w, token != ZERO_TOKEN, p->context_tree[1], token_stats);

      if (token != ZERO_TOKEN) {
        // Remaining token values are coded as one multi-symbol from the
        // per-token CDF.
        aom_write_symbol(w, token - ONE_TOKEN, *p->token_cdf,
                         CATEGORY6_TOKEN - ONE_TOKEN + 1);
      }
    }
#else
    /* skip one or two nodes */
    if (p->skip_eob_node)
      coef_length -= p->skip_eob_node;
    else
      aom_write_record(w, token != EOB_TOKEN, p->context_tree[0], token_stats);

    if (token != EOB_TOKEN) {
      aom_write_record(w, token != ZERO_TOKEN, p->context_tree[1], token_stats);

      if (token != ZERO_TOKEN) {
        aom_write_record(w, token != ONE_TOKEN, p->context_tree[2],
                         token_stats);

        if (token != ONE_TOKEN) {
          // Code the rest of the token value with the constrained tree,
          // using the Pareto-derived probabilities for the pivot context.
          const int unconstrained_len = UNCONSTRAINED_NODES - p->skip_eob_node;
          aom_write_tree_record(
              w, av1_coef_con_tree,
              av1_pareto8_full[p->context_tree[PIVOT_NODE] - 1], coef_value,
              coef_length - unconstrained_len, 0, token_stats);
        }
      }
    }
#endif  // CONFIG_EC_MULTISYMBOL

    // Tokens in the extra-bit categories carry additional magnitude bits
    // plus a sign bit.
    if (extra_bits->base_val) {
      const int bit_string = p->extra;
      const int bit_string_length = extra_bits->len;  // Length of extra bits to
                                                      // be written excluding
                                                      // the sign bit.
      // For CAT6 tokens, the leading bits are provably zero for smaller
      // transform sizes and are skipped (asserted zero below).
      int skip_bits = (extra_bits->base_val == CAT6_MIN_VAL)
                          ? TX_SIZES - 1 - txsize_sqr_up_map[tx_size]
                          : 0;

      if (bit_string_length > 0) {
        const unsigned char *pb = extra_bits->prob;
        const int value = bit_string >> 1;  // drop the sign bit (LSB)
        const int num_bits = bit_string_length;  // number of bits in value
        assert(num_bits > 0);

        // Write magnitude bits MSB-first.
        for (index = 0; index < num_bits; ++index) {
          const int shift = num_bits - index - 1;
          const int bb = (value >> shift) & 1;
          if (skip_bits) {
            --skip_bits;
            assert(!bb);
          } else {
            aom_write_record(w, bb, pb[index], token_stats);
          }
        }
      }

      // Sign bit, coded as a raw bit.
      aom_write_bit_record(w, bit_string & 1, token_stats);
    }
    ++p;

#if CONFIG_VAR_TX
    ++count;
    if (token == EOB_TOKEN || count == seg_eob) break;
#endif
  }

  *tp = p;
}
#endif  // !CONFIG_PVQ
Yaowu Xuc27fc142016-08-22 16:08:15 -0700862#if CONFIG_VAR_TX
// Recursively pack the coefficient tokens for one transform block under
// variable transform-size partitioning.  If the recursion has reached
// the transform size actually used at this position, the tokens are
// emitted via pack_mb_tokens(); otherwise the block is split into four
// sub-blocks of the next smaller size and each quadrant is visited.
static void pack_txb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                            const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
                            MB_MODE_INFO *mbmi, int plane,
                            BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
                            int block, int blk_row, int blk_col,
                            TX_SIZE tx_size, TOKEN_STATS *token_stats) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  // Map the luma block position to this plane's grid (chroma may be
  // subsampled).
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  TX_SIZE plane_tx_size;
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  // Positions past the visible block edge carry no coefficients.
  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  // Chroma derives its transform size from the co-located luma choice.
  plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];

  if (tx_size == plane_tx_size) {
    // Leaf: emit this transform block's tokens, tracking their cost
    // separately so it can be recorded per block position.
    TOKEN_STATS tmp_token_stats;
    init_token_stats(&tmp_token_stats);
    pack_mb_tokens(w, tp, tok_end, bit_depth, tx_size, &tmp_token_stats);
#if CONFIG_RD_DEBUG
    token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
    token_stats->cost += tmp_token_stats.cost;
#endif
  } else {
    // Split into four quadrants of the next smaller transform size.
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsl = tx_size_wide_unit[sub_txs];
    int i;

    assert(bsl > 0);

    for (i = 0; i < 4; ++i) {
      const int offsetr = blk_row + (i >> 1) * bsl;
      const int offsetc = blk_col + (i & 0x01) * bsl;
      const int step = tx_size_wide_unit[sub_txs] * tx_size_high_unit[sub_txs];

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      pack_txb_tokens(w, tp, tok_end, xd, mbmi, plane, plane_bsize, bit_depth,
                      block, offsetr, offsetc, sub_txs, token_stats);
      block += step;
    }
  }
}
911#endif
912
Yaowu Xuf883b422016-08-30 14:01:10 -0700913static void write_segment_id(aom_writer *w, const struct segmentation *seg,
Thomas9ac55082016-09-23 18:04:17 +0100914 struct segmentation_probs *segp, int segment_id) {
Nathan E. Eggef627e582016-08-19 20:06:51 -0400915 if (seg->enabled && seg->update_map) {
Nathan E. Egge31296062016-11-16 09:44:26 -0500916#if CONFIG_EC_MULTISYMBOL
Nathan E. Eggef627e582016-08-19 20:06:51 -0400917 aom_write_symbol(w, segment_id, segp->tree_cdf, MAX_SEGMENTS);
918#else
Nathan E. Eggeeeedc632016-06-19 12:02:33 -0400919 aom_write_tree(w, av1_segment_tree, segp->tree_probs, segment_id, 3, 0);
Nathan E. Eggef627e582016-08-19 20:06:51 -0400920#endif
921 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700922}
923
// Encode the reference frame(s) used by the current block.  When the
// segment pins the reference frame (SEG_LVL_REF_FRAME active) nothing
// is written; otherwise a compound/single decision may be coded,
// followed by a small decision tree selecting the specific reference
// frame(s).  The tree shape differs with CONFIG_EXT_REFS.
static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int is_compound = has_second_ref(mbmi);
  const int segment_id = mbmi->segment_id;

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    // Reference frame is implied by the segment: nothing to write.
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
           get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
  } else {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      aom_write(w, is_compound, av1_get_reference_mode_prob(cm, xd));
    } else {
      assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (is_compound) {
#if CONFIG_EXT_REFS
      // First forward-ref bit splits {GOLDEN, LAST3} from {LAST, LAST2};
      // the backward ref is a single ALTREF-vs-BWDREF bit.
      const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
                       mbmi->ref_frame[0] == LAST3_FRAME);
      const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
#else   // CONFIG_EXT_REFS
      const int bit = mbmi->ref_frame[0] == GOLDEN_FRAME;
#endif  // CONFIG_EXT_REFS

      aom_write(w, bit, av1_get_pred_prob_comp_ref_p(cm, xd));

#if CONFIG_EXT_REFS
      if (!bit) {
        const int bit1 = mbmi->ref_frame[0] == LAST_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_comp_ref_p1(cm, xd));
      } else {
        const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
        aom_write(w, bit2, av1_get_pred_prob_comp_ref_p2(cm, xd));
      }
      aom_write(w, bit_bwd, av1_get_pred_prob_comp_bwdref_p(cm, xd));
#endif  // CONFIG_EXT_REFS
    } else {
#if CONFIG_EXT_REFS
      // Single reference: binary tree over the five references, coded
      // one context-modelled bit per internal node.
      const int bit0 = (mbmi->ref_frame[0] == ALTREF_FRAME ||
                        mbmi->ref_frame[0] == BWDREF_FRAME);
      aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd));

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd));
      } else {
        const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
                          mbmi->ref_frame[0] == GOLDEN_FRAME);
        aom_write(w, bit2, av1_get_pred_prob_single_ref_p3(cm, xd));

        if (!bit2) {
          const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
          aom_write(w, bit3, av1_get_pred_prob_single_ref_p4(cm, xd));
        } else {
          const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
          aom_write(w, bit4, av1_get_pred_prob_single_ref_p5(cm, xd));
        }
      }
#else   // CONFIG_EXT_REFS
      const int bit0 = mbmi->ref_frame[0] != LAST_FRAME;
      aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd));

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] != GOLDEN_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd));
      }
#endif  // CONFIG_EXT_REFS
    }
  }
}
1001
hui su5db97432016-10-14 16:10:14 -07001002#if CONFIG_FILTER_INTRA
// Signal the filter-intra mode for luma (plane 0) and chroma (plane 1).
// The use flag and mode are only coded when the corresponding prediction
// mode is DC_PRED (and, with CONFIG_PALETTE, the plane has no palette).
static void write_filter_intra_mode_info(const AV1_COMMON *const cm,
                                         const MB_MODE_INFO *const mbmi,
                                         aom_writer *w) {
  if (mbmi->mode == DC_PRED
#if CONFIG_PALETTE
      && mbmi->palette_mode_info.palette_size[0] == 0
#endif  // CONFIG_PALETTE
      ) {
    aom_write(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[0],
              cm->fc->filter_intra_probs[0]);
    if (mbmi->filter_intra_mode_info.use_filter_intra_mode[0]) {
      const FILTER_INTRA_MODE mode =
          mbmi->filter_intra_mode_info.filter_intra_mode[0];
      // Mode index coded with a uniform (near-fixed-length) code.
      write_uniform(w, FILTER_INTRA_MODES, mode);
    }
  }

  if (mbmi->uv_mode == DC_PRED
#if CONFIG_PALETTE
      && mbmi->palette_mode_info.palette_size[1] == 0
#endif  // CONFIG_PALETTE
      ) {
    aom_write(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[1],
              cm->fc->filter_intra_probs[1]);
    if (mbmi->filter_intra_mode_info.use_filter_intra_mode[1]) {
      const FILTER_INTRA_MODE mode =
          mbmi->filter_intra_mode_info.filter_intra_mode[1];
      write_uniform(w, FILTER_INTRA_MODES, mode);
    }
  }
}
hui su5db97432016-10-14 16:10:14 -07001034#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001035
hui su5db97432016-10-14 16:10:14 -07001036#if CONFIG_EXT_INTRA
// Signal the angle deltas for directional intra modes (luma and chroma),
// and, with CONFIG_INTRA_INTERP, the intra interpolation filter when the
// resulting prediction angle allows a switchable filter.  Nothing is
// written for blocks smaller than 8x8 or for non-directional modes.
static void write_intra_angle_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
#if CONFIG_INTRA_INTERP
  const int intra_filter_ctx = av1_get_pred_context_intra_interp(xd);
  int p_angle;
#endif  // CONFIG_INTRA_INTERP

  (void)cm;  // cm is only used when CONFIG_INTRA_INTERP is enabled
  if (bsize < BLOCK_8X8) return;

  if (av1_is_directional_mode(mbmi->mode, bsize)) {
    // Luma angle delta, coded uniformly over [-max_delta, +max_delta].
    const int max_angle_delta = av1_get_max_angle_delta(mbmi->sb_type, 0);
    write_uniform(w, 2 * max_angle_delta + 1,
                  max_angle_delta + mbmi->angle_delta[0]);
#if CONFIG_INTRA_INTERP
    // Reconstruct the actual prediction angle to decide whether the
    // intra filter choice needs to be signalled.
    p_angle = mode_to_angle_map[mbmi->mode] +
              mbmi->angle_delta[0] * av1_get_angle_step(mbmi->sb_type, 0);
    if (av1_is_intra_filter_switchable(p_angle)) {
      av1_write_token(w, av1_intra_filter_tree,
                      cm->fc->intra_filter_probs[intra_filter_ctx],
                      &intra_filter_encodings[mbmi->intra_filter]);
    }
#endif  // CONFIG_INTRA_INTERP
  }

  if (av1_is_directional_mode(mbmi->uv_mode, bsize)) {
    // Chroma angle delta uses a fixed range.
    write_uniform(w, 2 * MAX_ANGLE_DELTA_UV + 1,
                  MAX_ANGLE_DELTA_UV + mbmi->angle_delta[1]);
  }
}
1069#endif // CONFIG_EXT_INTRA
1070
// Signal the block's interpolation filter(s) when the frame-level filter
// is SWITCHABLE.  With CONFIG_EXT_INTERP the filter is omitted entirely
// when no sub-pel interpolation is needed; with CONFIG_DUAL_FILTER a
// separate filter may be coded per direction.  Also tallies the chosen
// filter in cpi->interp_filter_selected.
static void write_mb_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
#if CONFIG_DUAL_FILTER
  int dir;
#endif
  if (cm->interp_filter == SWITCHABLE) {
#if CONFIG_EXT_INTERP
#if CONFIG_DUAL_FILTER
    if (!av1_is_interp_needed(xd)) {
      // No sub-pel motion: filter is implied, nothing to write.
      assert(mbmi->interp_filter[0] == EIGHTTAP_REGULAR);
      return;
    }
#else
    if (!av1_is_interp_needed(xd)) {
      // NOTE(review): this inner #if CONFIG_DUAL_FILTER sits inside the
      // #else of an outer #if CONFIG_DUAL_FILTER, so its first arm is
      // unreachable — confirm whether it can be simplified upstream.
#if CONFIG_DUAL_FILTER
      assert(mbmi->interp_filter[0] == EIGHTTAP_REGULAR);
      assert(mbmi->interp_filter[1] == EIGHTTAP_REGULAR);
#else
      assert(mbmi->interp_filter == EIGHTTAP_REGULAR);
#endif
      return;
    }
#endif  // CONFIG_DUAL_FILTER
#endif  // CONFIG_EXT_INTERP
#if CONFIG_DUAL_FILTER
    for (dir = 0; dir < 2; ++dir) {
      // Only code a filter for directions with a sub-pel MV component
      // (checking the second ref's component when compound).
      if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
          (mbmi->ref_frame[1] > INTRA_FRAME &&
           has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
        const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
        av1_write_token(w, av1_switchable_interp_tree,
                        cm->fc->switchable_interp_prob[ctx],
                        &switchable_interp_encodings[mbmi->interp_filter[dir]]);
        // NOTE(review): first index is fixed at [0] regardless of dir —
        // confirm this is the intended accounting bucket.
        ++cpi->interp_filter_selected[0][mbmi->interp_filter[dir]];
      }
    }
#else
    {
      const int ctx = av1_get_pred_context_switchable_interp(xd);
#if CONFIG_EC_MULTISYMBOL
      aom_write_symbol(w, av1_switchable_interp_ind[mbmi->interp_filter],
                       cm->fc->switchable_interp_cdf[ctx], SWITCHABLE_FILTERS);
#else
      av1_write_token(w, av1_switchable_interp_tree,
                      cm->fc->switchable_interp_prob[ctx],
                      &switchable_interp_encodings[mbmi->interp_filter]);
#endif
      ++cpi->interp_filter_selected[0][mbmi->interp_filter];
    }
#endif
  }
}
1125
Urvang Joshib100db72016-10-12 16:28:56 -07001126#if CONFIG_PALETTE
// Signal palette usage and contents for luma and chroma.  For each plane
// whose prediction mode is DC_PRED: a "palette used" flag, then (when
// used) the palette size, the raw palette colors at the frame bit depth,
// and the first color index.  The luma flag's context depends on whether
// the above/left neighbors use a palette.
static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                    const MODE_INFO *const mi, aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
  int palette_ctx = 0;
  int n, i;

  if (mbmi->mode == DC_PRED) {
    n = pmi->palette_size[0];
    // Context: count of neighbors (above/left) that use a luma palette.
    if (above_mi)
      palette_ctx += (above_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    if (left_mi)
      palette_ctx += (left_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    aom_write(w, n > 0,
              av1_default_palette_y_mode_prob[bsize - BLOCK_8X8][palette_ctx]);
    if (n > 0) {
      // Size trees/encodings are indexed by n - 2 (minimum palette is 2).
      av1_write_token(w, av1_palette_size_tree,
                      av1_default_palette_y_size_prob[bsize - BLOCK_8X8],
                      &palette_size_encodings[n - 2]);
      for (i = 0; i < n; ++i)
        aom_write_literal(w, pmi->palette_colors[i], cm->bit_depth);
      write_uniform(w, n, pmi->palette_first_color_idx[0]);
    }
  }

  if (mbmi->uv_mode == DC_PRED) {
    n = pmi->palette_size[1];
    // UV flag context depends only on whether luma uses a palette.
    aom_write(w, n > 0,
              av1_default_palette_uv_mode_prob[pmi->palette_size[0] > 0]);
    if (n > 0) {
      av1_write_token(w, av1_palette_size_tree,
                      av1_default_palette_uv_size_prob[bsize - BLOCK_8X8],
                      &palette_size_encodings[n - 2]);
      // U colors live at offset PALETTE_MAX_SIZE, V at 2*PALETTE_MAX_SIZE.
      for (i = 0; i < n; ++i) {
        aom_write_literal(w, pmi->palette_colors[PALETTE_MAX_SIZE + i],
                          cm->bit_depth);
        aom_write_literal(w, pmi->palette_colors[2 * PALETTE_MAX_SIZE + i],
                          cm->bit_depth);
      }
      write_uniform(w, n, pmi->palette_first_color_idx[1]);
    }
  }
}
Urvang Joshib100db72016-10-12 16:28:56 -07001173#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001174
// Signal the transform type for the current block when it is not fixed.
// The type is only coded for non-skipped blocks with base_qindex > 0
// (and, with CONFIG_SUPERTX, when supertx is not in use); otherwise the
// decoder infers it.  With CONFIG_EXT_TX the symbol comes from the
// per-set inter/intra trees; without it, a single TX_TYPES tree (or CDF
// under CONFIG_EC_MULTISYMBOL) is used, conditioned on the intra mode
// for intra blocks.
static void write_tx_type(const AV1_COMMON *const cm,
                          const MB_MODE_INFO *const mbmi,
#if CONFIG_SUPERTX
                          const int supertx_enabled,
#endif
                          aom_writer *w) {
  const int is_inter = is_inter_block(mbmi);
#if CONFIG_VAR_TX
  // Inter blocks may mix transform sizes; use the minimum for context.
  const TX_SIZE tx_size = is_inter ? mbmi->min_tx_size : mbmi->tx_size;
#else
  const TX_SIZE tx_size = mbmi->tx_size;
#endif
  if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
    const BLOCK_SIZE bsize = mbmi->sb_type;
    if (get_ext_tx_types(tx_size, bsize, is_inter) > 1 && cm->base_qindex > 0 &&
        !mbmi->skip &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      int eset = get_ext_tx_set(tx_size, bsize, is_inter);
      // eset == 0 means only one candidate type: nothing to signal.
      if (is_inter) {
        assert(ext_tx_used_inter[eset][mbmi->tx_type]);
        if (eset > 0)
          av1_write_token(
              w, av1_ext_tx_inter_tree[eset],
              cm->fc->inter_ext_tx_prob[eset][txsize_sqr_map[tx_size]],
              &ext_tx_inter_encodings[eset][mbmi->tx_type]);
      } else if (ALLOW_INTRA_EXT_TX) {
        if (eset > 0)
          av1_write_token(w, av1_ext_tx_intra_tree[eset],
                          cm->fc->intra_ext_tx_prob[eset][tx_size][mbmi->mode],
                          &ext_tx_intra_encodings[eset][mbmi->tx_type]);
      }
    }
#else
    if (tx_size < TX_32X32 && cm->base_qindex > 0 && !mbmi->skip &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      if (is_inter) {
#if CONFIG_EC_MULTISYMBOL
        aom_write_symbol(w, av1_ext_tx_ind[mbmi->tx_type],
                         cm->fc->inter_ext_tx_cdf[tx_size], TX_TYPES);
#else
        av1_write_token(w, av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[tx_size],
                        &ext_tx_encodings[mbmi->tx_type]);
#endif
      } else {
#if CONFIG_EC_MULTISYMBOL
        aom_write_symbol(
            w, av1_ext_tx_ind[mbmi->tx_type],
            cm->fc->intra_ext_tx_cdf[tx_size]
                                    [intra_mode_to_tx_type_context[mbmi->mode]],
            TX_TYPES);
#else
        av1_write_token(
            w, av1_ext_tx_tree,
            cm->fc
                ->intra_ext_tx_prob[tx_size]
                                   [intra_mode_to_tx_type_context[mbmi->mode]],
            &ext_tx_encodings[mbmi->tx_type]);
#endif
      }
    }
#endif  // CONFIG_EXT_TX
  }
}
1245
// Writes all per-block mode information for a block in an inter frame:
// segment id, skip flag, (optional) delta-q, intra/inter flag, transform
// size, then either the intra modes or the reference frames + prediction
// modes + motion vectors, and finally the transform type.  The order of
// aom_write*/av1_write_token calls here defines the bitstream layout and
// must match the decoder's read order exactly.
static void pack_inter_mode_mvs(AV1_COMP *cpi, const MODE_INFO *mi,
#if CONFIG_SUPERTX
                                int supertx_enabled,
#endif
                                aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
#if !CONFIG_REF_MV
  // Without REF_MV there is a single MV context; with REF_MV the context is
  // selected per motion vector below.
  nmv_context *nmvc = &cm->fc->nmvc;
#endif

#if CONFIG_DELTA_Q
  // Non-const: the delta-q path updates xd->prev_qindex as a side effect.
  MACROBLOCK *const x = &cpi->td.mb;
  MACROBLOCKD *const xd = &x->e_mbd;
#else
  const MACROBLOCK *x = &cpi->td.mb;
  const MACROBLOCKD *xd = &x->e_mbd;
#endif
  const struct segmentation *const seg = &cm->seg;
  struct segmentation_probs *const segp = &cm->fc->seg;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
  const PREDICTION_MODE mode = mbmi->mode;
  const int segment_id = mbmi->segment_id;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;
  const int is_inter = is_inter_block(mbmi);
  const int is_compound = has_second_ref(mbmi);
  int skip, ref;
#if CONFIG_CB4X4
  // With CB4X4 every block size is coded through the unified (>= 8x8) path;
  // otherwise sub-8x8 blocks take the per-4x4 sub-block paths below.
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif

  // Segment id: either predicted from neighbors (temporal update) with an
  // explicit fallback, or coded directly.
  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mbmi->seg_id_predicted;
      aom_prob pred_prob = av1_get_pred_prob_seg_id(segp, xd);
      aom_write(w, pred_flag, pred_prob);
      if (!pred_flag) write_segment_id(w, seg, segp, segment_id);
    } else {
      write_segment_id(w, seg, segp, segment_id);
    }
  }

#if CONFIG_SUPERTX
  // Under supertx the skip flag is not coded here; only read for use below.
  if (supertx_enabled)
    skip = mbmi->skip;
  else
    skip = write_skip(cm, xd, segment_id, mi, w);
#else
  skip = write_skip(cm, xd, segment_id, mi, w);
#endif  // CONFIG_SUPERTX
#if CONFIG_DELTA_Q
  if (cm->delta_q_present_flag) {
    // Derive mi_row/mi_col from the edge offsets (each mi unit is 8 px,
    // edges are in 1/8 px), and signal a delta-q only once per superblock,
    // at its top-left block.
    int mi_row = (-xd->mb_to_top_edge) >> (MI_SIZE_LOG2 + 3);
    int mi_col = (-xd->mb_to_left_edge) >> (MI_SIZE_LOG2 + 3);
    int super_block_upper_left =
        ((mi_row & MAX_MIB_MASK) == 0) && ((mi_col & MAX_MIB_MASK) == 0);
    if ((bsize != BLOCK_64X64 || skip == 0) && super_block_upper_left) {
      int reduced_delta_qindex =
          (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
      write_delta_qindex(cm, reduced_delta_qindex, w);
      xd->prev_qindex = mbmi->current_q_index;
    }
  }
#endif

#if CONFIG_SUPERTX
  if (!supertx_enabled)
#endif  // CONFIG_SUPERTX
    // Intra/inter flag, unless the segment forces skip (which implies inter).
    if (!segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
      aom_write(w, is_inter, av1_get_intra_inter_prob(cm, xd));

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
#if CONFIG_SUPERTX
      !supertx_enabled &&
#endif  // CONFIG_SUPERTX
      !(is_inter && skip) && !xd->lossless[segment_id]) {
#if CONFIG_VAR_TX
    if (is_inter) {  // This implies skip flag is 0.
      // Recursive transform-size coding over the block's tx partition tree.
      const TX_SIZE max_tx_size = max_txsize_rect_lookup[bsize];
      const int bh = tx_size_high_unit[max_tx_size];
      const int bw = tx_size_wide_unit[max_tx_size];
      const int width = num_4x4_blocks_wide_lookup[bsize];
      const int height = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < height; idy += bh)
        for (idx = 0; idx < width; idx += bw)
          write_tx_size_vartx(cm, xd, mbmi, max_tx_size, height != width, idy,
                              idx, w);
    } else {
      set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
      write_selected_tx_size(cm, xd, w);
    }
  } else {
    // Tx size not signalled; still update the left/above tx contexts.
    set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
#else
    write_selected_tx_size(cm, xd, w);
#endif
  }

  if (!is_inter) {
    // ---- Intra block in an inter frame: luma mode(s) then chroma mode ----
    if (bsize >= BLOCK_8X8 || unify_bsize) {
#if CONFIG_EC_MULTISYMBOL
      aom_write_symbol(w, av1_intra_mode_ind[mode],
                       cm->fc->y_mode_cdf[size_group_lookup[bsize]],
                       INTRA_MODES);
#else
      write_intra_mode(w, mode, cm->fc->y_mode_prob[size_group_lookup[bsize]]);
#endif
    } else {
      // Sub-8x8: one luma mode per 4x4 sub-block.
      int idx, idy;
      const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_h) {
        for (idx = 0; idx < 2; idx += num_4x4_w) {
          const PREDICTION_MODE b_mode = mi->bmi[idy * 2 + idx].as_mode;
#if CONFIG_EC_MULTISYMBOL
          aom_write_symbol(w, av1_intra_mode_ind[b_mode], cm->fc->y_mode_cdf[0],
                           INTRA_MODES);
#else
          write_intra_mode(w, b_mode, cm->fc->y_mode_prob[0]);
#endif
        }
      }
    }
    // Chroma mode is conditioned on the luma mode.
#if CONFIG_EC_MULTISYMBOL
    aom_write_symbol(w, av1_intra_mode_ind[mbmi->uv_mode],
                     cm->fc->uv_mode_cdf[mode], INTRA_MODES);
#else
    write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mode]);
#endif
#if CONFIG_EXT_INTRA
    write_intra_angle_info(cm, xd, w);
#endif  // CONFIG_EXT_INTRA
#if CONFIG_PALETTE
    if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
      write_palette_mode_info(cm, xd, mi, w);
#endif  // CONFIG_PALETTE
#if CONFIG_FILTER_INTRA
    if (bsize >= BLOCK_8X8) write_filter_intra_mode_info(cm, mbmi, w);
#endif  // CONFIG_FILTER_INTRA
  } else {
    // ---- Inter block: reference frames, inter mode(s), motion vectors ----
    int16_t mode_ctx;
    write_ref_frames(cm, xd, w);

#if CONFIG_REF_MV
#if CONFIG_EXT_INTER
    if (is_compound)
      mode_ctx = mbmi_ext->compound_mode_context[mbmi->ref_frame[0]];
    else
#endif  // CONFIG_EXT_INTER
      mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
                                           mbmi->ref_frame, bsize, -1);
#else  // CONFIG_REF_MV
    mode_ctx = mbmi_ext->mode_context[mbmi->ref_frame[0]];
#endif  // CONFIG_REF_MV

    // If segment skip is not enabled code the mode.
    if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8 || unify_bsize) {
#if CONFIG_EXT_INTER
        if (is_inter_compound_mode(mode))
          write_inter_compound_mode(cm, w, mode, mode_ctx);
        else if (is_inter_singleref_mode(mode))
#endif  // CONFIG_EXT_INTER
          write_inter_mode(cm, w, mode,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                           is_compound,
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
                           mode_ctx);

#if CONFIG_REF_MV
        // Reference-MV index into the candidate stack for NEAR/NEW modes.
        if (mode == NEARMV || mode == NEWMV)
          write_drl_idx(cm, mbmi, mbmi_ext, w);
#endif
      }
    }

#if !CONFIG_EXT_INTERP && !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION
    write_mb_interp_filter(cpi, xd, w);
#endif  // !CONFIG_EXT_INTERP && !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION

    if (bsize < BLOCK_8X8 && !unify_bsize) {
      // Sub-8x8: code a mode (and MVs for NEW* modes) per 4x4 sub-block.
      const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_h) {
        for (idx = 0; idx < 2; idx += num_4x4_w) {
          const int j = idy * 2 + idx;
          const PREDICTION_MODE b_mode = mi->bmi[j].as_mode;
#if CONFIG_REF_MV
#if CONFIG_EXT_INTER
          if (!is_compound)
#endif  // CONFIG_EXT_INTER
            mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
                                                 mbmi->ref_frame, bsize, j);
#endif
#if CONFIG_EXT_INTER
          if (is_inter_compound_mode(b_mode))
            write_inter_compound_mode(cm, w, b_mode, mode_ctx);
          else if (is_inter_singleref_mode(b_mode))
#endif  // CONFIG_EXT_INTER
            write_inter_mode(cm, w, b_mode,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                             has_second_ref(mbmi),
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
                             mode_ctx);

#if CONFIG_EXT_INTER
          if (b_mode == NEWMV || b_mode == NEWFROMNEARMV ||
              b_mode == NEW_NEWMV) {
#else
          if (b_mode == NEWMV) {
#endif  // CONFIG_EXT_INTER
            // One MV per reference for NEW*-type sub-block modes.
            for (ref = 0; ref < 1 + is_compound; ++ref) {
#if CONFIG_REF_MV
              int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
              int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                                        mbmi_ext->ref_mv_stack[rf_type], ref,
                                        mbmi->ref_mv_idx);
              nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
              av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[ref].as_mv,
#if CONFIG_EXT_INTER
                            &mi->bmi[j].ref_mv[ref].as_mv,
#if CONFIG_REF_MV
                            is_compound,
#endif
#else
#if CONFIG_REF_MV
                            &mi->bmi[j].pred_mv[ref].as_mv, is_compound,
#else
                            &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0].as_mv,
#endif  // CONFIG_REF_MV
#endif  // CONFIG_EXT_INTER
                            nmvc, allow_hp);
            }
          }
#if CONFIG_EXT_INTER
          else if (b_mode == NEAREST_NEWMV || b_mode == NEAR_NEWMV) {
            // Mixed compound mode: only the second (NEW) MV is coded.
#if CONFIG_REF_MV
            int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
            int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                                      mbmi_ext->ref_mv_stack[rf_type], 1,
                                      mbmi->ref_mv_idx);
            nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
            av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[1].as_mv,
                          &mi->bmi[j].ref_mv[1].as_mv,
#if CONFIG_REF_MV
                          is_compound,
#endif
                          nmvc, allow_hp);
          } else if (b_mode == NEW_NEARESTMV || b_mode == NEW_NEARMV) {
            // Mixed compound mode: only the first (NEW) MV is coded.
#if CONFIG_REF_MV
            int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
            int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                                      mbmi_ext->ref_mv_stack[rf_type], 0,
                                      mbmi->ref_mv_idx);
            nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
            av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[0].as_mv,
                          &mi->bmi[j].ref_mv[0].as_mv,
#if CONFIG_REF_MV
                          is_compound,
#endif
                          nmvc, allow_hp);
          }
#endif  // CONFIG_EXT_INTER
        }
      }
    } else {
      // Whole-block MV coding (>= 8x8 or unified block sizes).
#if CONFIG_EXT_INTER
      if (mode == NEWMV || mode == NEWFROMNEARMV || mode == NEW_NEWMV) {
#else
      if (mode == NEWMV) {
#endif  // CONFIG_EXT_INTER
        int_mv ref_mv;
        for (ref = 0; ref < 1 + is_compound; ++ref) {
#if CONFIG_REF_MV
          int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
          int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                                    mbmi_ext->ref_mv_stack[rf_type], ref,
                                    mbmi->ref_mv_idx);
          nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
          ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0];
#if CONFIG_EXT_INTER
          // NEWFROMNEARMV predicts from the second (NEAR) reference MV.
          if (mode == NEWFROMNEARMV)
            av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv,
                          &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][1].as_mv,
#if CONFIG_REF_MV
                          is_compound,
#endif
                          nmvc, allow_hp);
          else
#endif  // CONFIG_EXT_INTER
            av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv,
#if CONFIG_REF_MV
                          is_compound,
#endif
                          nmvc, allow_hp);
        }
#if CONFIG_EXT_INTER
      } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
#if CONFIG_REF_MV
        int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
        int nmv_ctx =
            av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                        mbmi_ext->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
        nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
        av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv,
                      &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv,
#if CONFIG_REF_MV
                      is_compound,
#endif
                      nmvc, allow_hp);
      } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
#if CONFIG_REF_MV
        int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
        int nmv_ctx =
            av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                        mbmi_ext->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
        nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
        av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv,
                      &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv,
#if CONFIG_REF_MV
                      is_compound,
#endif
                      nmvc, allow_hp);
#endif  // CONFIG_EXT_INTER
      }
    }

#if CONFIG_EXT_INTER
    // Inter-intra signalling (single-reference blocks only).
    if (cpi->common.reference_mode != COMPOUND_REFERENCE &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        is_interintra_allowed(mbmi)) {
      const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
      const int bsize_group = size_group_lookup[bsize];
      aom_write(w, interintra, cm->fc->interintra_prob[bsize_group]);
      if (interintra) {
        write_interintra_mode(w, mbmi->interintra_mode,
                              cm->fc->interintra_mode_prob[bsize_group]);
        if (is_interintra_wedge_used(bsize)) {
          aom_write(w, mbmi->use_wedge_interintra,
                    cm->fc->wedge_interintra_prob[bsize]);
          if (mbmi->use_wedge_interintra) {
            aom_write_literal(w, mbmi->interintra_wedge_index,
                              get_wedge_bits_lookup(bsize));
            assert(mbmi->interintra_wedge_sign == 0);
          }
        }
      }
    }
#endif  // CONFIG_EXT_INTER

#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_SUPERTX
    if (!supertx_enabled)
#endif  // CONFIG_SUPERTX
#if CONFIG_EXT_INTER
      if (mbmi->ref_frame[1] != INTRA_FRAME)
#endif  // CONFIG_EXT_INTER
        write_motion_mode(cm, mbmi, w);
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION

#if CONFIG_EXT_INTER
    // Compound prediction type (wedge / segment) for compound modes.
    if (cpi->common.reference_mode != SINGLE_REFERENCE &&
        is_inter_compound_mode(mbmi->mode)
#if CONFIG_MOTION_VAR
        && mbmi->motion_mode == SIMPLE_TRANSLATION
#endif  // CONFIG_MOTION_VAR
        ) {
      av1_write_token(
          w, av1_compound_type_tree, cm->fc->compound_type_prob[bsize],
          &compound_type_encodings[mbmi->interinter_compound_data.type]);
      if (mbmi->interinter_compound_data.type == COMPOUND_WEDGE) {
        aom_write_literal(w, mbmi->interinter_compound_data.wedge_index,
                          get_wedge_bits_lookup(bsize));
        aom_write_bit(w, mbmi->interinter_compound_data.wedge_sign);
      }
#if CONFIG_COMPOUND_SEGMENT
      else if (mbmi->interinter_compound_data.type == COMPOUND_SEG) {
        aom_write_bit(w, mbmi->interinter_compound_data.which);
      }
#endif  // CONFIG_COMPOUND_SEGMENT
    }
#endif  // CONFIG_EXT_INTER

#if CONFIG_WARPED_MOTION
    if (mbmi->motion_mode != WARPED_CAUSAL)
#endif  // CONFIG_WARPED_MOTION
#if CONFIG_EXT_INTERP || CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION
      write_mb_interp_filter(cpi, xd, w);
#endif  // CONFIG_EXT_INTERP || CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION
  }

  write_tx_type(cm, mbmi,
#if CONFIG_SUPERTX
                supertx_enabled,
#endif
                w);
}
1656
// Writes per-block mode information for a block in an intra-only (key)
// frame: segment id, skip, (optional) delta-q, transform size, luma intra
// mode(s), chroma intra mode, and the transform type.  The write order must
// mirror the decoder exactly.
#if CONFIG_DELTA_Q
// xd is non-const here because the delta-q path updates xd->prev_qindex.
static void write_mb_modes_kf(AV1_COMMON *cm, MACROBLOCKD *xd,
                              MODE_INFO **mi_8x8, aom_writer *w) {
  int skip;
#else
static void write_mb_modes_kf(AV1_COMMON *cm, const MACROBLOCKD *xd,
                              MODE_INFO **mi_8x8, aom_writer *w) {
#endif
  const struct segmentation *const seg = &cm->seg;
  struct segmentation_probs *const segp = &cm->fc->seg;
  const MODE_INFO *const mi = mi_8x8[0];
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
#if CONFIG_CB4X4
  // With CB4X4 all block sizes use the unified (>= 8x8) mode-coding path.
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif

  if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id);

#if CONFIG_DELTA_Q
  skip = write_skip(cm, xd, mbmi->segment_id, mi, w);
  if (cm->delta_q_present_flag) {
    // Recover mi_row/mi_col from edge offsets; signal delta-q only at the
    // top-left block of each superblock.
    int mi_row = (-xd->mb_to_top_edge) >> 6;
    int mi_col = (-xd->mb_to_left_edge) >> 6;
    int super_block_upper_left = ((mi_row & 7) == 0) && ((mi_col & 7) == 0);
    if ((bsize != BLOCK_64X64 || skip == 0) && super_block_upper_left) {
      int reduced_delta_qindex =
          (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
      write_delta_qindex(cm, reduced_delta_qindex, w);
      xd->prev_qindex = mbmi->current_q_index;
    }
  }
#else
  write_skip(cm, xd, mbmi->segment_id, mi, w);
#endif

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !xd->lossless[mbmi->segment_id])
    write_selected_tx_size(cm, xd, w);

  if (bsize >= BLOCK_8X8 || unify_bsize) {
    // Luma mode, conditioned on above/left neighbor modes.
#if CONFIG_EC_MULTISYMBOL
    aom_write_symbol(w, av1_intra_mode_ind[mbmi->mode],
                     get_y_mode_cdf(cm, mi, above_mi, left_mi, 0), INTRA_MODES);
#else
    write_intra_mode(w, mbmi->mode,
                     get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
#endif
  } else {
    // Sub-8x8: one luma mode per 4x4 sub-block.
    const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
    const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
    int idx, idy;

    for (idy = 0; idy < 2; idy += num_4x4_h) {
      for (idx = 0; idx < 2; idx += num_4x4_w) {
        const int block = idy * 2 + idx;
#if CONFIG_EC_MULTISYMBOL
        aom_write_symbol(w, av1_intra_mode_ind[mi->bmi[block].as_mode],
                         get_y_mode_cdf(cm, mi, above_mi, left_mi, block),
                         INTRA_MODES);
#else
        write_intra_mode(w, mi->bmi[block].as_mode,
                         get_y_mode_probs(cm, mi, above_mi, left_mi, block));
#endif
      }
    }
  }
  // Chroma mode is conditioned on the luma mode.
#if CONFIG_EC_MULTISYMBOL
  aom_write_symbol(w, av1_intra_mode_ind[mbmi->uv_mode],
                   cm->fc->uv_mode_cdf[mbmi->mode], INTRA_MODES);
#else
  write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mbmi->mode]);
#endif
#if CONFIG_EXT_INTRA
  write_intra_angle_info(cm, xd, w);
#endif  // CONFIG_EXT_INTRA
#if CONFIG_PALETTE
  if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
    write_palette_mode_info(cm, xd, mi, w);
#endif  // CONFIG_PALETTE
#if CONFIG_FILTER_INTRA
  if (bsize >= BLOCK_8X8) write_filter_intra_mode_info(cm, mbmi, w);
#endif  // CONFIG_FILTER_INTRA

  // Key frames never use supertx, so pass 0 for supertx_enabled.
  write_tx_type(cm, mbmi,
#if CONFIG_SUPERTX
                0,
#endif
                w);
}
1751
#if CONFIG_SUPERTX
// Dispatch macro: call sites always pass supertx_enabled; it is forwarded to
// write_modes_b() only when SUPERTX is compiled in, and dropped otherwise so
// the two write_modes_b() signatures stay in sync with their callers.
#define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                              mi_row, mi_col)                              \
  write_modes_b(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col)
#else
#define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                              mi_row, mi_col)                              \
  write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col)
#endif  // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001761
Angie Chiangd4022822016-11-02 18:30:25 -07001762#if CONFIG_RD_DEBUG
1763static void dump_mode_info(MODE_INFO *mi) {
1764 printf("\nmi->mbmi.mi_row == %d\n", mi->mbmi.mi_row);
1765 printf("&& mi->mbmi.mi_col == %d\n", mi->mbmi.mi_col);
1766 printf("&& mi->mbmi.sb_type == %d\n", mi->mbmi.sb_type);
1767 printf("&& mi->mbmi.tx_size == %d\n", mi->mbmi.tx_size);
1768 if (mi->mbmi.sb_type >= BLOCK_8X8) {
1769 printf("&& mi->mbmi.mode == %d\n", mi->mbmi.mode);
1770 } else {
1771 printf("&& mi->bmi[0].as_mode == %d\n", mi->bmi[0].as_mode);
1772 }
1773}
// Debug helper (CONFIG_RD_DEBUG): returns 1 and prints diagnostics when the
// RD-stage coefficient cost for 'plane' disagrees with the cost measured
// while packing tokens; returns 0 when they match.  Under VAR_TX it also
// dumps both per-txb cost maps for side-by-side comparison.
static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats,
                                   int plane) {
  if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) {
#if CONFIG_VAR_TX
    int r, c;
#endif
    printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n",
           plane, rd_stats->txb_coeff_cost[plane], token_stats->cost);
#if CONFIG_VAR_TX
    // Costs predicted by the RD search.
    printf("rd txb_coeff_cost_map\n");
    for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
      for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
        printf("%d ", rd_stats->txb_coeff_cost_map[plane][r][c]);
      }
      printf("\n");
    }

    // Costs actually accumulated while packing the bitstream.
    printf("pack txb_coeff_cost_map\n");
    for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
      for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
        printf("%d ", token_stats->txb_coeff_cost_map[r][c]);
      }
      printf("\n");
    }
#endif
    return 1;
  }
  return 0;
}
Angie Chiangd4022822016-11-02 18:30:25 -07001803#endif
1804
Yushin Cho77bba8d2016-11-04 16:36:56 -07001805#if CONFIG_PVQ
1806PVQ_INFO *get_pvq_block(PVQ_QUEUE *pvq_q) {
1807 PVQ_INFO *pvq;
1808
1809 assert(pvq_q->curr_pos <= pvq_q->last_pos);
1810 assert(pvq_q->curr_pos < pvq_q->buf_len);
1811
1812 pvq = pvq_q->buf + pvq_q->curr_pos;
1813 ++pvq_q->curr_pos;
1814
1815 return pvq;
1816}
1817#endif
1818
Yaowu Xuf883b422016-08-30 14:01:10 -07001819static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
1820 aom_writer *w, const TOKENEXTRA **tok,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001821 const TOKENEXTRA *const tok_end,
1822#if CONFIG_SUPERTX
1823 int supertx_enabled,
1824#endif
1825 int mi_row, int mi_col) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001826 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001827 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1828 MODE_INFO *m;
1829 int plane;
1830 int bh, bw;
Yushin Cho77bba8d2016-11-04 16:36:56 -07001831#if CONFIG_PVQ
1832 MB_MODE_INFO *mbmi;
1833 BLOCK_SIZE bsize;
1834 od_adapt_ctx *adapt;
1835 (void)tok;
1836 (void)tok_end;
1837#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001838 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
1839 m = xd->mi[0];
1840
1841 assert(m->mbmi.sb_type <= cm->sb_size);
1842
Jingning Hanc709e1f2016-12-06 14:48:09 -08001843 bh = mi_size_high[m->mbmi.sb_type];
1844 bw = mi_size_wide[m->mbmi.sb_type];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001845
1846 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
1847
1848 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);
Yushin Cho77bba8d2016-11-04 16:36:56 -07001849#if CONFIG_PVQ
1850 mbmi = &m->mbmi;
1851 bsize = mbmi->sb_type;
1852 adapt = &cpi->td.mb.daala_enc.state.adapt;
1853#endif
1854
Yaowu Xuc27fc142016-08-22 16:08:15 -07001855 if (frame_is_intra_only(cm)) {
1856 write_mb_modes_kf(cm, xd, xd->mi, w);
1857 } else {
1858#if CONFIG_VAR_TX
1859 xd->above_txfm_context = cm->above_txfm_context + mi_col;
1860 xd->left_txfm_context =
1861 xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
1862#endif
1863#if CONFIG_EXT_INTERP
Yaowu Xuf883b422016-08-30 14:01:10 -07001864 // av1_is_interp_needed needs the ref frame buffers set up to look
1865 // up if they are scaled. av1_is_interp_needed is in turn needed by
Yaowu Xuc27fc142016-08-22 16:08:15 -07001866 // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
1867 set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
1868#endif // CONFIG_EXT_INTERP
1869#if 0
1870 // NOTE(zoeliu): For debug
1871 if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
1872 const PREDICTION_MODE mode = m->mbmi.mode;
1873 const int segment_id = m->mbmi.segment_id;
1874 const BLOCK_SIZE bsize = m->mbmi.sb_type;
1875
1876 // For sub8x8, simply dump out the first sub8x8 block info
1877 const PREDICTION_MODE b_mode =
1878 (bsize < BLOCK_8X8) ? m->bmi[0].as_mode : -1;
1879 const int mv_x = (bsize < BLOCK_8X8) ?
1880 m->bmi[0].as_mv[0].as_mv.row : m->mbmi.mv[0].as_mv.row;
1881 const int mv_y = (bsize < BLOCK_8X8) ?
1882 m->bmi[0].as_mv[0].as_mv.col : m->mbmi.mv[0].as_mv.col;
1883
1884 printf("Before pack_inter_mode_mvs(): "
1885 "Frame=%d, (mi_row,mi_col)=(%d,%d), "
1886 "mode=%d, segment_id=%d, bsize=%d, b_mode=%d, "
1887 "mv[0]=(%d, %d), ref[0]=%d, ref[1]=%d\n",
1888 cm->current_video_frame, mi_row, mi_col,
1889 mode, segment_id, bsize, b_mode, mv_x, mv_y,
1890 m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
1891 }
1892#endif // 0
1893 pack_inter_mode_mvs(cpi, m,
1894#if CONFIG_SUPERTX
1895 supertx_enabled,
1896#endif
1897 w);
1898 }
1899
Urvang Joshib100db72016-10-12 16:28:56 -07001900#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001901 for (plane = 0; plane <= 1; ++plane) {
1902 if (m->mbmi.palette_mode_info.palette_size[plane] > 0) {
Jingning Hanae5cfde2016-11-30 12:01:44 -08001903 const int rows =
1904 block_size_high[m->mbmi.sb_type] >> (xd->plane[plane].subsampling_y);
1905 const int cols =
1906 block_size_wide[m->mbmi.sb_type] >> (xd->plane[plane].subsampling_x);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001907 assert(*tok < tok_end);
1908 pack_palette_tokens(w, tok, m->mbmi.palette_mode_info.palette_size[plane],
1909 rows * cols - 1);
1910 assert(*tok < tok_end + m->mbmi.skip);
1911 }
1912 }
Urvang Joshib100db72016-10-12 16:28:56 -07001913#endif // CONFIG_PALETTE
Yushin Cho77bba8d2016-11-04 16:36:56 -07001914#if !CONFIG_PVQ
Yaowu Xuc27fc142016-08-22 16:08:15 -07001915#if CONFIG_SUPERTX
1916 if (supertx_enabled) return;
1917#endif // CONFIG_SUPERTX
1918
iole moccagattaf25a4cf2016-11-11 23:57:57 -08001919#if CONFIG_COEF_INTERLEAVE
1920 if (!m->mbmi.skip) {
1921 const struct macroblockd_plane *const pd_y = &xd->plane[0];
1922 const struct macroblockd_plane *const pd_c = &xd->plane[1];
1923 const TX_SIZE tx_log2_y = m->mbmi.tx_size;
1924 const TX_SIZE tx_log2_c = get_uv_tx_size(&m->mbmi, pd_c);
1925 const int tx_sz_y = (1 << tx_log2_y);
1926 const int tx_sz_c = (1 << tx_log2_c);
1927
1928 const BLOCK_SIZE plane_bsize_y =
1929 get_plane_block_size(AOMMAX(m->mbmi.sb_type, 3), pd_y);
1930 const BLOCK_SIZE plane_bsize_c =
1931 get_plane_block_size(AOMMAX(m->mbmi.sb_type, 3), pd_c);
1932
1933 const int num_4x4_w_y = num_4x4_blocks_wide_lookup[plane_bsize_y];
1934 const int num_4x4_w_c = num_4x4_blocks_wide_lookup[plane_bsize_c];
1935 const int num_4x4_h_y = num_4x4_blocks_high_lookup[plane_bsize_y];
1936 const int num_4x4_h_c = num_4x4_blocks_high_lookup[plane_bsize_c];
1937
1938 const int max_4x4_w_y = get_max_4x4_size(num_4x4_w_y, xd->mb_to_right_edge,
1939 pd_y->subsampling_x);
1940 const int max_4x4_h_y = get_max_4x4_size(num_4x4_h_y, xd->mb_to_bottom_edge,
1941 pd_y->subsampling_y);
1942 const int max_4x4_w_c = get_max_4x4_size(num_4x4_w_c, xd->mb_to_right_edge,
1943 pd_c->subsampling_x);
1944 const int max_4x4_h_c = get_max_4x4_size(num_4x4_h_c, xd->mb_to_bottom_edge,
1945 pd_c->subsampling_y);
1946
1947 // The max_4x4_w/h may be smaller than tx_sz under some corner cases,
1948 // i.e. when the SB is splitted by tile boundaries.
1949 const int tu_num_w_y = (max_4x4_w_y + tx_sz_y - 1) / tx_sz_y;
1950 const int tu_num_h_y = (max_4x4_h_y + tx_sz_y - 1) / tx_sz_y;
1951 const int tu_num_w_c = (max_4x4_w_c + tx_sz_c - 1) / tx_sz_c;
1952 const int tu_num_h_c = (max_4x4_h_c + tx_sz_c - 1) / tx_sz_c;
1953 const int tu_num_y = tu_num_w_y * tu_num_h_y;
1954 const int tu_num_c = tu_num_w_c * tu_num_h_c;
1955
1956 int tu_idx_y = 0, tu_idx_c = 0;
1957 TOKEN_STATS token_stats;
1958 init_token_stats(&token_stats);
1959
1960 assert(*tok < tok_end);
1961
1962 while (tu_idx_y < tu_num_y) {
1963 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_y, &token_stats);
1964 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
1965 (*tok)++;
1966 tu_idx_y++;
1967
1968 if (tu_idx_c < tu_num_c) {
1969 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
1970 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
1971 (*tok)++;
1972
1973 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
1974 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
1975 (*tok)++;
1976
1977 tu_idx_c++;
1978 }
1979 }
1980
1981 // In 422 case, it's possilbe that Chroma has more TUs than Luma
1982 while (tu_idx_c < tu_num_c) {
1983 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
1984 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
1985 (*tok)++;
1986
1987 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
1988 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
1989 (*tok)++;
1990
1991 tu_idx_c++;
1992 }
1993 }
1994#else // CONFIG_COEF_INTERLEAVE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001995 if (!m->mbmi.skip) {
1996 assert(*tok < tok_end);
1997 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
Angie Chiang3963d632016-11-10 18:41:40 -08001998 MB_MODE_INFO *mbmi = &m->mbmi;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001999#if CONFIG_VAR_TX
2000 const struct macroblockd_plane *const pd = &xd->plane[plane];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002001 BLOCK_SIZE bsize = mbmi->sb_type;
2002 const BLOCK_SIZE plane_bsize =
Yaowu Xuf883b422016-08-30 14:01:10 -07002003 get_plane_block_size(AOMMAX(bsize, BLOCK_8X8), pd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002004
Jingning Han42a0fb32016-10-31 10:43:31 -07002005 const int num_4x4_w =
2006 block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
2007 const int num_4x4_h =
2008 block_size_high[plane_bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002009 int row, col;
Angie Chiangd4022822016-11-02 18:30:25 -07002010 TOKEN_STATS token_stats;
Angie Chiangd02001d2016-11-06 15:31:49 -08002011 init_token_stats(&token_stats);
Angie Chiangd4022822016-11-02 18:30:25 -07002012
Jingning Hanfe45b212016-11-22 10:30:23 -08002013 if (is_inter_block(mbmi)) {
Jingning Han70e5f3f2016-11-09 17:03:07 -08002014 const TX_SIZE max_tx_size = max_txsize_rect_lookup[plane_bsize];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002015 int block = 0;
Jingning Han42a0fb32016-10-31 10:43:31 -07002016 const int step =
2017 tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
2018 const int bkw = tx_size_wide_unit[max_tx_size];
2019 const int bkh = tx_size_high_unit[max_tx_size];
2020 for (row = 0; row < num_4x4_h; row += bkh) {
2021 for (col = 0; col < num_4x4_w; col += bkw) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002022 pack_txb_tokens(w, tok, tok_end, xd, mbmi, plane, plane_bsize,
Angie Chiangd4022822016-11-02 18:30:25 -07002023 cm->bit_depth, block, row, col, max_tx_size,
2024 &token_stats);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002025 block += step;
2026 }
2027 }
Angie Chiangd02001d2016-11-06 15:31:49 -08002028#if CONFIG_RD_DEBUG
Angie Chiang3963d632016-11-10 18:41:40 -08002029 if (mbmi->sb_type >= BLOCK_8X8 &&
2030 rd_token_stats_mismatch(&m->mbmi.rd_stats, &token_stats, plane)) {
Angie Chiangd02001d2016-11-06 15:31:49 -08002031 dump_mode_info(m);
2032 assert(0);
2033 }
Jingning Hanfe45b212016-11-22 10:30:23 -08002034#endif // CONFIG_RD_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07002035 } else {
2036 TX_SIZE tx = plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane])
2037 : m->mbmi.tx_size;
Jingning Han42a0fb32016-10-31 10:43:31 -07002038 const int bkw = tx_size_wide_unit[tx];
2039 const int bkh = tx_size_high_unit[tx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002040
Jingning Han42a0fb32016-10-31 10:43:31 -07002041 for (row = 0; row < num_4x4_h; row += bkh)
2042 for (col = 0; col < num_4x4_w; col += bkw)
Angie Chiangd4022822016-11-02 18:30:25 -07002043 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002044 }
2045#else
2046 TX_SIZE tx =
2047 plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane]) : m->mbmi.tx_size;
Angie Chiangd4022822016-11-02 18:30:25 -07002048 TOKEN_STATS token_stats;
Angie Chiang3963d632016-11-10 18:41:40 -08002049 init_token_stats(&token_stats);
Angie Chiangd4022822016-11-02 18:30:25 -07002050 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats);
Angie Chiang3963d632016-11-10 18:41:40 -08002051#if CONFIG_RD_DEBUG
2052 if (is_inter_block(mbmi) && mbmi->sb_type >= BLOCK_8X8 &&
2053 rd_token_stats_mismatch(&m->mbmi.rd_stats, &token_stats, plane)) {
2054 dump_mode_info(m);
2055 assert(0);
2056 }
2057#else
2058 (void)mbmi;
Jingning Hanfe45b212016-11-22 10:30:23 -08002059#endif // CONFIG_RD_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07002060#endif // CONFIG_VAR_TX
Angie Chiangd4022822016-11-02 18:30:25 -07002061
Yaowu Xuc27fc142016-08-22 16:08:15 -07002062 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
2063 (*tok)++;
2064 }
2065 }
iole moccagattaf25a4cf2016-11-11 23:57:57 -08002066#endif // CONFIG_COEF_INTERLEAVE
Yushin Cho77bba8d2016-11-04 16:36:56 -07002067#else
2068 // PVQ writes its tokens (i.e. symbols) here.
2069 if (!m->mbmi.skip) {
2070 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
2071 PVQ_INFO *pvq;
2072 TX_SIZE tx_size =
2073 plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane]) : m->mbmi.tx_size;
2074 int idx, idy;
2075 const struct macroblockd_plane *const pd = &xd->plane[plane];
2076 int num_4x4_w;
2077 int num_4x4_h;
2078 int max_blocks_wide;
2079 int max_blocks_high;
2080 int step = (1 << tx_size);
2081 const int step_xy = 1 << (tx_size << 1);
2082 int block = 0;
2083
2084 if (tx_size == TX_4X4 && bsize <= BLOCK_8X8) {
2085 num_4x4_w = 2 >> xd->plane[plane].subsampling_x;
2086 num_4x4_h = 2 >> xd->plane[plane].subsampling_y;
2087 } else {
2088 num_4x4_w =
2089 num_4x4_blocks_wide_lookup[bsize] >> xd->plane[plane].subsampling_x;
2090 num_4x4_h =
2091 num_4x4_blocks_high_lookup[bsize] >> xd->plane[plane].subsampling_y;
2092 }
2093 // TODO: Do we need below for 4x4,4x8,8x4 cases as well?
2094 max_blocks_wide =
2095 num_4x4_w + (xd->mb_to_right_edge >= 0
2096 ? 0
2097 : xd->mb_to_right_edge >> (5 + pd->subsampling_x));
2098 max_blocks_high =
2099 num_4x4_h + (xd->mb_to_bottom_edge >= 0
2100 ? 0
2101 : xd->mb_to_bottom_edge >> (5 + pd->subsampling_y));
2102
2103 // TODO(yushin) Try to use av1_foreach_transformed_block_in_plane().
2104 // Logic like the mb_to_right_edge/mb_to_bottom_edge stuff should
2105 // really be centralized in one place.
2106
2107 for (idy = 0; idy < max_blocks_high; idy += step) {
2108 for (idx = 0; idx < max_blocks_wide; idx += step) {
2109 const int is_keyframe = 0;
2110 const int encode_flip = 0;
2111 const int flip = 0;
2112 const int robust = 1;
2113 int i;
2114 const int has_dc_skip = 1;
2115 int *exg = &adapt->pvq.pvq_exg[plane][tx_size][0];
2116 int *ext = adapt->pvq.pvq_ext + tx_size * PVQ_MAX_PARTITIONS;
2117 generic_encoder *model = adapt->pvq.pvq_param_model;
2118
2119 pvq = get_pvq_block(cpi->td.mb.pvq_q);
2120
2121 // encode block skip info
2122 od_encode_cdf_adapt(&w->ec, pvq->ac_dc_coded,
2123 adapt->skip_cdf[2 * tx_size + (plane != 0)], 4,
2124 adapt->skip_increment);
2125
2126 // AC coeffs coded?
2127 if (pvq->ac_dc_coded & 0x02) {
2128 assert(pvq->bs <= tx_size);
2129 for (i = 0; i < pvq->nb_bands; i++) {
2130 if (i == 0 || (!pvq->skip_rest &&
2131 !(pvq->skip_dir & (1 << ((i - 1) % 3))))) {
2132 pvq_encode_partition(
Nathan E. Egge6b0b4a92016-12-22 09:21:06 -05002133 w, pvq->qg[i], pvq->theta[i], pvq->max_theta[i],
Yushin Cho77bba8d2016-11-04 16:36:56 -07002134 pvq->y + pvq->off[i], pvq->size[i], pvq->k[i], model, adapt,
2135 exg + i, ext + i, robust || is_keyframe,
Yushin Cho48f84db2016-11-07 21:20:17 -08002136 (plane != 0) * OD_TXSIZES * PVQ_MAX_PARTITIONS +
Yushin Cho77bba8d2016-11-04 16:36:56 -07002137 pvq->bs * PVQ_MAX_PARTITIONS + i,
2138 is_keyframe, i == 0 && (i < pvq->nb_bands - 1),
2139 pvq->skip_rest, encode_flip, flip);
2140 }
2141 if (i == 0 && !pvq->skip_rest && pvq->bs > 0) {
2142 od_encode_cdf_adapt(
2143 &w->ec, pvq->skip_dir,
2144 &adapt->pvq
2145 .pvq_skip_dir_cdf[(plane != 0) + 2 * (pvq->bs - 1)][0],
2146 7, adapt->pvq.pvq_skip_dir_increment);
2147 }
2148 }
2149 }
2150 // Encode residue of DC coeff, if exist.
2151 if (!has_dc_skip || (pvq->ac_dc_coded & 1)) { // DC coded?
Nathan E. Egge760c27f2016-12-22 12:30:00 -05002152 generic_encode(w, &adapt->model_dc[plane],
Yushin Cho77bba8d2016-11-04 16:36:56 -07002153 abs(pvq->dq_dc_residue) - has_dc_skip, -1,
2154 &adapt->ex_dc[plane][pvq->bs][0], 2);
2155 }
2156 if ((pvq->ac_dc_coded & 1)) { // DC coded?
Nathan E. Eggee335fb72016-12-29 20:19:08 -05002157 aom_write_bit(w, pvq->dq_dc_residue < 0);
Yushin Cho77bba8d2016-11-04 16:36:56 -07002158 }
2159 block += step_xy;
2160 }
2161 } // for (idy = 0;
2162 } // for (plane =
2163 } // if (!m->mbmi.skip)
Angie Chiangd4022822016-11-02 18:30:25 -07002164#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002165}
2166
Yaowu Xuf883b422016-08-30 14:01:10 -07002167static void write_partition(const AV1_COMMON *const cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002168 const MACROBLOCKD *const xd, int hbs, int mi_row,
2169 int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize,
Yaowu Xuf883b422016-08-30 14:01:10 -07002170 aom_writer *w) {
Jingning Hanbf9c6b72016-12-14 14:50:45 -08002171 const int is_partition_point = bsize >= BLOCK_8X8;
2172 const int ctx = is_partition_point
2173 ? partition_plane_context(xd, mi_row, mi_col, bsize)
2174 : 0;
Yaowu Xuf883b422016-08-30 14:01:10 -07002175 const aom_prob *const probs = cm->fc->partition_prob[ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002176 const int has_rows = (mi_row + hbs) < cm->mi_rows;
2177 const int has_cols = (mi_col + hbs) < cm->mi_cols;
2178
Jingning Hanbf9c6b72016-12-14 14:50:45 -08002179 if (!is_partition_point) return;
2180
Yaowu Xuc27fc142016-08-22 16:08:15 -07002181 if (has_rows && has_cols) {
2182#if CONFIG_EXT_PARTITION_TYPES
2183 if (bsize <= BLOCK_8X8)
Yaowu Xuf883b422016-08-30 14:01:10 -07002184 av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002185 else
Yaowu Xuf883b422016-08-30 14:01:10 -07002186 av1_write_token(w, av1_ext_partition_tree, probs,
2187 &ext_partition_encodings[p]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002188#else
Nathan E. Egge9d9eb6c2016-11-16 09:44:26 -05002189#if CONFIG_EC_MULTISYMBOL
Nathan E. Egge56eeaa52016-07-25 10:23:33 -04002190 aom_write_symbol(w, p, cm->fc->partition_cdf[ctx], PARTITION_TYPES);
Nathan E. Eggefba2be62016-05-03 09:48:54 -04002191#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002192 av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]);
Nathan E. Eggefba2be62016-05-03 09:48:54 -04002193#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002194#endif // CONFIG_EXT_PARTITION_TYPES
2195 } else if (!has_rows && has_cols) {
2196 assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
Yaowu Xuf883b422016-08-30 14:01:10 -07002197 aom_write(w, p == PARTITION_SPLIT, probs[1]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002198 } else if (has_rows && !has_cols) {
2199 assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
Yaowu Xuf883b422016-08-30 14:01:10 -07002200 aom_write(w, p == PARTITION_SPLIT, probs[2]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002201 } else {
2202 assert(p == PARTITION_SPLIT);
2203 }
2204}
2205
#if CONFIG_SUPERTX
// write_modes_sb_wrapper() papers over the CONFIG_SUPERTX signature
// difference: when SUPERTX is compiled out the supertx_enabled argument is
// silently dropped, so call sites stay identical in both builds.
#define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                               mi_row, mi_col, bsize)                       \
  write_modes_sb(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col, \
                 bsize)
#else
#define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                               mi_row, mi_col, bsize)                       \
  write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, bsize)
#endif  // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07002216
// Recursively writes one superblock partition subtree to the bitstream:
// the partition symbol, each leaf block's modes/MVs/tokens (via the
// write_modes_b/sb wrappers), and the trailing per-superblock filter
// signalling (dering gain, CLPF flags). The emission order here defines the
// bitstream and must match the decoder exactly.
static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile,
                           aom_writer *const w, const TOKENEXTRA **tok,
                           const TOKENEXTRA *const tok_end,
#if CONFIG_SUPERTX
                           int supertx_enabled,
#endif
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  const AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  const int hbs = mi_size_wide[bsize] / 2;  // half block size, in MI units
  const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
  const BLOCK_SIZE subsize = get_subsize(bsize, partition);
#if CONFIG_CB4X4
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif

#if CONFIG_SUPERTX
  const int mi_offset = mi_row * cm->mi_stride + mi_col;
  MB_MODE_INFO *mbmi;
  // Tokens for a supertx block are packed once at the supertx root, not at
  // every leaf of the recursion.
  const int pack_token = !supertx_enabled;
  TX_SIZE supertx_size;
  int plane;
#endif

  // Blocks that start outside the frame are not coded.
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
#if CONFIG_SUPERTX
  mbmi = &cm->mi_grid_visible[mi_offset]->mbmi;
  xd->mi = cm->mi_grid_visible + mi_offset;
  set_mi_row_col(xd, tile, mi_row, mi_size_high[bsize], mi_col,
                 mi_size_wide[bsize], cm->mi_rows, cm->mi_cols);
  // Signal whether this split block is coded with a single supertx transform
  // spanning the whole partition (inter, non-lossless frames only).
  if (!supertx_enabled && !frame_is_intra_only(cm) &&
      partition != PARTITION_NONE && bsize <= MAX_SUPERTX_BLOCK_SIZE &&
      !xd->lossless[0]) {
    aom_prob prob;
    supertx_size = max_txsize_lookup[bsize];
    prob = cm->fc->supertx_prob[partition_supertx_context_lookup[partition]]
                               [supertx_size];
    supertx_enabled = (xd->mi[0]->mbmi.tx_size == supertx_size);
    aom_write(w, supertx_enabled, prob);
  }
#endif  // CONFIG_SUPERTX
  if (subsize < BLOCK_8X8 && !unify_bsize) {
    write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row,
                          mi_col);
  } else {
    // Emit leaves/subtrees in the partition's fixed raster order.
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        if (mi_row + hbs < cm->mi_rows)
          write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                                mi_row + hbs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        if (mi_col + hbs < cm->mi_cols)
          write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                                mi_row, mi_col + hbs);
        break;
      case PARTITION_SPLIT:
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row, mi_col, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row, mi_col + hbs, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row + hbs, mi_col, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row + hbs, mi_col + hbs, subsize);
        break;
#if CONFIG_EXT_PARTITION_TYPES
      case PARTITION_HORZ_A:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        break;
      case PARTITION_HORZ_B:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col + hbs);
        break;
      case PARTITION_VERT_A:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        break;
      case PARTITION_VERT_B:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col + hbs);
        break;
#endif  // CONFIG_EXT_PARTITION_TYPES
      default: assert(0);
    }
  }
#if CONFIG_SUPERTX
  // At the supertx root (pack_token): write skip, the transform type, and all
  // coefficient tokens for the whole supertx block in one pass.
  if (partition != PARTITION_NONE && supertx_enabled && pack_token) {
    int skip;
    const int bsw = mi_size_wide[bsize];
    const int bsh = mi_size_high[bsize];
    xd->mi = cm->mi_grid_visible + mi_offset;
    supertx_size = mbmi->tx_size;
    set_mi_row_col(xd, tile, mi_row, bsh, mi_col, bsw, cm->mi_rows,
                   cm->mi_cols);

    assert(IMPLIES(!cm->seg.enabled, mbmi->segment_id_supertx == 0));
    assert(mbmi->segment_id_supertx < MAX_SEGMENTS);

    skip = write_skip(cm, xd, mbmi->segment_id_supertx, xd->mi[0], w);
#if CONFIG_EXT_TX
    if (get_ext_tx_types(supertx_size, bsize, 1) > 1 && !skip) {
      int eset = get_ext_tx_set(supertx_size, bsize, 1);
      if (eset > 0) {
        av1_write_token(w, av1_ext_tx_inter_tree[eset],
                        cm->fc->inter_ext_tx_prob[eset][supertx_size],
                        &ext_tx_inter_encodings[eset][mbmi->tx_type]);
      }
    }
#else
    if (supertx_size < TX_32X32 && !skip) {
      av1_write_token(w, av1_ext_tx_tree,
                      cm->fc->inter_ext_tx_prob[supertx_size],
                      &ext_tx_encodings[mbmi->tx_type]);
    }
#endif  // CONFIG_EXT_TX

    if (!skip) {
      assert(*tok < tok_end);
      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        const struct macroblockd_plane *const pd = &xd->plane[plane];
        const int mbmi_txb_size = txsize_to_bsize[mbmi->tx_size];
        const BLOCK_SIZE plane_bsize = get_plane_block_size(mbmi_txb_size, pd);

        const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
        const int max_blocks_high = max_block_high(xd, plane_bsize, plane);

        int row, col;
        TX_SIZE tx =
            plane ? get_uv_tx_size(mbmi, &xd->plane[plane]) : mbmi->tx_size;
        BLOCK_SIZE txb_size = txsize_to_bsize[tx];

        const int stepr = tx_size_high_unit[txb_size];
        const int stepc = tx_size_wide_unit[txb_size];

        TOKEN_STATS token_stats;
        token_stats.cost = 0;
        for (row = 0; row < max_blocks_high; row += stepr)
          for (col = 0; col < max_blocks_wide; col += stepc)
            pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats);
        // Each plane's token run is terminated by an EOSB sentinel.
        assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
        (*tok)++;
      }
    }
#if CONFIG_VAR_TX
    xd->above_txfm_context = cm->above_txfm_context + mi_col;
    xd->left_txfm_context =
        xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
    set_txfm_ctxs(xd->mi[0]->mbmi.tx_size, bsw, bsh, skip, xd);
#endif
  }
#endif  // CONFIG_SUPERTX

// update partition context
#if CONFIG_EXT_PARTITION_TYPES
  update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
#else
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
#endif  // CONFIG_EXT_PARTITION_TYPES

#if CONFIG_DERING
  // Per-superblock dering gain, written once at superblock root when the
  // filter is active and the SB is not entirely skipped.
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128 && bsize == BLOCK_128X128 &&
      cm->dering_level != 0 && !sb_all_skip(cm, mi_row, mi_col)) {
    aom_write_literal(
        w,
        cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain,
        DERING_REFINEMENT_BITS);
  } else if (cm->sb_size == BLOCK_64X64 && bsize == BLOCK_64X64 &&
#else
  if (bsize == BLOCK_64X64 &&
#endif  // CONFIG_EXT_PARTITION
             cm->dering_level != 0 && !sb_all_skip(cm, mi_row, mi_col)) {
    aom_write_literal(
        w,
        cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain,
        DERING_REFINEMENT_BITS);
  }
#endif

#if CONFIG_CLPF
  // Per-superblock CLPF on/off flags, one bit per filter block that carries
  // a decision (CLPF_NOFLAG entries are implied and not transmitted).
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128 && bsize == BLOCK_128X128 &&
      cm->clpf_blocks && cm->clpf_strength_y && cm->clpf_size != CLPF_NOSIZE) {
    const int tl = mi_row * MI_SIZE / MIN_FB_SIZE * cm->clpf_stride +
                   mi_col * MI_SIZE / MIN_FB_SIZE;
    if (cm->clpf_size == CLPF_128X128 && cm->clpf_blocks[tl] != CLPF_NOFLAG) {
      aom_write_literal(w, cm->clpf_blocks[tl], 1);
    } else if (cm->clpf_size == CLPF_64X64) {
      const int tr = tl + 2;
      const int bl = tl + 2 * cm->clpf_stride;
      const int br = tr + 2 * cm->clpf_stride;

      // Up to four bits per SB.
      if (cm->clpf_blocks[tl] != CLPF_NOFLAG)
        aom_write_literal(w, cm->clpf_blocks[tl], 1);

      if (mi_col + MI_SIZE < cm->mi_cols && cm->clpf_blocks[tr] != CLPF_NOFLAG)
        aom_write_literal(w, cm->clpf_blocks[tr], 1);

      if (mi_row + MI_SIZE < cm->mi_rows && cm->clpf_blocks[bl] != CLPF_NOFLAG)
        aom_write_literal(w, cm->clpf_blocks[bl], 1);

      if (mi_row + MI_SIZE < cm->mi_rows && mi_col + MI_SIZE < cm->mi_cols &&
          cm->clpf_blocks[br] != CLPF_NOFLAG)
        aom_write_literal(w, cm->clpf_blocks[br], 1);
    } else if (cm->clpf_size == CLPF_32X32) {
      int i, j;
      const int size = 32 / MI_SIZE;
      // Up to sixteen bits per SB.
      for (i = 0; i < 4; ++i)
        for (j = 0; j < 4; ++j) {
          const int index = tl + i * cm->clpf_stride + j;
          if (mi_row + i * size < cm->mi_rows &&
              mi_col + j * size < cm->mi_cols &&
              cm->clpf_blocks[index] != CLPF_NOFLAG)
            aom_write_literal(w, cm->clpf_blocks[index], 1);
        }
    }
  } else if (cm->sb_size == BLOCK_64X64 && bsize == BLOCK_64X64 &&
#else
  if (bsize == BLOCK_64X64 &&
#endif  // CONFIG_EXT_PARTITION
             cm->clpf_blocks && cm->clpf_strength_y &&
             cm->clpf_size != CLPF_NOSIZE) {
    const int tl = mi_row * MI_SIZE / MIN_FB_SIZE * cm->clpf_stride +
                   mi_col * MI_SIZE / MIN_FB_SIZE;
    const int tr = tl + 1;
    const int bl = tl + cm->clpf_stride;
    const int br = tr + cm->clpf_stride;

    // Up to four bits per SB.
    // When clpf_size indicates a size larger than the SB size
    // (CLPF_128X128), one bit for every fourth SB will be transmitted
    // regardless of skip blocks.
    if (cm->clpf_blocks[tl] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[tl], 1);

    if (mi_col + MI_SIZE / 2 < cm->mi_cols &&
        cm->clpf_blocks[tr] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[tr], 1);

    if (mi_row + MI_SIZE / 2 < cm->mi_rows &&
        cm->clpf_blocks[bl] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[bl], 1);

    if (mi_row + MI_SIZE / 2 < cm->mi_rows &&
        mi_col + MI_SIZE / 2 < cm->mi_cols &&
        cm->clpf_blocks[br] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[br], 1);
  }
#endif  // CONFIG_CLPF
}
2501
Yaowu Xuf883b422016-08-30 14:01:10 -07002502static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile,
2503 aom_writer *const w, const TOKENEXTRA **tok,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002504 const TOKENEXTRA *const tok_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002505 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002506 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
2507 const int mi_row_start = tile->mi_row_start;
2508 const int mi_row_end = tile->mi_row_end;
2509 const int mi_col_start = tile->mi_col_start;
2510 const int mi_col_end = tile->mi_col_end;
2511 int mi_row, mi_col;
Yaowu Xuf883b422016-08-30 14:01:10 -07002512 av1_zero_above_context(cm, mi_col_start, mi_col_end);
Yushin Cho77bba8d2016-11-04 16:36:56 -07002513#if CONFIG_PVQ
2514 assert(cpi->td.mb.pvq_q->curr_pos == 0);
2515#endif
Arild Fuldseth07441162016-08-15 15:07:52 +02002516#if CONFIG_DELTA_Q
2517 if (cpi->common.delta_q_present_flag) {
2518 xd->prev_qindex = cpi->common.base_qindex;
2519 }
2520#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002521
2522 for (mi_row = mi_row_start; mi_row < mi_row_end; mi_row += cm->mib_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002523 av1_zero_left_context(xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002524
2525 for (mi_col = mi_col_start; mi_col < mi_col_end; mi_col += cm->mib_size) {
2526 write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, 0, mi_row, mi_col,
2527 cm->sb_size);
2528 }
2529 }
Yushin Cho77bba8d2016-11-04 16:36:56 -07002530#if CONFIG_PVQ
2531 // Check that the number of PVQ blocks encoded and written to the bitstream
2532 // are the same
2533 assert(cpi->td.mb.pvq_q->curr_pos == cpi->td.mb.pvq_q->last_pos);
2534 // Reset curr_pos in case we repack the bitstream
2535 cpi->td.mb.pvq_q->curr_pos = 0;
2536#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002537}
2538
Yushin Cho77bba8d2016-11-04 16:36:56 -07002539#if !CONFIG_PVQ
Yaowu Xuf883b422016-08-30 14:01:10 -07002540static void build_tree_distribution(AV1_COMP *cpi, TX_SIZE tx_size,
2541 av1_coeff_stats *coef_branch_ct,
2542 av1_coeff_probs_model *coef_probs) {
2543 av1_coeff_count *coef_counts = cpi->td.rd_counts.coef_counts[tx_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002544 unsigned int(*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
2545 cpi->common.counts.eob_branch[tx_size];
2546 int i, j, k, l, m;
2547
2548 for (i = 0; i < PLANE_TYPES; ++i) {
2549 for (j = 0; j < REF_TYPES; ++j) {
2550 for (k = 0; k < COEF_BANDS; ++k) {
2551 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002552 av1_tree_probs_from_distribution(av1_coef_tree,
2553 coef_branch_ct[i][j][k][l],
2554 coef_counts[i][j][k][l]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002555 coef_branch_ct[i][j][k][l][0][1] =
2556 eob_branch_ct[i][j][k][l] - coef_branch_ct[i][j][k][l][0][0];
2557 for (m = 0; m < UNCONSTRAINED_NODES; ++m)
2558 coef_probs[i][j][k][l][m] =
2559 get_binary_prob(coef_branch_ct[i][j][k][l][m][0],
2560 coef_branch_ct[i][j][k][l][m][1]);
2561 }
2562 }
2563 }
2564 }
2565}
2566
// Signal coefficient probability updates for one transform size.
//
// Two encoder strategies, selected by cpi->sf.use_fast_coef_updates:
//  - TWO_LOOP: a dry-run pass estimates total rate savings; a leading bit
//    says whether ANY update follows, and if so a second pass writes one
//    update flag (at prob `upd`) per node, each followed by the coded
//    probability delta when set.
//  - ONE_LOOP_REDUCED: single pass; the leading "updates exist" bit is
//    deferred until the first actual update, at which point the skipped
//    zero flags are back-filled.
// Both paths mutate old_coef_probs in place so encoder state matches what
// the decoder will reconstruct. The emitted bit sequence is normative.
static void update_coef_probs_common(aom_writer *const bc, AV1_COMP *cpi,
                                     TX_SIZE tx_size,
                                     av1_coeff_stats *frame_branch_ct,
                                     av1_coeff_probs_model *new_coef_probs) {
  av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
  const aom_prob upd = DIFF_UPDATE_PROB;
#if CONFIG_EC_ADAPT
  // With adaptive EC the last unconstrained node is not explicitly updated.
  const int entropy_nodes_update = UNCONSTRAINED_NODES - 1;
#else
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
#endif
  int i, j, k, l, t;
  int stepsize = cpi->sf.coeff_prob_appx_step;
#if CONFIG_TILE_GROUPS
  // Probability-update cost is weighted by the number of tile groups, since
  // headers are repeated per tile group.
  const int probwt = cpi->common.num_tg;
#else
  const int probwt = 1;
#endif

  switch (cpi->sf.use_fast_coef_updates) {
    case TWO_LOOP: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = { 0, 0 };
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                const aom_prob oldp = old_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;
                // The pivot node drives the full model; other nodes are
                // searched independently.
                if (t == PIVOT_NODE)
                  s = av1_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0], oldp, &newp, upd,
                      stepsize, probwt);
                else
                  s = av1_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd, probwt);

                if (s > 0 && newp != oldp) u = 1;
                if (u)
                  savings += s - (int)(av1_cost_zero(upd));
                else
                  savings -= (int)(av1_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        aom_write_bit(bc, 0);
        return;
      }
      aom_write_bit(bc, 1);
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = av1_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0], *oldp, &newp, upd,
                      stepsize, probwt);
                else
                  s = av1_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd,
                      probwt);
                if (s > 0 && newp != *oldp) u = 1;
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case ONE_LOOP_REDUCED: {
      int updates = 0;
      // Zero flags elided until the first update; replayed then.
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (t == PIVOT_NODE) {
                  s = av1_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0], *oldp, &newp, upd,
                      stepsize, probwt);
                } else {
                  s = av1_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd,
                      probwt);
                }

                if (s > 0 && newp != *oldp) u = 1;
                updates += u;
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  aom_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    aom_write(bc, 0, upd);
                }
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        aom_write_bit(bc, 0);  // no updates
      }
      return;
    }
    default: assert(0);
  }
}
2714
2715#if CONFIG_ENTROPY
2716// Calculate the token counts between subsequent subframe updates.
// Compute the delta in token/EOB counts contributed by one subframe
// interval (CONFIG_ENTROPY subframe probability updates).
//
// `index` selects the interval: for index == coef_probs_update_idx the
// delta is (current running totals - snapshot at max_idx); otherwise it is
// the difference between consecutive snapshots in subframe_stats.
// Results go into coef_counts / eob_counts; each delta is asserted
// non-negative since the buffers are cumulative snapshots.
static void get_coef_counts_diff(AV1_COMP *cpi, int index,
                                 av1_coeff_count coef_counts[TX_SIZES]
                                                            [PLANE_TYPES],
                                 unsigned int eob_counts[TX_SIZES][PLANE_TYPES]
                                                        [REF_TYPES][COEF_BANDS]
                                                        [COEFF_CONTEXTS]) {
  int i, j, k, l, m, tx_size, val;
  const int max_idx = cpi->common.coef_probs_update_idx;
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const int max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  const SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats;

  assert(max_idx < COEF_PROBS_BUFS);

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    for (i = 0; i < PLANE_TYPES; ++i)
      for (j = 0; j < REF_TYPES; ++j)
        for (k = 0; k < COEF_BANDS; ++k)
          for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
            if (index == max_idx) {
              // Last interval: running totals minus the latest snapshot.
              val =
                  cpi->common.counts.eob_branch[tx_size][i][j][k][l] -
                  subframe_stats->eob_counts_buf[max_idx][tx_size][i][j][k][l];
            } else {
              // Interior interval: difference of adjacent snapshots.
              val = subframe_stats->eob_counts_buf[index + 1][tx_size][i][j][k]
                                                  [l] -
                    subframe_stats->eob_counts_buf[index][tx_size][i][j][k][l];
            }
            assert(val >= 0);
            eob_counts[tx_size][i][j][k][l] = val;

            for (m = 0; m < ENTROPY_TOKENS; ++m) {
              if (index == max_idx) {
                val = cpi->td.rd_counts.coef_counts[tx_size][i][j][k][l][m] -
                      subframe_stats->coef_counts_buf[max_idx][tx_size][i][j][k]
                                                     [l][m];
              } else {
                val = subframe_stats->coef_counts_buf[index + 1][tx_size][i][j]
                                                     [k][l][m] -
                      subframe_stats->coef_counts_buf[index][tx_size][i][j][k]
                                                     [l][m];
              }
              assert(val >= 0);
              coef_counts[tx_size][i][j][k][l][m] = val;
            }
          }
}
2764
// Subframe variant of update_coef_probs_common (CONFIG_ENTROPY): the
// savings search considers the branch counts of every subframe interval
// (0..coef_probs_update_idx) jointly, via per-node copies gathered into
// this_branch_ct, so the signaled probabilities amortize well across the
// whole frame. Bitstream layout mirrors the common path: a leading
// "any update" bit (TWO_LOOP) or deferred first-update bit
// (ONE_LOOP_REDUCED), then per-node update flags and coded deltas.
// old_coef_probs is updated in place to keep encoder/decoder state in sync.
static void update_coef_probs_subframe(
    aom_writer *const bc, AV1_COMP *cpi, TX_SIZE tx_size,
    av1_coeff_stats branch_ct[COEF_PROBS_BUFS][TX_SIZES][PLANE_TYPES],
    av1_coeff_probs_model *new_coef_probs) {
  av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
  const aom_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  int stepsize = cpi->sf.coeff_prob_appx_step;
  const int max_idx = cpi->common.coef_probs_update_idx;
  int idx;
  // Per-node branch counts, one [ct0, ct1] pair per subframe buffer.
  unsigned int this_branch_ct[ENTROPY_NODES][COEF_PROBS_BUFS][2];

  switch (cpi->sf.use_fast_coef_updates) {
    case TWO_LOOP: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = { 0, 0 };
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // Gather this context's counts from all subframe buffers.
              for (t = 0; t < ENTROPY_NODES; ++t) {
                for (idx = 0; idx <= max_idx; ++idx) {
                  memcpy(this_branch_ct[t][idx],
                         branch_ct[idx][tx_size][i][j][k][l][t],
                         2 * sizeof(this_branch_ct[t][idx][0]));
                }
              }
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                const aom_prob oldp = old_coef_probs[i][j][k][l][t];
                int s, u = 0;

                if (t == PIVOT_NODE)
                  s = av1_prob_update_search_model_subframe(
                      this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
                      stepsize, max_idx);
                else
                  s = av1_prob_update_search_subframe(this_branch_ct[t], oldp,
                                                     &newp, upd, max_idx);
                if (s > 0 && newp != oldp) u = 1;
                if (u)
                  savings += s - (int)(av1_cost_zero(upd));
                else
                  savings -= (int)(av1_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        aom_write_bit(bc, 0);
        return;
      }
      aom_write_bit(bc, 1);
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < ENTROPY_NODES; ++t) {
                for (idx = 0; idx <= max_idx; ++idx) {
                  memcpy(this_branch_ct[t][idx],
                         branch_ct[idx][tx_size][i][j][k][l][t],
                         2 * sizeof(this_branch_ct[t][idx][0]));
                }
              }
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;

                if (t == PIVOT_NODE)
                  s = av1_prob_update_search_model_subframe(
                      this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
                      stepsize, max_idx);
                else
                  s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp,
                                                     &newp, upd, max_idx);
                if (s > 0 && newp != *oldp) u = 1;
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case ONE_LOOP_REDUCED: {
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < ENTROPY_NODES; ++t) {
                for (idx = 0; idx <= max_idx; ++idx) {
                  memcpy(this_branch_ct[t][idx],
                         branch_ct[idx][tx_size][i][j][k][l][t],
                         2 * sizeof(this_branch_ct[t][idx][0]));
                }
              }
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;

                if (t == PIVOT_NODE)
                  s = av1_prob_update_search_model_subframe(
                      this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
                      stepsize, max_idx);
                else
                  s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp,
                                                     &newp, upd, max_idx);
                if (s > 0 && newp != *oldp) u = 1;
                updates += u;
                if (u == 0 && updates == 0) {
                  // Defer zero flags until we know an update exists at all.
                  noupdates_before_first++;
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  aom_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    aom_write(bc, 0, upd);
                }
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        aom_write_bit(bc, 0);  // no updates
      }
      return;
    }
    default: assert(0);
  }
}
2922#endif // CONFIG_ENTROPY
2923
// Top-level coefficient-probability update: for each in-use transform size,
// decide whether to signal updates (a leading per-tx-size bit) and dispatch
// to the common or subframe update path.
//
// Heuristics: skip (write 0) when the tx size was rarely used this frame
// (<= 20 blocks) or when the speed feature restricts the search to 8x8 for
// sizes >= 16x16. Under CONFIG_ENTROPY + backward refresh, count snapshots
// are temporarily swapped in per subframe interval and restored afterwards.
// With CONFIG_EC_MULTISYMBOL, CDFs are re-derived once if anything updated.
static void update_coef_probs(AV1_COMP *cpi, aom_writer *w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
#if CONFIG_EC_MULTISYMBOL
  int update = 0;
#endif  // CONFIG_EC_MULTISYMBOL
#if CONFIG_ENTROPY
  AV1_COMMON *cm = &cpi->common;
  SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats;
  int i;
  // Sink for per-interval probabilities we only need the branch counts of.
  av1_coeff_probs_model dummy_frame_coef_probs[PLANE_TYPES];

  if (cm->do_subframe_update &&
      cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
    // Rewind to the probabilities in effect at the start of encoding and
    // recompute each interval's count deltas.
    av1_copy(cpi->common.fc->coef_probs,
             subframe_stats->enc_starting_coef_probs);
    for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) {
      get_coef_counts_diff(cpi, i, cpi->wholeframe_stats.coef_counts_buf[i],
                           cpi->wholeframe_stats.eob_counts_buf[i]);
    }
  }
#endif  // CONFIG_ENTROPY

  for (tx_size = 0; tx_size <= max_tx_size; ++tx_size) {
    av1_coeff_stats frame_branch_ct[PLANE_TYPES];
    av1_coeff_probs_model frame_coef_probs[PLANE_TYPES];
    if (cpi->td.counts->tx_size_totals[tx_size] <= 20 ||
        (tx_size >= TX_16X16 && cpi->sf.tx_size_search_method == USE_TX_8X8)) {
      // Too little data (or search disabled) to justify an update.
      aom_write_bit(w, 0);
    } else {
#if CONFIG_ENTROPY
      if (cm->do_subframe_update &&
          cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
        unsigned int this_eob_counts_copy[PLANE_TYPES][REF_TYPES][COEF_BANDS]
                                         [COEFF_CONTEXTS];
        av1_coeff_count coef_counts_copy[PLANE_TYPES];
        // Save the live counts; they are overwritten per interval below.
        av1_copy(this_eob_counts_copy, cpi->common.counts.eob_branch[tx_size]);
        av1_copy(coef_counts_copy, cpi->td.rd_counts.coef_counts[tx_size]);
        build_tree_distribution(cpi, tx_size, frame_branch_ct,
                                frame_coef_probs);
        for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) {
          av1_copy(cpi->common.counts.eob_branch[tx_size],
                   cpi->wholeframe_stats.eob_counts_buf[i][tx_size]);
          av1_copy(cpi->td.rd_counts.coef_counts[tx_size],
                   cpi->wholeframe_stats.coef_counts_buf[i][tx_size]);
          build_tree_distribution(cpi, tx_size, cpi->branch_ct_buf[i][tx_size],
                                  dummy_frame_coef_probs);
        }
        // Restore live counts before writing.
        av1_copy(cpi->common.counts.eob_branch[tx_size], this_eob_counts_copy);
        av1_copy(cpi->td.rd_counts.coef_counts[tx_size], coef_counts_copy);

        update_coef_probs_subframe(w, cpi, tx_size, cpi->branch_ct_buf,
                                   frame_coef_probs);
#if CONFIG_EC_MULTISYMBOL
        update = 1;
#endif  // CONFIG_EC_MULTISYMBOL
      } else {
#endif  // CONFIG_ENTROPY
        build_tree_distribution(cpi, tx_size, frame_branch_ct,
                                frame_coef_probs);
        update_coef_probs_common(w, cpi, tx_size, frame_branch_ct,
                                 frame_coef_probs);
#if CONFIG_EC_MULTISYMBOL
        update = 1;
#endif  // CONFIG_EC_MULTISYMBOL
#if CONFIG_ENTROPY
      }
#endif  // CONFIG_ENTROPY
    }
  }

#if CONFIG_ENTROPY
  av1_copy(cm->starting_coef_probs, cm->fc->coef_probs);
  av1_copy(subframe_stats->coef_probs_buf[0], cm->fc->coef_probs);
  if (cm->do_subframe_update &&
      cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
    unsigned int eob_counts_copy[TX_SIZES][PLANE_TYPES][REF_TYPES][COEF_BANDS]
                                [COEFF_CONTEXTS];
    av1_copy(eob_counts_copy, cm->counts.eob_branch);
    // Replay each interval's adaptation to record the per-interval
    // probability snapshots the decoder will mirror.
    for (i = 1; i <= cpi->common.coef_probs_update_idx; ++i) {
      for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
        av1_full_to_model_counts(cm->counts.coef[tx_size],
                                 subframe_stats->coef_counts_buf[i][tx_size]);
      av1_copy(cm->counts.eob_branch, subframe_stats->eob_counts_buf[i]);
      av1_partial_adapt_probs(cm, 0, 0);
      av1_copy(subframe_stats->coef_probs_buf[i], cm->fc->coef_probs);
    }
    av1_copy(cm->fc->coef_probs, subframe_stats->coef_probs_buf[0]);
    av1_copy(cm->counts.eob_branch, eob_counts_copy);
  }
#endif  // CONFIG_ENTROPY
#if CONFIG_EC_MULTISYMBOL
  if (update) av1_coef_pareto_cdfs(cpi->common.fc);
#endif  // CONFIG_EC_MULTISYMBOL
}
Yushin Cho77bba8d2016-11-04 16:36:56 -07003020#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003021
3022#if CONFIG_LOOP_RESTORATION
// Write the frame-level loop-restoration type to the uncompressed header.
//
// Variable-length code as emitted below:
//   RESTORE_NONE         -> 0 0
//   RESTORE_SWITCHABLE   -> 0 1
//   RESTORE_WIENER       -> 1 0
//   RESTORE_SGRPROJ      -> 1 1 0
//   RESTORE_DOMAINTXFMRF -> 1 1 1
// The decoder's parse must match this bit order exactly.
static void encode_restoration_mode(AV1_COMMON *cm,
                                    struct aom_write_bit_buffer *wb) {
  RestorationInfo *rst = &cm->rst_info;
  switch (rst->frame_restoration_type) {
    case RESTORE_NONE:
      aom_wb_write_bit(wb, 0);
      aom_wb_write_bit(wb, 0);
      break;
    case RESTORE_WIENER:
      aom_wb_write_bit(wb, 1);
      aom_wb_write_bit(wb, 0);
      break;
    case RESTORE_SGRPROJ:
      aom_wb_write_bit(wb, 1);
      aom_wb_write_bit(wb, 1);
      aom_wb_write_bit(wb, 0);
      break;
    case RESTORE_DOMAINTXFMRF:
      aom_wb_write_bit(wb, 1);
      aom_wb_write_bit(wb, 1);
      aom_wb_write_bit(wb, 1);
      break;
    case RESTORE_SWITCHABLE:
      aom_wb_write_bit(wb, 0);
      aom_wb_write_bit(wb, 1);
      break;
    default: assert(0);
  }
}
3052
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003053static void write_wiener_filter(WienerInfo *wiener_info, aom_writer *wb) {
3054 aom_write_literal(wb, wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV,
3055 WIENER_FILT_TAP0_BITS);
3056 aom_write_literal(wb, wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV,
3057 WIENER_FILT_TAP1_BITS);
3058 aom_write_literal(wb, wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV,
3059 WIENER_FILT_TAP2_BITS);
3060 aom_write_literal(wb, wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV,
3061 WIENER_FILT_TAP0_BITS);
3062 aom_write_literal(wb, wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV,
3063 WIENER_FILT_TAP1_BITS);
3064 aom_write_literal(wb, wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV,
3065 WIENER_FILT_TAP2_BITS);
3066}
3067
// Signal one tile's self-guided projection (sgrproj) restoration filter:
// the parameter-set index `ep`, then the two projection coefficients coded
// as unsigned offsets from their respective minima.
static void write_sgrproj_filter(SgrprojInfo *sgrproj_info, aom_writer *wb) {
  aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS);
  aom_write_literal(wb, sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
                    SGRPROJ_PRJ_BITS);
  aom_write_literal(wb, sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
                    SGRPROJ_PRJ_BITS);
}
3075
// Signal one tile's domain-transform restoration filter: a single
// fixed-width sigma_r parameter.
static void write_domaintxfmrf_filter(DomaintxfmrfInfo *domaintxfmrf_info,
                                      aom_writer *wb) {
  aom_write_literal(wb, domaintxfmrf_info->sigma_r, DOMAINTXFMRF_PARAMS_BITS);
}
3080
// Write per-tile loop-restoration data, according to the frame restoration
// type already signaled by encode_restoration_mode().
//
// RESTORE_SWITCHABLE: each tile codes its own type with a token tree, then
// that type's parameters. Fixed-type frames (WIENER / SGRPROJ /
// DOMAINTXFMRF): each tile codes an on/off flag (level != 0) against the
// type-specific "none" probability, then parameters when on.
// RESTORE_NONE frames write nothing here.
static void encode_restoration(AV1_COMMON *cm, aom_writer *wb) {
  int i;
  RestorationInfo *rsi = &cm->rst_info;
  if (rsi->frame_restoration_type != RESTORE_NONE) {
    if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
      // RESTORE_SWITCHABLE
      for (i = 0; i < cm->rst_internal.ntiles; ++i) {
        av1_write_token(
            wb, av1_switchable_restore_tree, cm->fc->switchable_restore_prob,
            &switchable_restore_encodings[rsi->restoration_type[i]]);
        if (rsi->restoration_type[i] == RESTORE_WIENER) {
          write_wiener_filter(&rsi->wiener_info[i], wb);
        } else if (rsi->restoration_type[i] == RESTORE_SGRPROJ) {
          write_sgrproj_filter(&rsi->sgrproj_info[i], wb);
        } else if (rsi->restoration_type[i] == RESTORE_DOMAINTXFMRF) {
          write_domaintxfmrf_filter(&rsi->domaintxfmrf_info[i], wb);
        }
        // RESTORE_NONE tiles carry no extra data.
      }
    } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
      for (i = 0; i < cm->rst_internal.ntiles; ++i) {
        aom_write(wb, rsi->wiener_info[i].level != 0, RESTORE_NONE_WIENER_PROB);
        if (rsi->wiener_info[i].level) {
          write_wiener_filter(&rsi->wiener_info[i], wb);
        }
      }
    } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
      for (i = 0; i < cm->rst_internal.ntiles; ++i) {
        aom_write(wb, rsi->sgrproj_info[i].level != 0,
                  RESTORE_NONE_SGRPROJ_PROB);
        if (rsi->sgrproj_info[i].level) {
          write_sgrproj_filter(&rsi->sgrproj_info[i], wb);
        }
      }
    } else if (rsi->frame_restoration_type == RESTORE_DOMAINTXFMRF) {
      for (i = 0; i < cm->rst_internal.ntiles; ++i) {
        aom_write(wb, rsi->domaintxfmrf_info[i].level != 0,
                  RESTORE_NONE_DOMAINTXFMRF_PROB);
        if (rsi->domaintxfmrf_info[i].level) {
          write_domaintxfmrf_filter(&rsi->domaintxfmrf_info[i], wb);
        }
      }
    }
  }
}
3125#endif // CONFIG_LOOP_RESTORATION
3126
Yaowu Xuf883b422016-08-30 14:01:10 -07003127static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003128 int i;
3129 struct loopfilter *lf = &cm->lf;
3130
3131 // Encode the loop filter level and type
Yaowu Xuf883b422016-08-30 14:01:10 -07003132 aom_wb_write_literal(wb, lf->filter_level, 6);
3133 aom_wb_write_literal(wb, lf->sharpness_level, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003134
3135 // Write out loop filter deltas applied at the MB level based on mode or
3136 // ref frame (if they are enabled).
Yaowu Xuf883b422016-08-30 14:01:10 -07003137 aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003138
3139 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003140 aom_wb_write_bit(wb, lf->mode_ref_delta_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003141 if (lf->mode_ref_delta_update) {
3142 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
3143 const int delta = lf->ref_deltas[i];
3144 const int changed = delta != lf->last_ref_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07003145 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003146 if (changed) {
3147 lf->last_ref_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07003148 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003149 }
3150 }
3151
3152 for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
3153 const int delta = lf->mode_deltas[i];
3154 const int changed = delta != lf->last_mode_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07003155 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003156 if (changed) {
3157 lf->last_mode_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07003158 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003159 }
3160 }
3161 }
3162 }
3163}
3164
3165#if CONFIG_CLPF
Yaowu Xuf883b422016-08-30 14:01:10 -07003166static void encode_clpf(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Steinar Midtskogenecf9a0c2016-09-13 16:37:13 +02003167 aom_wb_write_literal(wb, cm->clpf_strength_y, 2);
3168 aom_wb_write_literal(wb, cm->clpf_strength_u, 2);
3169 aom_wb_write_literal(wb, cm->clpf_strength_v, 2);
3170 if (cm->clpf_strength_y) {
Steinar Midtskogend06588a2016-05-06 13:48:20 +02003171 aom_wb_write_literal(wb, cm->clpf_size, 2);
Steinar Midtskogend06588a2016-05-06 13:48:20 +02003172 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003173}
3174#endif
3175
3176#if CONFIG_DERING
// Write the frame-level deringing filter level as a fixed-width literal.
static void encode_dering(int level, struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, level, DERING_LEVEL_BITS);
}
3180#endif // CONFIG_DERING
3181
// Write an optional quantizer delta: a presence bit, followed by the delta
// as a 6-bit inverse-signed literal only when it is nonzero.
static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) {
  const int is_present = (delta_q != 0);
  aom_wb_write_bit(wb, is_present);
  if (is_present) aom_wb_write_inv_signed_literal(wb, delta_q, 6);
}
3190
// Write the quantization parameters: base q-index, then the three optional
// deltas (luma DC, chroma DC, chroma AC). With CONFIG_AOM_QM, also signal
// whether quantization matrices are used and, if so, their level range.
static void encode_quantization(const AV1_COMMON *const cm,
                                struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
#if CONFIG_AOM_QM
  aom_wb_write_bit(wb, cm->using_qmatrix);
  if (cm->using_qmatrix) {
    aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS);
    aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS);
  }
#endif
}
3205
// Write the segmentation parameters to the uncompressed header: the enable
// flag, the map-update / temporal-update flags (only choices on inter,
// non-error-resilient frames — otherwise their values are fixed and
// asserted rather than coded), and, when feature data is updated, the
// per-segment feature flags and values.
//
// Side effect: when the map is updated, av1_choose_segmap_coding_method()
// is invoked here to pick (and record in cm) the spatial vs. temporal
// coding strategy before its choice is written.
static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
                                struct aom_write_bit_buffer *wb) {
  int i, j;
  const struct segmentation *seg = &cm->seg;

  aom_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled) return;

  // Segmentation map
  if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
    aom_wb_write_bit(wb, seg->update_map);
  } else {
    // Intra-only / error-resilient frames must always refresh the map.
    assert(seg->update_map == 1);
  }
  if (seg->update_map) {
    // Select the coding strategy (temporal or spatial)
    av1_choose_segmap_coding_method(cm, xd);

    // Write out the chosen coding method.
    if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
      aom_wb_write_bit(wb, seg->temporal_update);
    } else {
      // Temporal prediction needs a previous map; unavailable here.
      assert(seg->temporal_update == 0);
    }
  }

  // Segmentation data
  aom_wb_write_bit(wb, seg->update_data);
  if (seg->update_data) {
    aom_wb_write_bit(wb, seg->abs_delta);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        const int active = segfeature_active(seg, i, j);
        aom_wb_write_bit(wb, active);
        if (active) {
          const int data = get_segdata(seg, i, j);
          const int data_max = av1_seg_feature_data_max(j);

          if (av1_is_segfeature_signed(j)) {
            // Magnitude first, then the sign bit.
            encode_unsigned_max(wb, abs(data), data_max);
            aom_wb_write_bit(wb, data < 0);
          } else {
            encode_unsigned_max(wb, data, data_max);
          }
        }
      }
    }
  }
}
3256
#if !CONFIG_EC_ADAPT
// Writes forward updates of the segmentation probabilities into the
// compressed header (only needed when the entropy coder does not adapt
// probabilities implicitly, i.e. !CONFIG_EC_ADAPT).
// probwt scales the update cost by the number of tile groups, since each
// tile group repeats the headers.
static void update_seg_probs(AV1_COMP *cpi, aom_writer *w) {
  AV1_COMMON *cm = &cpi->common;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif

  // Nothing to update if segmentation is off or the map is not refreshed.
  if (!cm->seg.enabled || !cm->seg.update_map) return;

  if (cm->seg.temporal_update) {
    int i;

    // Temporal coding: update the per-context prediction probabilities,
    // then the tree probabilities conditioned on misprediction counts.
    for (i = 0; i < PREDICTION_PROBS; i++)
      av1_cond_prob_diff_update(w, &cm->fc->seg.pred_probs[i],
                                cm->counts.seg.pred[i], probwt);

    prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs,
                     cm->counts.seg.tree_mispred, MAX_SEGMENTS, probwt, w);
  } else {
    // Spatial coding: tree probabilities use the total segment counts.
    prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs,
                     cm->counts.seg.tree_total, MAX_SEGMENTS, probwt, w);
  }
}
#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003283
// Signals the frame-level transform mode.
// Layout: one bit for "per-block select"; otherwise a 2-bit code for the
// largest allowed fixed size, plus (with CONFIG_TX64X64) one extra bit to
// distinguish ALLOW_64X64 from ALLOW_32X32 when the 2-bit code saturates.
static void write_tx_mode(TX_MODE mode, struct aom_write_bit_buffer *wb) {
#if CONFIG_TX64X64
  aom_wb_write_bit(wb, mode == TX_MODE_SELECT);
  if (mode != TX_MODE_SELECT) {
    // Clamp to ALLOW_32X32 so the value still fits in 2 bits.
    aom_wb_write_literal(wb, AOMMIN(mode, ALLOW_32X32), 2);
    if (mode >= ALLOW_32X32) aom_wb_write_bit(wb, mode == ALLOW_64X64);
  }
#else
  aom_wb_write_bit(wb, mode == TX_MODE_SELECT);
  if (mode != TX_MODE_SELECT) aom_wb_write_literal(wb, mode, 2);
#endif  // CONFIG_TX64X64
}
3296
// Writes forward updates of the tx-size tree probabilities when the frame
// uses per-block transform-size selection (TX_MODE_SELECT).
// Tree i has i + 2 symbols (larger max depth => more sizes to choose from);
// probwt scales update costs by the number of tile groups.
static void update_txfm_probs(AV1_COMMON *cm, aom_writer *w,
                              FRAME_COUNTS *counts) {
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  if (cm->tx_mode == TX_MODE_SELECT) {
    int i, j;
    for (i = 0; i < MAX_TX_DEPTH; ++i)
      for (j = 0; j < TX_SIZE_CONTEXTS; ++j)
        prob_diff_update(av1_tx_size_tree[i], cm->fc->tx_size_probs[i][j],
                         counts->tx_size[i][j], i + 2, probwt, w);
  }
}
3312
// Signals the frame-level interpolation filter: one bit for SWITCHABLE
// (per-block signaling), otherwise a fixed-length literal naming the
// single filter used for the whole frame.
static void write_frame_interp_filter(InterpFilter filter,
                                      struct aom_write_bit_buffer *wb) {
  aom_wb_write_bit(wb, filter == SWITCHABLE);
  if (filter != SWITCHABLE)
    aom_wb_write_literal(wb, filter, LOG_SWITCHABLE_FILTERS);
}
3319
Yaowu Xuf883b422016-08-30 14:01:10 -07003320static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003321 if (cm->interp_filter == SWITCHABLE) {
3322 // Check to see if only one of the filters is actually used
3323 int count[SWITCHABLE_FILTERS];
3324 int i, j, c = 0;
3325 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
3326 count[i] = 0;
3327 for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
3328 count[i] += counts->switchable_interp[j][i];
3329 c += (count[i] > 0);
3330 }
3331 if (c == 1) {
3332 // Only one filter is used. So set the filter at frame level
3333 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
3334 if (count[i]) {
3335 cm->interp_filter = i;
3336 break;
3337 }
3338 }
3339 }
3340 }
3341}
3342
// Writes the tiling configuration to the uncompressed header.
// With CONFIG_EXT_TILE: explicit tile width/height in superblock units
// (5-bit fields for 128x128 superblocks, 6-bit otherwise).
// Without: VP9-style unary-coded log2 tile columns plus up to two bits for
// log2 tile rows.
static void write_tile_info(const AV1_COMMON *const cm,
                            struct aom_write_bit_buffer *wb) {
#if CONFIG_EXT_TILE
  // Round the pixel tile dimensions up to whole superblocks, then express
  // them in superblock units.
  const int tile_width =
      ALIGN_POWER_OF_TWO(cm->tile_width, cm->mib_size_log2) >>
      cm->mib_size_log2;
  const int tile_height =
      ALIGN_POWER_OF_TWO(cm->tile_height, cm->mib_size_log2) >>
      cm->mib_size_log2;

  assert(tile_width > 0);
  assert(tile_height > 0);

// Write the tile sizes
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128) {
    assert(tile_width <= 32);
    assert(tile_height <= 32);
    aom_wb_write_literal(wb, tile_width - 1, 5);
    aom_wb_write_literal(wb, tile_height - 1, 5);
  } else
#endif  // CONFIG_EXT_PARTITION
  {
    assert(tile_width <= 64);
    assert(tile_height <= 64);
    aom_wb_write_literal(wb, tile_width - 1, 6);
    aom_wb_write_literal(wb, tile_height - 1, 6);
  }
#else
  int min_log2_tile_cols, max_log2_tile_cols, ones;
  av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns: unary code of (log2_tile_cols - min), terminated by a 0 bit
  // unless the maximum was reached.
  ones = cm->log2_tile_cols - min_log2_tile_cols;
  while (ones--) aom_wb_write_bit(wb, 1);

  if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0);

  // rows: 0, 10, or 11 for log2_tile_rows of 0, 1, or 2.
  aom_wb_write_bit(wb, cm->log2_tile_rows != 0);
  if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1);
#endif  // CONFIG_EXT_TILE

#if CONFIG_DEBLOCKING_ACROSS_TILES
  aom_wb_write_bit(wb, cm->loop_filter_across_tiles_enabled);
#endif  // CONFIG_DEBLOCKING_ACROSS_TILES
}
3390
// Builds the reference-frame refresh bitmask written in the frame header:
// one bit per reference slot that the current coded frame will overwrite.
static int get_refresh_mask(AV1_COMP *cpi) {
  int refresh_mask = 0;

#if CONFIG_EXT_REFS
  // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
  // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
  // the 3 LAST reference frames will be updated accordingly, i.e.:
  // (1) The original virtual index for LAST3_FRAME will become the new virtual
  // index for LAST_FRAME; and
  // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
  // shifted and become the new virtual indexes for LAST2_FRAME and
  // LAST3_FRAME.
  refresh_mask |=
      (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]);
  if (cpi->rc.is_bwd_ref_frame && cpi->num_extra_arfs) {
    // We have swapped the virtual indices
    refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->arf_map[0]);
  } else {
    refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
  }
#else
  refresh_mask |= (cpi->refresh_last_frame << cpi->lst_fb_idx);
#endif  // CONFIG_EXT_REFS

  if (av1_preserve_existing_gf(cpi)) {
    // We have decided to preserve the previously existing golden frame as our
    // new ARF frame. However, in the short term we leave it in the GF slot and,
    // if we're updating the GF with the current decoded frame, we save it
    // instead to the ARF slot.
    // Later, in the function av1_encoder.c:av1_update_reference_frames() we
    // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
    // there so that it can be done outside of the recode loop.
    // Note: This is highly specific to the use of ARF as a forward reference,
    // and this needs to be generalized as other uses are implemented
    // (like RTC/temporal scalability).
    return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx);
  } else {
    // Pick the ARF slot to refresh; with EXT_REFS the slot comes from the
    // GF group's arf_map, otherwise multi-ARF (2-pass only) may redirect it.
    int arf_idx = cpi->alt_fb_idx;
#if CONFIG_EXT_REFS
    const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
    arf_idx = cpi->arf_map[gf_group->arf_update_idx[gf_group->index]];
#else
    if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
      const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
      arf_idx = gf_group->arf_update_idx[gf_group->index];
    }
#endif  // CONFIG_EXT_REFS
    return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
           (cpi->refresh_alt_ref_frame << arf_idx);
  }
}
3442
#if CONFIG_EXT_TILE
// Searches for an already-coded tile whose payload is byte-identical to the
// tile at (tile_row, tile_col), so the current tile can be coded as a "copy
// tile" (a header-only reference). Returns the row offset (> 0) to the
// identical tile, or 0 if none was found.
// Tile payloads start 4 bytes in (after the 32-bit tile header).
static INLINE int find_identical_tile(
    const int tile_row, const int tile_col,
    TileBufferEnc (*const tile_buffers)[1024]) {
  const MV32 candidate_offset[1] = { { 1, 0 } };
  const uint8_t *const cur_tile_data =
      tile_buffers[tile_row][tile_col].data + 4;
  const unsigned int cur_tile_size = tile_buffers[tile_row][tile_col].size;

  int i;

  if (tile_row == 0) return 0;

  // (TODO: yunqingwang) For now, only above tile is checked and used.
  // More candidates such as left tile can be added later.
  for (i = 0; i < 1; i++) {
    int row_offset = candidate_offset[0].row;
    int col_offset = candidate_offset[0].col;
    int row = tile_row - row_offset;
    int col = tile_col - col_offset;
    uint8_t tile_hdr;
    const uint8_t *tile_data;
    TileBufferEnc *candidate;

    if (row < 0 || col < 0) continue;

    // First header byte of the candidate; bit 7 marks a copy tile.
    tile_hdr = *(tile_buffers[row][col].data);

    // Read out tcm bit
    if ((tile_hdr >> 7) == 1) {
      // The candidate is a copy tile itself: chase the chain to the
      // original tile it points at (offset stored in the low 7 bits).
      row_offset += tile_hdr & 0x7f;
      row = tile_row - row_offset;
    }

    candidate = &tile_buffers[row][col];

    // Offsets must fit in 7 bits; sizes must match before comparing bytes.
    if (row_offset >= 128 || candidate->size != cur_tile_size) continue;

    tile_data = candidate->data + 4;

    if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;

    // Identical tile found
    assert(row_offset > 0);
    return row_offset;
  }

  // No identical tile found
  return 0;
}
#endif  // CONFIG_EXT_TILE
3495
// Encodes every tile's mode/token data into the output buffer and returns
// the total number of bytes written. Tracks the largest tile (and, for
// EXT_TILE, tile column) so headers can later be compacted by remux_tiles.
// With CONFIG_TILE_GROUPS the frame headers are also written here and are
// re-inserted before each new tile group when the group size / MTU limit is
// exceeded (hence the memmove bookkeeping below).
#if CONFIG_TILE_GROUPS
static uint32_t write_tiles(AV1_COMP *const cpi,
                            struct aom_write_bit_buffer *wb,
                            unsigned int *max_tile_size,
                            unsigned int *max_tile_col_size) {
#else
static uint32_t write_tiles(AV1_COMP *const cpi, uint8_t *const dst,
                            unsigned int *max_tile_size,
                            unsigned int *max_tile_col_size) {
#endif
  const AV1_COMMON *const cm = &cpi->common;
#if CONFIG_ANS
  struct BufAnsCoder *buf_ans = &cpi->buf_ans;
#else
  aom_writer mode_bc;
#endif  // CONFIG_ANS
  int tile_row, tile_col;
  TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
  TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
  size_t total_size = 0;
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;
  unsigned int tile_size = 0;
#if CONFIG_TILE_GROUPS
  const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
  const int have_tiles = n_log2_tiles > 0;
  size_t comp_hdr_size;
  // Fixed size tile groups for the moment
  const int num_tg_hdrs = cm->num_tg;
  const int tg_size = (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
  int tile_count = 0;
  int uncompressed_hdr_size = 0;
  uint8_t *dst = NULL;
  struct aom_write_bit_buffer comp_hdr_len_wb;
  struct aom_write_bit_buffer tg_params_wb;
  int saved_offset;
  int mtu_size = cpi->oxcf.mtu;  // 0 means no MTU limit; use tg_size instead
  int curr_tg_data_size = 0;
  int hdr_size;
#endif
#if CONFIG_EXT_TILE
  const int have_tiles = tile_cols * tile_rows > 1;
#endif  // CONFIG_EXT_TILE

  *max_tile_size = 0;
  *max_tile_col_size = 0;

// All tile size fields are output on 4 bytes. A call to remux_tiles will
// later compact the data if smaller headers are adequate.

#if CONFIG_EXT_TILE
  // EXT_TILE layout: column-major, each non-last column prefixed with a
  // 4-byte column size, and every tile (even the last) carrying a 4-byte
  // header when tiling is actually in use.
  for (tile_col = 0; tile_col < tile_cols; tile_col++) {
    TileInfo tile_info;
    const int is_last_col = (tile_col == tile_cols - 1);
    const size_t col_offset = total_size;

    av1_tile_set_col(&tile_info, cm, tile_col);

    // The last column does not have a column header
    if (!is_last_col) total_size += 4;

    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
      TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
      const int data_offset = have_tiles ? 4 : 0;

      av1_tile_set_row(&tile_info, cm, tile_row);

      buf->data = dst + total_size;

      // Is CONFIG_EXT_TILE = 1, every tile in the row has a header,
      // even for the last one, unless no tiling is used at all.
      total_size += data_offset;
#if !CONFIG_ANS
      aom_start_encode(&mode_bc, buf->data + data_offset);
      write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
      assert(tok == tok_end);
      aom_stop_encode(&mode_bc);
      tile_size = mode_bc.pos;
#else
      buf_ans_write_init(buf_ans, buf->data + data_offset);
      write_modes(cpi, &tile_info, buf_ans, &tok, tok_end);
      assert(tok == tok_end);
      aom_buf_ans_flush(buf_ans);
      tile_size = buf_ans_write_end(buf_ans);
#endif  // !CONFIG_ANS

      buf->size = tile_size;

      // Record the maximum tile size we see, so we can compact headers later.
      *max_tile_size = AOMMAX(*max_tile_size, tile_size);

      if (have_tiles) {
        // tile header: size of this tile, or copy offset
        uint32_t tile_header = tile_size;

        // Check if this tile is a copy tile.
        // Very low chances to have copy tiles on the key frames, so don't
        // search on key frames to reduce unnecessary search.
        if (cm->frame_type != KEY_FRAME) {
          const int idendical_tile_offset =
              find_identical_tile(tile_row, tile_col, tile_buffers);

          if (idendical_tile_offset > 0) {
            // Copy tile: emit no payload; header carries the offset with
            // the top bit set (shifted into the header's high byte).
            tile_size = 0;
            tile_header = idendical_tile_offset | 0x80;
            tile_header <<= 24;
          }
        }

        mem_put_le32(buf->data, tile_header);
      }

      total_size += tile_size;
    }

    if (!is_last_col) {
      size_t col_size = total_size - col_offset - 4;
      mem_put_le32(dst + col_offset, col_size);

      // If it is not final packing, record the maximum tile column size we see,
      // otherwise, check if the tile size is out of the range.
      *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
    }
  }
#else
#if CONFIG_TILE_GROUPS
  write_uncompressed_header(cpi, wb);

  // Write the tile length code. Use full 32 bit length fields for the moment
  aom_wb_write_literal(wb, 3, 2);

  /* Write a placeholder for the number of tiles in each tile group */
  tg_params_wb = *wb;
  saved_offset = wb->bit_offset;
  if (have_tiles) aom_wb_write_literal(wb, 0, n_log2_tiles * 2);

  /* Write a placeholder for the compressed header length */
  comp_hdr_len_wb = *wb;
  aom_wb_write_literal(wb, 0, 16);

  uncompressed_hdr_size = aom_wb_bytes_written(wb);
  dst = wb->bit_buffer;
  comp_hdr_size = write_compressed_header(cpi, dst + uncompressed_hdr_size);
  aom_wb_overwrite_literal(&comp_hdr_len_wb, (int)(comp_hdr_size), 16);
  hdr_size = uncompressed_hdr_size + comp_hdr_size;
  total_size += hdr_size;
#endif

  for (tile_row = 0; tile_row < tile_rows; tile_row++) {
    TileInfo tile_info;
#if !CONFIG_TILE_GROUPS
    const int is_last_row = (tile_row == tile_rows - 1);
#endif
    av1_tile_set_row(&tile_info, cm, tile_row);

    for (tile_col = 0; tile_col < tile_cols; tile_col++) {
      const int tile_idx = tile_row * tile_cols + tile_col;
      TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
#if CONFIG_PVQ
      TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
#endif
      const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
#if !CONFIG_TILE_GROUPS
      const int is_last_col = (tile_col == tile_cols - 1);
      const int is_last_tile = is_last_col && is_last_row;
      (void)tile_idx;
#else
      // All tiles in a tile group have a length
      const int is_last_tile = 0;

      // Start a new tile group when either the fixed group size or the MTU
      // byte budget has been exceeded.
      if ((!mtu_size && tile_count > tg_size) ||
          (mtu_size && tile_count && curr_tg_data_size >= mtu_size)) {
        // We've exceeded the packet size
        if (tile_count > 1) {
          /* The last tile exceeded the packet size. The tile group size
             should therefore be tile_count-1.
             Move the last tile and insert headers before it
           */
          int old_total_size = total_size - tile_size - 4;
          memmove(dst + old_total_size + hdr_size, dst + old_total_size,
                  (tile_size + 4) * sizeof(uint8_t));
          // Copy uncompressed header
          memmove(dst + old_total_size, dst,
                  uncompressed_hdr_size * sizeof(uint8_t));
          // Write the number of tiles in the group into the last uncompressed
          // header before the one we've just inserted
          aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
                                   n_log2_tiles);
          aom_wb_overwrite_literal(&tg_params_wb, tile_count - 2, n_log2_tiles);
          // Update the pointer to the last TG params
          tg_params_wb.bit_offset = saved_offset + 8 * old_total_size;
          // Copy compressed header
          memmove(dst + old_total_size + uncompressed_hdr_size,
                  dst + uncompressed_hdr_size, comp_hdr_size * sizeof(uint8_t));
          total_size += hdr_size;
          tile_count = 1;
          curr_tg_data_size = hdr_size + tile_size + 4;

        } else {
          // We exceeded the packet size in just one tile
          // Copy uncompressed header
          memmove(dst + total_size, dst,
                  uncompressed_hdr_size * sizeof(uint8_t));
          // Write the number of tiles in the group into the last uncompressed
          // header
          aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
                                   n_log2_tiles);
          aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1, n_log2_tiles);
          tg_params_wb.bit_offset = saved_offset + 8 * total_size;
          // Copy compressed header
          memmove(dst + total_size + uncompressed_hdr_size,
                  dst + uncompressed_hdr_size, comp_hdr_size * sizeof(uint8_t));
          total_size += hdr_size;
          tile_count = 0;
          curr_tg_data_size = hdr_size;
        }
      }
      tile_count++;
#endif
      av1_tile_set_col(&tile_info, cm, tile_col);

      buf->data = dst + total_size;

      // The last tile does not have a header.
      if (!is_last_tile) total_size += 4;

#if CONFIG_ANS
      buf_ans_write_init(buf_ans, dst + total_size);
      write_modes(cpi, &tile_info, buf_ans, &tok, tok_end);
      assert(tok == tok_end);
      aom_buf_ans_flush(buf_ans);
      tile_size = buf_ans_write_end(buf_ans);
#else
      aom_start_encode(&mode_bc, dst + total_size);
#if CONFIG_PVQ
      // NOTE: This will not work with CONFIG_ANS turned on.
      od_adapt_ctx_reset(&cpi->td.mb.daala_enc.state.adapt, 0);
      cpi->td.mb.pvq_q = &this_tile->pvq_q;
#endif
      write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
      assert(tok == tok_end);
      aom_stop_encode(&mode_bc);
      tile_size = mode_bc.pos;
#endif  // CONFIG_ANS
#if CONFIG_PVQ
      cpi->td.mb.pvq_q = NULL;
#endif

      assert(tile_size > 0);

#if CONFIG_TILE_GROUPS
      curr_tg_data_size += tile_size + 4;
#endif
      buf->size = tile_size;

      if (!is_last_tile) {
        *max_tile_size = AOMMAX(*max_tile_size, tile_size);
        // size of this tile
        mem_put_le32(buf->data, tile_size);
      }

      total_size += tile_size;
    }
  }
#if CONFIG_TILE_GROUPS
  // Write the final tile group size
  if (n_log2_tiles) {
    aom_wb_write_literal(&tg_params_wb, (1 << n_log2_tiles) - tile_count,
                         n_log2_tiles);
    aom_wb_write_literal(&tg_params_wb, tile_count - 1, n_log2_tiles);
  }
#endif
#endif  // CONFIG_EXT_TILE
  return (uint32_t)total_size;
}
3774
// Signals the render (display) size: a single "differs from coded size"
// bit, followed by 16-bit width-1 / height-1 fields only when it differs.
static void write_render_size(const AV1_COMMON *cm,
                              struct aom_write_bit_buffer *wb) {
  const int scaling_active =
      cm->width != cm->render_width || cm->height != cm->render_height;
  aom_wb_write_bit(wb, scaling_active);
  if (scaling_active) {
    aom_wb_write_literal(wb, cm->render_width - 1, 16);
    aom_wb_write_literal(wb, cm->render_height - 1, 16);
  }
}
3785
// Writes the coded frame size as 16-bit width-1 / height-1 fields, then the
// render-size syntax.
static void write_frame_size(const AV1_COMMON *cm,
                             struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, cm->width - 1, 16);
  aom_wb_write_literal(wb, cm->height - 1, 16);

  write_render_size(cm, wb);
}
3793
// Writes the frame size by reference when possible: one bit per reference
// frame (in LAST..ALTREF order) indicating "same coded and render size as
// this reference", stopping at the first match. If no reference matches,
// the size is written explicitly.
static void write_frame_size_with_refs(AV1_COMP *cpi,
                                       struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  int found = 0;

  MV_REFERENCE_FRAME ref_frame;
  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);

    // cfg may be NULL (no buffer for this reference); found then keeps its
    // previous value and a 0 bit is written for this reference.
    if (cfg != NULL) {
      found =
          cm->width == cfg->y_crop_width && cm->height == cfg->y_crop_height;
      found &= cm->render_width == cfg->render_width &&
               cm->render_height == cfg->render_height;
    }
    aom_wb_write_bit(wb, found);
    if (found) {
      break;
    }
  }

  if (!found) {
    aom_wb_write_literal(wb, cm->width - 1, 16);
    aom_wb_write_literal(wb, cm->height - 1, 16);
    write_render_size(cm, wb);
  }
}
3821
Yaowu Xuf883b422016-08-30 14:01:10 -07003822static void write_sync_code(struct aom_write_bit_buffer *wb) {
3823 aom_wb_write_literal(wb, AV1_SYNC_CODE_0, 8);
3824 aom_wb_write_literal(wb, AV1_SYNC_CODE_1, 8);
3825 aom_wb_write_literal(wb, AV1_SYNC_CODE_2, 8);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003826}
3827
// Signals the bitstream profile with the VP9-style variable-length code:
// profiles 0-2 map to the 2-bit codes 0b00 / 0b10 / 0b01, and profile 3
// uses the 3-bit code 0b110.
static void write_profile(BITSTREAM_PROFILE profile,
                          struct aom_write_bit_buffer *wb) {
  switch (profile) {
    case PROFILE_0: aom_wb_write_literal(wb, 0, 2); break;
    case PROFILE_1: aom_wb_write_literal(wb, 2, 2); break;
    case PROFILE_2: aom_wb_write_literal(wb, 1, 2); break;
    case PROFILE_3: aom_wb_write_literal(wb, 6, 3); break;
    default: assert(0);  // unknown profile: encoder-side invariant violation
  }
}
3838
// Writes bit depth (profiles >= 2 only), color space, color range, and
// chroma subsampling to the header. Profiles 1/3 carry explicit
// subsampling; profiles 0/2 imply 4:2:0 for non-sRGB color spaces.
static void write_bitdepth_colorspace_sampling(
    AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
  if (cm->profile >= PROFILE_2) {
    // High-bit-depth profiles: one bit distinguishes 10- from 12-bit.
    assert(cm->bit_depth > AOM_BITS_8);
    aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1);
  }
  aom_wb_write_literal(wb, cm->color_space, 3);
  if (cm->color_space != AOM_CS_SRGB) {
    // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
    aom_wb_write_bit(wb, cm->color_range);
    if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
      // 4:4:4 is not allowed in these profiles for non-sRGB content.
      assert(cm->subsampling_x != 1 || cm->subsampling_y != 1);
      aom_wb_write_bit(wb, cm->subsampling_x);
      aom_wb_write_bit(wb, cm->subsampling_y);
      aom_wb_write_bit(wb, 0);  // unused
    } else {
      assert(cm->subsampling_x == 1 && cm->subsampling_y == 1);
    }
  } else {
    // sRGB requires 4:4:4, which only profiles 1 and 3 support.
    assert(cm->profile == PROFILE_1 || cm->profile == PROFILE_3);
    aom_wb_write_bit(wb, 0);  // unused
  }
}
3862
#if CONFIG_REFERENCE_BUFFER
// Fills in the sequence-level frame-id parameters from compile-time
// defaults. Despite the name, nothing is emitted to the bitstream yet --
// see the placeholder note below.
void write_sequence_header(SequenceHeader *seq_params) {
  /* Placeholder for actually writing to the bitstream */
  seq_params->frame_id_numbers_present_flag = FRAME_ID_NUMBERS_PRESENT_FLAG;
  seq_params->frame_id_length_minus7 = FRAME_ID_LENGTH_MINUS7;
  seq_params->delta_frame_id_length_minus2 = DELTA_FRAME_ID_LENGTH_MINUS2;
}
#endif
3871
Yaowu Xuf883b422016-08-30 14:01:10 -07003872static void write_uncompressed_header(AV1_COMP *cpi,
3873 struct aom_write_bit_buffer *wb) {
3874 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003875 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
3876
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003877#if CONFIG_REFERENCE_BUFFER
3878 /* TODO: Move outside frame loop or inside key-frame branch */
3879 write_sequence_header(&cpi->seq_params);
3880#endif
3881
Yaowu Xuf883b422016-08-30 14:01:10 -07003882 aom_wb_write_literal(wb, AOM_FRAME_MARKER, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003883
3884 write_profile(cm->profile, wb);
3885
3886#if CONFIG_EXT_REFS
3887 // NOTE: By default all coded frames to be used as a reference
3888 cm->is_reference_frame = 1;
3889
3890 if (cm->show_existing_frame) {
3891 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
3892 const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];
3893
3894 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003895 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003896 "Buffer %d does not contain a reconstructed frame",
3897 frame_to_show);
3898 }
3899 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
3900
Yaowu Xuf883b422016-08-30 14:01:10 -07003901 aom_wb_write_bit(wb, 1); // show_existing_frame
3902 aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003903
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003904#if CONFIG_REFERENCE_BUFFER
3905 if (cpi->seq_params.frame_id_numbers_present_flag) {
3906 int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7;
3907 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
3908 aom_wb_write_literal(wb, display_frame_id, frame_id_len);
3909 /* Add a zero byte to prevent emulation of superframe marker */
3910 /* Same logic as when when terminating the entropy coder */
3911 /* Consider to have this logic only one place */
3912 aom_wb_write_literal(wb, 0, 8);
3913 }
3914#endif
3915
Yaowu Xuc27fc142016-08-22 16:08:15 -07003916 return;
3917 } else {
3918#endif // CONFIG_EXT_REFS
Yaowu Xuf883b422016-08-30 14:01:10 -07003919 aom_wb_write_bit(wb, 0); // show_existing_frame
Yaowu Xuc27fc142016-08-22 16:08:15 -07003920#if CONFIG_EXT_REFS
3921 }
3922#endif // CONFIG_EXT_REFS
3923
Yaowu Xuf883b422016-08-30 14:01:10 -07003924 aom_wb_write_bit(wb, cm->frame_type);
3925 aom_wb_write_bit(wb, cm->show_frame);
3926 aom_wb_write_bit(wb, cm->error_resilient_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003927
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003928#if CONFIG_REFERENCE_BUFFER
3929 cm->invalid_delta_frame_id_minus1 = 0;
3930 if (cpi->seq_params.frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01003931 int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7;
3932 aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003933 }
3934#endif
3935
Yaowu Xuc27fc142016-08-22 16:08:15 -07003936 if (cm->frame_type == KEY_FRAME) {
3937 write_sync_code(wb);
3938 write_bitdepth_colorspace_sampling(cm, wb);
3939 write_frame_size(cm, wb);
Urvang Joshib100db72016-10-12 16:28:56 -07003940#if CONFIG_PALETTE
hui su24f7b072016-10-12 11:36:24 -07003941 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
Urvang Joshib100db72016-10-12 16:28:56 -07003942#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003943 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003944 if (!cm->show_frame) aom_wb_write_bit(wb, cm->intra_only);
Urvang Joshib100db72016-10-12 16:28:56 -07003945#if CONFIG_PALETTE
hui su24f7b072016-10-12 11:36:24 -07003946 if (cm->intra_only) aom_wb_write_bit(wb, cm->allow_screen_content_tools);
Urvang Joshib100db72016-10-12 16:28:56 -07003947#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003948 if (!cm->error_resilient_mode) {
3949 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003950 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003951 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
3952 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003953 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003954 cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
3955 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
Yaowu Xuf883b422016-08-30 14:01:10 -07003956 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003957 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
3958 }
3959 }
3960
3961#if CONFIG_EXT_REFS
3962 cpi->refresh_frame_mask = get_refresh_mask(cpi);
3963#endif // CONFIG_EXT_REFS
3964
3965 if (cm->intra_only) {
3966 write_sync_code(wb);
3967 write_bitdepth_colorspace_sampling(cm, wb);
3968
3969#if CONFIG_EXT_REFS
Yaowu Xuf883b422016-08-30 14:01:10 -07003970 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003971#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003972 aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003973#endif // CONFIG_EXT_REFS
3974 write_frame_size(cm, wb);
3975 } else {
3976 MV_REFERENCE_FRAME ref_frame;
3977
3978#if CONFIG_EXT_REFS
Yaowu Xuf883b422016-08-30 14:01:10 -07003979 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003980#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003981 aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003982#endif // CONFIG_EXT_REFS
3983
3984#if CONFIG_EXT_REFS
3985 if (!cpi->refresh_frame_mask) {
3986 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
3987 // will not be used as a reference
3988 cm->is_reference_frame = 0;
3989 }
3990#endif // CONFIG_EXT_REFS
3991
3992 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
3993 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
Yaowu Xuf883b422016-08-30 14:01:10 -07003994 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
Yaowu Xuc27fc142016-08-22 16:08:15 -07003995 REF_FRAMES_LOG2);
Yaowu Xuf883b422016-08-30 14:01:10 -07003996 aom_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003997#if CONFIG_REFERENCE_BUFFER
3998 if (cpi->seq_params.frame_id_numbers_present_flag) {
3999 int i = get_ref_frame_map_idx(cpi, ref_frame);
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004000 int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7;
4001 int diff_len = cpi->seq_params.delta_frame_id_length_minus2 + 2;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004002 int delta_frame_id_minus1 =
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004003 ((cm->current_frame_id - cm->ref_frame_id[i] +
4004 (1 << frame_id_len)) %
4005 (1 << frame_id_len)) -
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004006 1;
4007 if (delta_frame_id_minus1 < 0 ||
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004008 delta_frame_id_minus1 >= (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004009 cm->invalid_delta_frame_id_minus1 = 1;
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004010 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004011 }
4012#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004013 }
4014
Arild Fuldseth842e9b02016-09-02 13:00:05 +02004015#if CONFIG_FRAME_SIZE
4016 if (cm->error_resilient_mode == 0) {
4017 write_frame_size_with_refs(cpi, wb);
4018 } else {
4019 write_frame_size(cm, wb);
4020 }
4021#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004022 write_frame_size_with_refs(cpi, wb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02004023#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004024
Yaowu Xuf883b422016-08-30 14:01:10 -07004025 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004026
4027 fix_interp_filter(cm, cpi->td.counts);
Angie Chiang5678ad92016-11-21 09:38:40 -08004028 write_frame_interp_filter(cm->interp_filter, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004029 }
4030 }
4031
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004032#if CONFIG_REFERENCE_BUFFER
4033 cm->refresh_mask = cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
4034#endif
4035
Yaowu Xuc27fc142016-08-22 16:08:15 -07004036 if (!cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004037 aom_wb_write_bit(
Yaowu Xuc27fc142016-08-22 16:08:15 -07004038 wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
4039 }
4040
Yaowu Xuf883b422016-08-30 14:01:10 -07004041 aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004042
Jingning Hanc709e1f2016-12-06 14:48:09 -08004043 assert(cm->mib_size == mi_size_wide[cm->sb_size]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004044 assert(cm->mib_size == 1 << cm->mib_size_log2);
4045#if CONFIG_EXT_PARTITION
4046 assert(cm->sb_size == BLOCK_128X128 || cm->sb_size == BLOCK_64X64);
Yaowu Xuf883b422016-08-30 14:01:10 -07004047 aom_wb_write_bit(wb, cm->sb_size == BLOCK_128X128 ? 1 : 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004048#else
4049 assert(cm->sb_size == BLOCK_64X64);
4050#endif // CONFIG_EXT_PARTITION
4051
4052 encode_loopfilter(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004053#if CONFIG_DERING
4054 encode_dering(cm->dering_level, wb);
4055#endif // CONFIG_DERING
Steinar Midtskogen5d56f4d2016-09-25 09:23:16 +02004056#if CONFIG_CLPF
4057 encode_clpf(cm, wb);
4058#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004059#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07004060 encode_restoration_mode(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004061#endif // CONFIG_LOOP_RESTORATION
4062 encode_quantization(cm, wb);
4063 encode_segmentation(cm, xd, wb);
Arild Fuldseth07441162016-08-15 15:07:52 +02004064#if CONFIG_DELTA_Q
4065 {
4066 int i;
4067 struct segmentation *const seg = &cm->seg;
4068 int segment_quantizer_active = 0;
4069 for (i = 0; i < MAX_SEGMENTS; i++) {
4070 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
4071 segment_quantizer_active = 1;
4072 }
4073 }
4074 if (segment_quantizer_active == 0) {
4075 cm->delta_q_present_flag = cpi->oxcf.aq_mode == DELTA_AQ;
4076 aom_wb_write_bit(wb, cm->delta_q_present_flag);
4077 if (cm->delta_q_present_flag) {
Thomas Daviesf6936102016-09-05 16:51:31 +01004078 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
Arild Fuldseth07441162016-08-15 15:07:52 +02004079 xd->prev_qindex = cm->base_qindex;
4080 }
4081 }
4082 }
4083#endif
4084
Yaowu Xuc27fc142016-08-22 16:08:15 -07004085 if (!cm->seg.enabled && xd->lossless[0])
Urvang Joshicb586f32016-09-20 11:36:33 -07004086 cm->tx_mode = ONLY_4X4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004087 else
Debargha Mukherjee18d38f62016-11-17 20:30:16 -08004088 write_tx_mode(cm->tx_mode, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004089
4090 if (cpi->allow_comp_inter_inter) {
4091 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
4092 const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE;
4093
Yaowu Xuf883b422016-08-30 14:01:10 -07004094 aom_wb_write_bit(wb, use_hybrid_pred);
4095 if (!use_hybrid_pred) aom_wb_write_bit(wb, use_compound_pred);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004096 }
4097
4098 write_tile_info(cm, wb);
4099}
4100
4101#if CONFIG_GLOBAL_MOTION
David Barkercf3d0b02016-11-10 10:14:49 +00004102static void write_global_motion_params(WarpedMotionParams *params,
Yaowu Xuf883b422016-08-30 14:01:10 -07004103 aom_prob *probs, aom_writer *w) {
David Barkercf3d0b02016-11-10 10:14:49 +00004104 TransformationType type = params->wmtype;
Yaowu Xuf883b422016-08-30 14:01:10 -07004105 av1_write_token(w, av1_global_motion_types_tree, probs,
David Barkercf3d0b02016-11-10 10:14:49 +00004106 &global_motion_types_encodings[type]);
4107 switch (type) {
Debargha Mukherjee3fb33f02016-11-12 10:43:50 -08004108 case HOMOGRAPHY:
Debargha Mukherjee949097c2016-11-15 17:27:38 -08004109 aom_write_primitive_symmetric(
4110 w, (params->wmmat[6] >> GM_ROW3HOMO_PREC_DIFF), GM_ABS_ROW3HOMO_BITS);
4111 aom_write_primitive_symmetric(
4112 w, (params->wmmat[7] >> GM_ROW3HOMO_PREC_DIFF), GM_ABS_ROW3HOMO_BITS);
Debargha Mukherjee3fb33f02016-11-12 10:43:50 -08004113 // fallthrough intended
David Barkercf3d0b02016-11-10 10:14:49 +00004114 case AFFINE:
4115 case ROTZOOM:
Yaowu Xuf883b422016-08-30 14:01:10 -07004116 aom_write_primitive_symmetric(
David Barkercf3d0b02016-11-10 10:14:49 +00004117 w,
4118 (params->wmmat[2] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS),
Yaowu Xuc27fc142016-08-22 16:08:15 -07004119 GM_ABS_ALPHA_BITS);
David Barkercf3d0b02016-11-10 10:14:49 +00004120 aom_write_primitive_symmetric(w, (params->wmmat[3] >> GM_ALPHA_PREC_DIFF),
4121 GM_ABS_ALPHA_BITS);
Debargha Mukherjee3fb33f02016-11-12 10:43:50 -08004122 if (type == AFFINE || type == HOMOGRAPHY) {
Debargha Mukherjee8db4c772016-11-07 12:54:21 -08004123 aom_write_primitive_symmetric(
David Barkercf3d0b02016-11-10 10:14:49 +00004124 w, (params->wmmat[4] >> GM_ALPHA_PREC_DIFF), GM_ABS_ALPHA_BITS);
4125 aom_write_primitive_symmetric(w,
4126 (params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
4127 (1 << GM_ALPHA_PREC_BITS),
4128 GM_ABS_ALPHA_BITS);
Debargha Mukherjee8db4c772016-11-07 12:54:21 -08004129 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004130 // fallthrough intended
David Barkercf3d0b02016-11-10 10:14:49 +00004131 case TRANSLATION:
4132 aom_write_primitive_symmetric(w, (params->wmmat[0] >> GM_TRANS_PREC_DIFF),
4133 GM_ABS_TRANS_BITS);
4134 aom_write_primitive_symmetric(w, (params->wmmat[1] >> GM_TRANS_PREC_DIFF),
4135 GM_ABS_TRANS_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004136 break;
Debargha Mukherjee3fb33f02016-11-12 10:43:50 -08004137 case IDENTITY: break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004138 default: assert(0);
4139 }
4140}
4141
Yaowu Xuf883b422016-08-30 14:01:10 -07004142static void write_global_motion(AV1_COMP *cpi, aom_writer *w) {
4143 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004144 int frame;
4145 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barker43479c62016-11-30 10:34:20 +00004146#if !CONFIG_REF_MV
4147 // With ref-mv, clearing unused global motion models here is
4148 // unsafe, and we need to rely on the recode loop to do it
4149 // instead. See av1_find_mv_refs for details.
Debargha Mukherjee705544c2016-11-22 08:55:49 -08004150 if (!cpi->global_motion_used[frame][0]) {
Debargha Mukherjee8db4c772016-11-07 12:54:21 -08004151 set_default_gmparams(&cm->global_motion[frame]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004152 }
David Barker43479c62016-11-30 10:34:20 +00004153#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004154 write_global_motion_params(&cm->global_motion[frame],
4155 cm->fc->global_motion_types_prob, w);
Sarah Parkere5299862016-08-16 14:57:37 -07004156 /*
Debargha Mukherjee705544c2016-11-22 08:55:49 -08004157 printf("Frame %d/%d: Enc Ref %d (used %d/%d): %d %d %d %d\n",
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08004158 cm->current_video_frame, cm->show_frame, frame,
Debargha Mukherjee705544c2016-11-22 08:55:49 -08004159 cpi->global_motion_used[frame][0], cpi->global_motion_used[frame][1],
4160 cm->global_motion[frame].wmmat[0], cm->global_motion[frame].wmmat[1],
4161 cm->global_motion[frame].wmmat[2],
4162 cm->global_motion[frame].wmmat[3]);
Debargha Mukherjee8db4c772016-11-07 12:54:21 -08004163 */
Yaowu Xuc27fc142016-08-22 16:08:15 -07004164 }
4165}
4166#endif
4167
Yaowu Xuf883b422016-08-30 14:01:10 -07004168static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data) {
4169 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004170#if CONFIG_SUPERTX
4171 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
4172#endif // CONFIG_SUPERTX
4173 FRAME_CONTEXT *const fc = cm->fc;
4174 FRAME_COUNTS *counts = cpi->td.counts;
Yaowu Xuf883b422016-08-30 14:01:10 -07004175 aom_writer *header_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004176 int i, j;
Thomas9ac55082016-09-23 18:04:17 +01004177
Thomas Davies80188d12016-10-26 16:08:35 -07004178#if CONFIG_TILE_GROUPS
4179 const int probwt = cm->num_tg;
4180#else
4181 const int probwt = 1;
4182#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004183
4184#if CONFIG_ANS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004185 int header_size;
4186 header_bc = &cpi->buf_ans;
Alex Converse2a1b3af2016-10-26 13:11:26 -07004187 buf_ans_write_init(header_bc, data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004188#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004189 aom_writer real_header_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004190 header_bc = &real_header_bc;
Yaowu Xuf883b422016-08-30 14:01:10 -07004191 aom_start_encode(header_bc, data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004192#endif
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07004193
4194#if CONFIG_LOOP_RESTORATION
4195 encode_restoration(cm, header_bc);
4196#endif // CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -07004197 update_txfm_probs(cm, header_bc, counts);
Yushin Cho77bba8d2016-11-04 16:36:56 -07004198#if !CONFIG_PVQ
Yaowu Xuc27fc142016-08-22 16:08:15 -07004199 update_coef_probs(cpi, header_bc);
Yushin Cho77bba8d2016-11-04 16:36:56 -07004200#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004201#if CONFIG_VAR_TX
Thomas Davies80188d12016-10-26 16:08:35 -07004202 update_txfm_partition_probs(cm, header_bc, counts, probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004203#endif
4204
4205 update_skip_probs(cm, header_bc, counts);
Thomas Daviesf6936102016-09-05 16:51:31 +01004206#if CONFIG_DELTA_Q
4207 update_delta_q_probs(cm, header_bc, counts);
4208#endif
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04004209#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004210 update_seg_probs(cpi, header_bc);
4211
Nathan E. Egge380cb1a2016-09-08 10:13:42 -04004212 for (i = 0; i < INTRA_MODES; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004213 prob_diff_update(av1_intra_mode_tree, fc->uv_mode_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004214 counts->uv_mode[i], INTRA_MODES, probwt, header_bc);
Nathan E. Egge380cb1a2016-09-08 10:13:42 -04004215 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004216
4217#if CONFIG_EXT_PARTITION_TYPES
Yaowu Xuf883b422016-08-30 14:01:10 -07004218 prob_diff_update(av1_partition_tree, fc->partition_prob[0],
Thomas Davies80188d12016-10-26 16:08:35 -07004219 counts->partition[0], PARTITION_TYPES, probwt, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004220 for (i = 1; i < PARTITION_CONTEXTS; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07004221 prob_diff_update(av1_ext_partition_tree, fc->partition_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004222 counts->partition[i], EXT_PARTITION_TYPES, probwt,
4223 header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004224#else
Nathan E. Eggefba2be62016-05-03 09:48:54 -04004225 for (i = 0; i < PARTITION_CONTEXTS; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004226 prob_diff_update(av1_partition_tree, fc->partition_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004227 counts->partition[i], PARTITION_TYPES, probwt, header_bc);
Nathan E. Eggefba2be62016-05-03 09:48:54 -04004228 }
Thomas Davies6519beb2016-10-19 14:46:07 +01004229#endif // CONFIG_EC_ADAPT, CONFIG_DAALA_EC
Yaowu Xuc27fc142016-08-22 16:08:15 -07004230
4231#if CONFIG_EXT_INTRA
hui sueda3d762016-12-06 16:58:23 -08004232#if CONFIG_INTRA_INTERP
Yaowu Xuc27fc142016-08-22 16:08:15 -07004233 for (i = 0; i < INTRA_FILTERS + 1; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07004234 prob_diff_update(av1_intra_filter_tree, fc->intra_filter_probs[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004235 counts->intra_filter[i], INTRA_FILTERS, probwt, header_bc);
hui sueda3d762016-12-06 16:58:23 -08004236#endif // CONFIG_INTRA_INTERP
Yaowu Xuc27fc142016-08-22 16:08:15 -07004237#endif // CONFIG_EXT_INTRA
Thomas9ac55082016-09-23 18:04:17 +01004238#endif // CONFIG_EC_ADAPT, CONFIG_DAALA_EC
Yaowu Xuc27fc142016-08-22 16:08:15 -07004239 if (frame_is_intra_only(cm)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004240 av1_copy(cm->kf_y_prob, av1_kf_y_mode_prob);
Nathan E. Egge10ba2be2016-11-16 09:44:26 -05004241#if CONFIG_EC_MULTISYMBOL
Nathan E. Egge3ef926e2016-09-07 18:20:41 -04004242 av1_copy(cm->kf_y_cdf, av1_kf_y_mode_cdf);
4243#endif
Thomas9ac55082016-09-23 18:04:17 +01004244
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04004245#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004246 for (i = 0; i < INTRA_MODES; ++i)
Thomas Davies6519beb2016-10-19 14:46:07 +01004247 for (j = 0; j < INTRA_MODES; ++j)
Yaowu Xuf883b422016-08-30 14:01:10 -07004248 prob_diff_update(av1_intra_mode_tree, cm->kf_y_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07004249 counts->kf_y_mode[i][j], INTRA_MODES, probwt,
4250 header_bc);
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04004251#endif // CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004252 } else {
4253#if CONFIG_REF_MV
4254 update_inter_mode_probs(cm, header_bc, counts);
4255#else
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04004256#if !CONFIG_EC_ADAPT
Nathan E. Egge6ec4d102016-09-08 10:41:20 -04004257 for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004258 prob_diff_update(av1_inter_mode_tree, cm->fc->inter_mode_probs[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004259 counts->inter_mode[i], INTER_MODES, probwt, header_bc);
Nathan E. Egge6ec4d102016-09-08 10:41:20 -04004260 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004261#endif
Thomas Davies6519beb2016-10-19 14:46:07 +01004262#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004263#if CONFIG_EXT_INTER
Thomas Davies80188d12016-10-26 16:08:35 -07004264 update_inter_compound_mode_probs(cm, probwt, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004265
4266 if (cm->reference_mode != COMPOUND_REFERENCE) {
4267 for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
4268 if (is_interintra_allowed_bsize_group(i)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004269 av1_cond_prob_diff_update(header_bc, &fc->interintra_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004270 cm->counts.interintra[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004271 }
4272 }
4273 for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
4274 prob_diff_update(
Yaowu Xuf883b422016-08-30 14:01:10 -07004275 av1_interintra_mode_tree, cm->fc->interintra_mode_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004276 counts->interintra_mode[i], INTERINTRA_MODES, probwt, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004277 }
4278 for (i = 0; i < BLOCK_SIZES; i++) {
4279 if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i))
Yaowu Xuf883b422016-08-30 14:01:10 -07004280 av1_cond_prob_diff_update(header_bc, &fc->wedge_interintra_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004281 cm->counts.wedge_interintra[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004282 }
4283 }
4284 if (cm->reference_mode != SINGLE_REFERENCE) {
4285 for (i = 0; i < BLOCK_SIZES; i++)
Sarah Parker6fdc8532016-11-16 17:47:13 -08004286 prob_diff_update(av1_compound_type_tree, fc->compound_type_prob[i],
4287 cm->counts.compound_interinter[i], COMPOUND_TYPES,
4288 probwt, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004289 }
4290#endif // CONFIG_EXT_INTER
4291
Yue Chencb60b182016-10-13 15:18:22 -07004292#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07004293 for (i = BLOCK_8X8; i < BLOCK_SIZES; ++i)
Yue Chencb60b182016-10-13 15:18:22 -07004294 prob_diff_update(av1_motion_mode_tree, fc->motion_mode_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004295 counts->motion_mode[i], MOTION_MODES, probwt, header_bc);
Yue Chencb60b182016-10-13 15:18:22 -07004296#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04004297#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004298 if (cm->interp_filter == SWITCHABLE)
4299 update_switchable_interp_probs(cm, header_bc, counts);
Thomas9ac55082016-09-23 18:04:17 +01004300#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004301
4302 for (i = 0; i < INTRA_INTER_CONTEXTS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07004303 av1_cond_prob_diff_update(header_bc, &fc->intra_inter_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004304 counts->intra_inter[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004305
4306 if (cpi->allow_comp_inter_inter) {
4307 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
4308 if (use_hybrid_pred)
4309 for (i = 0; i < COMP_INTER_CONTEXTS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07004310 av1_cond_prob_diff_update(header_bc, &fc->comp_inter_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004311 counts->comp_inter[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004312 }
4313
4314 if (cm->reference_mode != COMPOUND_REFERENCE) {
4315 for (i = 0; i < REF_CONTEXTS; i++) {
4316 for (j = 0; j < (SINGLE_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004317 av1_cond_prob_diff_update(header_bc, &fc->single_ref_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07004318 counts->single_ref[i][j], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004319 }
4320 }
4321 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004322 if (cm->reference_mode != SINGLE_REFERENCE) {
4323 for (i = 0; i < REF_CONTEXTS; i++) {
4324#if CONFIG_EXT_REFS
4325 for (j = 0; j < (FWD_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004326 av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07004327 counts->comp_ref[i][j], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004328 }
4329 for (j = 0; j < (BWD_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004330 av1_cond_prob_diff_update(header_bc, &fc->comp_bwdref_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07004331 counts->comp_bwdref[i][j], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004332 }
4333#else
4334 for (j = 0; j < (COMP_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004335 av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07004336 counts->comp_ref[i][j], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004337 }
4338#endif // CONFIG_EXT_REFS
4339 }
4340 }
4341
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04004342#if !CONFIG_EC_ADAPT
Nathan E. Egge5710c722016-09-08 10:01:16 -04004343 for (i = 0; i < BLOCK_SIZE_GROUPS; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004344 prob_diff_update(av1_intra_mode_tree, cm->fc->y_mode_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004345 counts->y_mode[i], INTRA_MODES, probwt, header_bc);
Nathan E. Egge5710c722016-09-08 10:01:16 -04004346 }
Thomas Davies6519beb2016-10-19 14:46:07 +01004347#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004348
Jingning Hanfd0cf162016-09-30 10:33:50 -07004349 av1_write_nmv_probs(cm, cm->allow_high_precision_mv, header_bc,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004350#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07004351 counts->mv);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004352#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004353 &counts->mv);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004354#endif
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04004355#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004356 update_ext_tx_probs(cm, header_bc);
Thomas9ac55082016-09-23 18:04:17 +01004357#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004358#if CONFIG_SUPERTX
Thomas Davies80188d12016-10-26 16:08:35 -07004359 if (!xd->lossless[0]) update_supertx_probs(cm, probwt, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004360#endif // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07004361#if CONFIG_GLOBAL_MOTION
Sarah Parker689b0ca2016-10-11 12:06:33 -07004362 write_global_motion(cpi, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004363#endif // CONFIG_GLOBAL_MOTION
Sarah Parker689b0ca2016-10-11 12:06:33 -07004364 }
Thomas Davies6519beb2016-10-19 14:46:07 +01004365#if CONFIG_EC_MULTISYMBOL
4366 av1_coef_pareto_cdfs(fc);
David Barker599dfd02016-11-10 13:20:12 +00004367#if CONFIG_REF_MV
4368 for (i = 0; i < NMV_CONTEXTS; ++i) av1_set_mv_cdfs(&fc->nmvc[i]);
4369#else
Thomas Davies6519beb2016-10-19 14:46:07 +01004370 av1_set_mv_cdfs(&fc->nmvc);
David Barker599dfd02016-11-10 13:20:12 +00004371#endif
Nathan E. Egge31296062016-11-16 09:44:26 -05004372#if CONFIG_EC_MULTISYMBOL
Thomas Davies6519beb2016-10-19 14:46:07 +01004373 av1_set_mode_cdfs(cm);
4374#endif
4375#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004376#if CONFIG_ANS
Alex Converse1ecdf2b2016-11-30 15:51:12 -08004377 aom_buf_ans_flush(header_bc);
Alex Converse2a1b3af2016-10-26 13:11:26 -07004378 header_size = buf_ans_write_end(header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004379 assert(header_size <= 0xffff);
4380 return header_size;
4381#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004382 aom_stop_encode(header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004383 assert(header_bc->pos <= 0xffff);
4384 return header_bc->pos;
4385#endif // CONFIG_ANS
4386}
4387
Thomas Davies80188d12016-10-26 16:08:35 -07004388#if !CONFIG_TILE_GROUPS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004389static int choose_size_bytes(uint32_t size, int spare_msbs) {
4390 // Choose the number of bytes required to represent size, without
4391 // using the 'spare_msbs' number of most significant bits.
4392
4393 // Make sure we will fit in 4 bytes to start with..
4394 if (spare_msbs > 0 && size >> (32 - spare_msbs) != 0) return -1;
4395
4396 // Normalise to 32 bits
4397 size <<= spare_msbs;
4398
4399 if (size >> 24 != 0)
4400 return 4;
4401 else if (size >> 16 != 0)
4402 return 3;
4403 else if (size >> 8 != 0)
4404 return 2;
4405 else
4406 return 1;
4407}
4408
4409static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) {
4410 switch (sz) {
4411 case 1: dst[0] = (uint8_t)(val & 0xff); break;
4412 case 2: mem_put_le16(dst, val); break;
4413 case 3: mem_put_le24(dst, val); break;
4414 case 4: mem_put_le32(dst, val); break;
4415 default: assert("Invalid size" && 0); break;
4416 }
4417}
Yaowu Xuf883b422016-08-30 14:01:10 -07004418static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004419 const uint32_t data_size, const uint32_t max_tile_size,
4420 const uint32_t max_tile_col_size,
4421 int *const tile_size_bytes,
4422 int *const tile_col_size_bytes) {
4423// Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
4424#if CONFIG_EXT_TILE
4425 // The top bit in the tile size field indicates tile copy mode, so we
4426 // have 1 less bit to code the tile size
4427 const int tsb = choose_size_bytes(max_tile_size, 1);
4428 const int tcsb = choose_size_bytes(max_tile_col_size, 0);
4429#else
4430 const int tsb = choose_size_bytes(max_tile_size, 0);
4431 const int tcsb = 4; // This is ignored
4432 (void)max_tile_col_size;
4433#endif // CONFIG_EXT_TILE
4434
4435 assert(tsb > 0);
4436 assert(tcsb > 0);
4437
4438 *tile_size_bytes = tsb;
4439 *tile_col_size_bytes = tcsb;
4440
4441 if (tsb == 4 && tcsb == 4) {
4442 return data_size;
4443 } else {
4444 uint32_t wpos = 0;
4445 uint32_t rpos = 0;
4446
4447#if CONFIG_EXT_TILE
4448 int tile_row;
4449 int tile_col;
4450
4451 for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
4452 // All but the last column has a column header
4453 if (tile_col < cm->tile_cols - 1) {
4454 uint32_t tile_col_size = mem_get_le32(dst + rpos);
4455 rpos += 4;
4456
4457 // Adjust the tile column size by the number of bytes removed
4458 // from the tile size fields.
4459 tile_col_size -= (4 - tsb) * cm->tile_rows;
4460
4461 mem_put_varsize(dst + wpos, tcsb, tile_col_size);
4462 wpos += tcsb;
4463 }
4464
4465 for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
4466 // All, including the last row has a header
4467 uint32_t tile_header = mem_get_le32(dst + rpos);
4468 rpos += 4;
4469
4470 // If this is a copy tile, we need to shift the MSB to the
4471 // top bit of the new width, and there is no data to copy.
4472 if (tile_header >> 31 != 0) {
4473 if (tsb < 4) tile_header >>= 32 - 8 * tsb;
4474 mem_put_varsize(dst + wpos, tsb, tile_header);
4475 wpos += tsb;
4476 } else {
4477 mem_put_varsize(dst + wpos, tsb, tile_header);
4478 wpos += tsb;
4479
4480 memmove(dst + wpos, dst + rpos, tile_header);
4481 rpos += tile_header;
4482 wpos += tile_header;
4483 }
4484 }
4485 }
4486#else
4487 const int n_tiles = cm->tile_cols * cm->tile_rows;
4488 int n;
4489
4490 for (n = 0; n < n_tiles; n++) {
4491 int tile_size;
4492
4493 if (n == n_tiles - 1) {
4494 tile_size = data_size - rpos;
4495 } else {
4496 tile_size = mem_get_le32(dst + rpos);
4497 rpos += 4;
4498 mem_put_varsize(dst + wpos, tsb, tile_size);
4499 wpos += tsb;
4500 }
4501
4502 memmove(dst + wpos, dst + rpos, tile_size);
4503
4504 rpos += tile_size;
4505 wpos += tile_size;
4506 }
4507#endif // CONFIG_EXT_TILE
4508
4509 assert(rpos > wpos);
4510 assert(rpos == data_size);
4511
4512 return wpos;
4513 }
4514}
Thomas Davies80188d12016-10-26 16:08:35 -07004515#endif // CONFIG_TILE_GROUPS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004516
// Packs the complete coded frame bitstream for the current frame into dst
// and reports the total byte count via *size.
//
// Layout produced here (non-TILE_GROUPS build): uncompressed header,
// compressed header, then the encoded tile data (optionally remuxed to
// shrink the per-tile size fields). With CONFIG_TILE_GROUPS, write_tiles()
// emits the headers itself and this function only appends the tile data.
//
// Parameters:
//   cpi  - encoder context; source of frame state and coded tile data.
//   dst  - output buffer for the packed frame. NOTE(review): capacity is
//          not checked here — presumably the caller sizes it; confirm.
//   size - out: number of bytes written to dst.
void av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) {
  uint8_t *data = dst;
#if !CONFIG_TILE_GROUPS
  uint32_t compressed_header_size;
  uint32_t uncompressed_header_size;
  // Snapshot of the bit-writer taken at the placeholder fields so they can
  // be back-patched once the real sizes are known.
  struct aom_write_bit_buffer saved_wb;
#endif
  uint32_t data_size;
  struct aom_write_bit_buffer wb = { data, 0 };

  unsigned int max_tile_size;
  unsigned int max_tile_col_size;

#if !CONFIG_TILE_GROUPS
  int tile_size_bytes;
  int tile_col_size_bytes;
  AV1_COMMON *const cm = &cpi->common;
  const int have_tiles = cm->tile_cols * cm->tile_rows > 1;

#if CONFIG_BITSTREAM_DEBUG
  bitstream_queue_reset_write();
#endif

  // Write the uncompressed header
  write_uncompressed_header(cpi, &wb);

#if CONFIG_EXT_REFS
  // A show_existing_frame frame consists of the uncompressed header only:
  // no compressed header and no tile data follow.
  if (cm->show_existing_frame) {
    *size = aom_wb_bytes_written(&wb);
    return;
  }
#endif  // CONFIG_EXT_REFS

  // We do not know these in advance. Output placeholder bit.
  saved_wb = wb;
  // Write tile size magnitudes
  if (have_tiles) {
// Note that the last item in the uncompressed header is the data
// describing tile configuration.
#if CONFIG_EXT_TILE
    // Number of bytes in tile column size - 1
    aom_wb_write_literal(&wb, 0, 2);
#endif  // CONFIG_EXT_TILE
    // Number of bytes in tile size - 1
    aom_wb_write_literal(&wb, 0, 2);
  }
  // Size of compressed header
  aom_wb_write_literal(&wb, 0, 16);

  uncompressed_header_size = (uint32_t)aom_wb_bytes_written(&wb);
  data += uncompressed_header_size;

  aom_clear_system_state();

  // Write the compressed header
  compressed_header_size = write_compressed_header(cpi, data);
  data += compressed_header_size;

  // Write the encoded tile data
  data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
#else
  // Tile-groups build: write_tiles() handles header emission internally.
  data_size = write_tiles(cpi, &wb, &max_tile_size, &max_tile_col_size);
#endif
#if !CONFIG_TILE_GROUPS
  if (have_tiles) {
    // Rewrite the tile data in place so each tile-size field uses the
    // minimum byte count; also reports how many bytes those fields need.
    data_size =
        remux_tiles(cm, data, data_size, max_tile_size, max_tile_col_size,
                    &tile_size_bytes, &tile_col_size_bytes);
  }

  data += data_size;

  // Now fill in the gaps in the uncompressed header.
  if (have_tiles) {
#if CONFIG_EXT_TILE
    assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
    aom_wb_write_literal(&saved_wb, tile_col_size_bytes - 1, 2);
#endif  // CONFIG_EXT_TILE
    assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
    aom_wb_write_literal(&saved_wb, tile_size_bytes - 1, 2);
  }
  // TODO(jbb): Figure out what to do if compressed_header_size > 16 bits.
  assert(compressed_header_size <= 0xffff);
  aom_wb_write_literal(&saved_wb, compressed_header_size, 16);
#else
  data += data_size;
#endif
#if CONFIG_ANS && ANS_REVERSE
  // Avoid aliasing the superframe index
  *data++ = 0;
#endif
  *size = data - dst;
}