blob: fc5b8a3c806428c214794b93542621c2ff81786b [file] [log] [blame]
/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */
11
12#include <assert.h>
13#include <limits.h>
14#include <stdio.h>
15
Yaowu Xuf883b422016-08-30 14:01:10 -070016#include "aom/aom_encoder.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070017#include "aom_dsp/bitwriter_buffer.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070018#include "aom_dsp/aom_dsp_common.h"
19#include "aom_mem/aom_mem.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070020#include "aom_ports/mem_ops.h"
21#include "aom_ports/system_state.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070022#if CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070023#include "aom_util/debug_util.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070024#endif // CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070025
26#if CONFIG_CLPF
27#include "av1/common/clpf.h"
28#endif
29#if CONFIG_DERING
30#include "av1/common/dering.h"
31#endif // CONFIG_DERING
32#include "av1/common/entropy.h"
33#include "av1/common/entropymode.h"
34#include "av1/common/entropymv.h"
35#include "av1/common/mvref_common.h"
Thomas Daviesf6936102016-09-05 16:51:31 +010036#include "av1/common/odintrin.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070037#include "av1/common/pred_common.h"
38#include "av1/common/reconinter.h"
hui su45dc5972016-12-08 17:42:50 -080039#if CONFIG_EXT_INTRA
40#include "av1/common/reconintra.h"
41#endif // CONFIG_EXT_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -070042#include "av1/common/seg_common.h"
43#include "av1/common/tile_common.h"
44
45#if CONFIG_ANS
Alex Converse1ac1ae72016-09-17 15:11:16 -070046#include "aom_dsp/buf_ans.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070047#endif // CONFIG_ANS
48#include "av1/encoder/bitstream.h"
49#include "av1/encoder/cost.h"
50#include "av1/encoder/encodemv.h"
51#include "av1/encoder/mcomp.h"
52#include "av1/encoder/segmentation.h"
53#include "av1/encoder/subexp.h"
54#include "av1/encoder/tokenize.h"
Yushin Cho77bba8d2016-11-04 16:36:56 -070055#if CONFIG_PVQ
56#include "av1/encoder/pvq_encoder.h"
57#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -070058
// Variable-length code tables for the symbols written by this file.
// The const tables below are fixed codes; the non-const ones are filled in
// at runtime by av1_encode_token_init() from the corresponding trees.
static struct av1_token intra_mode_encodings[INTRA_MODES];
static struct av1_token switchable_interp_encodings[SWITCHABLE_FILTERS];
#if CONFIG_EXT_PARTITION_TYPES
// Each entry is { code value, code length in bits }.
static const struct av1_token ext_partition_encodings[EXT_PARTITION_TYPES] = {
  { 0, 1 }, { 4, 3 }, { 12, 4 }, { 7, 3 },
  { 10, 4 }, { 11, 4 }, { 26, 5 }, { 27, 5 }
};
#endif
static struct av1_token partition_encodings[PARTITION_TYPES];
#if !CONFIG_REF_MV
static struct av1_token inter_mode_encodings[INTER_MODES];
#endif
#if CONFIG_EXT_INTER
static const struct av1_token
    inter_compound_mode_encodings[INTER_COMPOUND_MODES] = {
      { 2, 2 }, { 50, 6 }, { 51, 6 }, { 24, 5 }, { 52, 6 },
      { 53, 6 }, { 54, 6 }, { 55, 6 }, { 0, 1 }, { 7, 3 }
    };
#endif  // CONFIG_EXT_INTER
#if CONFIG_PALETTE
static struct av1_token palette_size_encodings[PALETTE_MAX_SIZE - 1];
static struct av1_token palette_color_encodings[PALETTE_MAX_SIZE - 1]
                                               [PALETTE_MAX_SIZE];
#endif  // CONFIG_PALETTE
// Depth-coded tx_size codes, one row per maximum transform size category.
static const struct av1_token tx_size_encodings[MAX_TX_DEPTH][TX_SIZES] = {
  { { 0, 1 }, { 1, 1 } },                      // Max tx_size is 8X8
  { { 0, 1 }, { 2, 2 }, { 3, 2 } },            // Max tx_size is 16X16
  { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 7, 3 } },  // Max tx_size is 32X32
#if CONFIG_TX64X64
  { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 14, 4 }, { 15, 4 } },  // Max tx_size 64X64
#endif  // CONFIG_TX64X64
};
hui su5db97432016-10-14 16:10:14 -070092#if CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
Yaowu Xuf883b422016-08-30 14:01:10 -070093static INLINE void write_uniform(aom_writer *w, int n, int v) {
Yaowu Xuc27fc142016-08-22 16:08:15 -070094 int l = get_unsigned_bits(n);
95 int m = (1 << l) - n;
96 if (l == 0) return;
97 if (v < m) {
Yaowu Xuf883b422016-08-30 14:01:10 -070098 aom_write_literal(w, v, l - 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -070099 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -0700100 aom_write_literal(w, m + ((v - m) >> 1), l - 1);
101 aom_write_literal(w, (v - m) & 1, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700102 }
103}
hui su5db97432016-10-14 16:10:14 -0700104#endif // CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -0700105
106#if CONFIG_EXT_TX
Yaowu Xuf883b422016-08-30 14:01:10 -0700107static struct av1_token ext_tx_inter_encodings[EXT_TX_SETS_INTER][TX_TYPES];
108static struct av1_token ext_tx_intra_encodings[EXT_TX_SETS_INTRA][TX_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700109#else
Yaowu Xuf883b422016-08-30 14:01:10 -0700110static struct av1_token ext_tx_encodings[TX_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700111#endif // CONFIG_EXT_TX
112#if CONFIG_GLOBAL_MOTION
David Barkercf3d0b02016-11-10 10:14:49 +0000113static struct av1_token global_motion_types_encodings[GLOBAL_TRANS_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700114#endif // CONFIG_GLOBAL_MOTION
115#if CONFIG_EXT_INTRA
hui sueda3d762016-12-06 16:58:23 -0800116#if CONFIG_INTRA_INTERP
Yaowu Xuf883b422016-08-30 14:01:10 -0700117static struct av1_token intra_filter_encodings[INTRA_FILTERS];
hui sueda3d762016-12-06 16:58:23 -0800118#endif // CONFIG_INTRA_INTERP
Yaowu Xuc27fc142016-08-22 16:08:15 -0700119#endif // CONFIG_EXT_INTRA
120#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -0700121static struct av1_token interintra_mode_encodings[INTERINTRA_MODES];
Sarah Parker6fddd182016-11-10 20:57:20 -0800122static struct av1_token compound_type_encodings[COMPOUND_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700123#endif // CONFIG_EXT_INTER
Yue Chencb60b182016-10-13 15:18:22 -0700124#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
125static struct av1_token motion_mode_encodings[MOTION_MODES];
126#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -0700127#if CONFIG_LOOP_RESTORATION
128static struct av1_token switchable_restore_encodings[RESTORE_SWITCHABLE_TYPES];
129#endif // CONFIG_LOOP_RESTORATION
Thomas Davies80188d12016-10-26 16:08:35 -0700130static void write_uncompressed_header(AV1_COMP *cpi,
131 struct aom_write_bit_buffer *wb);
132static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700133
// One-time initialization of the file-scope token tables above: builds the
// token codes from each probability tree and, when multisymbol entropy
// coding is enabled, the symbol-index remapping tables.
void av1_encode_token_init(void) {
#if CONFIG_EXT_TX || CONFIG_PALETTE
  int s;
#endif  // CONFIG_EXT_TX || CONFIG_PALETTE
#if CONFIG_EXT_TX
  // Set 0 of each ext-tx group is not tree coded, so start at 1.
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    av1_tokens_from_tree(ext_tx_inter_encodings[s], av1_ext_tx_inter_tree[s]);
  }
  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    av1_tokens_from_tree(ext_tx_intra_encodings[s], av1_ext_tx_intra_tree[s]);
  }
#else
  av1_tokens_from_tree(ext_tx_encodings, av1_ext_tx_tree);
#endif  // CONFIG_EXT_TX
  av1_tokens_from_tree(intra_mode_encodings, av1_intra_mode_tree);
  av1_tokens_from_tree(switchable_interp_encodings, av1_switchable_interp_tree);
  av1_tokens_from_tree(partition_encodings, av1_partition_tree);
#if !CONFIG_REF_MV
  av1_tokens_from_tree(inter_mode_encodings, av1_inter_mode_tree);
#endif

#if CONFIG_PALETTE
  av1_tokens_from_tree(palette_size_encodings, av1_palette_size_tree);
  for (s = 0; s < PALETTE_MAX_SIZE - 1; ++s) {
    av1_tokens_from_tree(palette_color_encodings[s], av1_palette_color_tree[s]);
  }
#endif  // CONFIG_PALETTE

#if CONFIG_EXT_INTRA
#if CONFIG_INTRA_INTERP
  av1_tokens_from_tree(intra_filter_encodings, av1_intra_filter_tree);
#endif  // CONFIG_INTRA_INTERP
#endif  // CONFIG_EXT_INTRA
#if CONFIG_EXT_INTER
  av1_tokens_from_tree(interintra_mode_encodings, av1_interintra_mode_tree);
  av1_tokens_from_tree(compound_type_encodings, av1_compound_type_tree);
#endif  // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
  av1_tokens_from_tree(motion_mode_encodings, av1_motion_mode_tree);
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_GLOBAL_MOTION
  av1_tokens_from_tree(global_motion_types_encodings,
                       av1_global_motion_types_tree);
#endif  // CONFIG_GLOBAL_MOTION
#if CONFIG_LOOP_RESTORATION
  av1_tokens_from_tree(switchable_restore_encodings,
                       av1_switchable_restore_tree);
#endif  // CONFIG_LOOP_RESTORATION

#if CONFIG_EC_MULTISYMBOL
  /* This hack is necessary when CONFIG_DUAL_FILTER is enabled because the five
     SWITCHABLE_FILTERS are not consecutive, e.g., 0, 1, 2, 3, 4, when doing
     an in-order traversal of the av1_switchable_interp_tree structure. */
  av1_indices_from_tree(av1_switchable_interp_ind, av1_switchable_interp_inv,
                        SWITCHABLE_FILTERS, av1_switchable_interp_tree);
/* This hack is necessary because the four TX_TYPES are not consecutive,
   e.g., 0, 1, 2, 3, when doing an in-order traversal of the av1_ext_tx_tree
   structure. */
#if !CONFIG_EXT_TX
  av1_indices_from_tree(av1_ext_tx_ind, av1_ext_tx_inv, TX_TYPES,
                        av1_ext_tx_tree);
#endif
  av1_indices_from_tree(av1_intra_mode_ind, av1_intra_mode_inv, INTRA_MODES,
                        av1_intra_mode_tree);
  av1_indices_from_tree(av1_inter_mode_ind, av1_inter_mode_inv, INTER_MODES,
                        av1_inter_mode_tree);
#endif
}
202
Nathan E. Egge380cb1a2016-09-08 10:13:42 -0400203#if !CONFIG_DAALA_EC
// Writes an intra prediction mode as a tree-coded token under the given
// per-context probabilities.
static void write_intra_mode(aom_writer *w, PREDICTION_MODE mode,
                             const aom_prob *probs) {
  av1_write_token(w, av1_intra_mode_tree, probs, &intra_mode_encodings[mode]);
}
Nathan E. Egge380cb1a2016-09-08 10:13:42 -0400208#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700209
210#if CONFIG_EXT_INTER
// Writes an inter-intra prediction mode as a tree-coded token under the
// given probabilities.
static void write_interintra_mode(aom_writer *w, INTERINTRA_MODE mode,
                                  const aom_prob *probs) {
  av1_write_token(w, av1_interintra_mode_tree, probs,
                  &interintra_mode_encodings[mode]);
}
216#endif // CONFIG_EXT_INTER
217
// Signals a single-reference inter prediction mode.  With CONFIG_REF_MV the
// mode is written as a cascade of binary decisions (NEWMV? -> ZEROMV? ->
// NEARESTMV/NEARMV), each conditioned on a sub-context extracted from
// mode_ctx; otherwise it is written as one tree-coded (or multisymbol)
// token.  The bit order here must match the decoder exactly.
static void write_inter_mode(AV1_COMMON *cm, aom_writer *w,
                             PREDICTION_MODE mode,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                             int is_compound,
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
                             const int16_t mode_ctx) {
#if CONFIG_REF_MV
  const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
  const aom_prob newmv_prob = cm->fc->newmv_prob[newmv_ctx];
#if CONFIG_EXT_INTER
  aom_write(w, mode != NEWMV && mode != NEWFROMNEARMV, newmv_prob);

  // NEWFROMNEARMV is only distinguished from NEWMV for single-reference
  // blocks, so the extra bit is sent only when !is_compound.
  if (!is_compound && (mode == NEWMV || mode == NEWFROMNEARMV))
    aom_write(w, mode == NEWFROMNEARMV, cm->fc->new2mv_prob);

  if (mode != NEWMV && mode != NEWFROMNEARMV) {
#else
  aom_write(w, mode != NEWMV, newmv_prob);

  if (mode != NEWMV) {
#endif  // CONFIG_EXT_INTER
    const int16_t zeromv_ctx = (mode_ctx >> ZEROMV_OFFSET) & ZEROMV_CTX_MASK;
    const aom_prob zeromv_prob = cm->fc->zeromv_prob[zeromv_ctx];

    // When the context says all candidate MVs are zero, the mode is implied
    // and no further bits are written.
    if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) {
      assert(mode == ZEROMV);
      return;
    }

    aom_write(w, mode != ZEROMV, zeromv_prob);

    if (mode != ZEROMV) {
      int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
      aom_prob refmv_prob;

      // Context overrides encoded as flag bits in mode_ctx.
      if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
      if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
      if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;

      refmv_prob = cm->fc->refmv_prob[refmv_ctx];
      aom_write(w, mode != NEARESTMV, refmv_prob);
    }
  }
#else
  assert(is_inter_mode(mode));
#if CONFIG_EC_MULTISYMBOL
  aom_write_symbol(w, av1_inter_mode_ind[INTER_OFFSET(mode)],
                   cm->fc->inter_mode_cdf[mode_ctx], INTER_MODES);
#else
  {
    const aom_prob *const inter_probs = cm->fc->inter_mode_probs[mode_ctx];
    av1_write_token(w, av1_inter_mode_tree, inter_probs,
                    &inter_mode_encodings[INTER_OFFSET(mode)]);
  }
#endif
#endif
}
275
276#if CONFIG_REF_MV
// Signals the dynamic-reference-list (DRL) index for NEWMV and NEARMV
// blocks: a unary sequence of "not this index" bits, each conditioned on a
// context derived from the reference MV stack.  Bits are emitted only while
// more candidates remain in the stack.
static void write_drl_idx(const AV1_COMMON *cm, const MB_MODE_INFO *mbmi,
                          const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

  assert(mbmi->ref_mv_idx < 3);

  if (mbmi->mode == NEWMV) {
    int idx;
    // Up to two bits select among the first three candidates.
    for (idx = 0; idx < 2; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];

        aom_write(w, mbmi->ref_mv_idx != idx, drl_prob);
        if (mbmi->ref_mv_idx == idx) return;
      }
    }
    return;
  }

  if (mbmi->mode == NEARMV) {
    int idx;
    // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
    for (idx = 1; idx < 3; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];

        aom_write(w, mbmi->ref_mv_idx != (idx - 1), drl_prob);
        if (mbmi->ref_mv_idx == (idx - 1)) return;
      }
    }
    return;
  }
}
314#endif
315
316#if CONFIG_EXT_INTER
// Writes a compound (two-reference) inter mode as a tree-coded token under
// the probabilities selected by mode_ctx.
static void write_inter_compound_mode(AV1_COMMON *cm, aom_writer *w,
                                      PREDICTION_MODE mode,
                                      const int16_t mode_ctx) {
  const aom_prob *const inter_compound_probs =
      cm->fc->inter_compound_mode_probs[mode_ctx];

  assert(is_inter_compound_mode(mode));
  av1_write_token(w, av1_inter_compound_mode_tree, inter_compound_probs,
                  &inter_compound_mode_encodings[INTER_COMPOUND_OFFSET(mode)]);
}
327#endif // CONFIG_EXT_INTER
328
// Writes `data` as a fixed-width literal, using just enough bits to
// represent any value in [0, max].
static void encode_unsigned_max(struct aom_write_bit_buffer *wb, int data,
                                int max) {
  const int num_bits = get_unsigned_bits(max);
  aom_wb_write_literal(wb, data, num_bits);
}
333
// Derives per-branch probabilities from the observed counts of a coding
// tree and writes a conditional differential update for each of the n - 1
// node probabilities.  `probwt` weights the update cost (e.g. number of
// tile groups sharing the header).
static void prob_diff_update(const aom_tree_index *tree,
                             aom_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/], int n,
                             int probwt, aom_writer *w) {
  int i;
  unsigned int branch_ct[32][2];

  // Assuming max number of probabilities <= 32
  assert(n <= 32);

  av1_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    av1_cond_prob_diff_update(w, &probs[i], branch_ct[i], probwt);
}
Yaowu Xu17fd2f22016-11-17 18:23:28 -0800348#if CONFIG_EXT_INTER || CONFIG_EXT_TX || !CONFIG_EC_ADAPT
// Returns the total rate savings (in the cost units used by
// av1_cond_prob_diff_update_savings) that would result from differentially
// updating all n - 1 tree probabilities; used to decide whether an update
// is worth signaling.  Does not write anything.
static int prob_diff_update_savings(const aom_tree_index *tree,
                                    aom_prob probs[/*n - 1*/],
                                    const unsigned int counts[/*n - 1*/], int n,
                                    int probwt) {
  int i;
  unsigned int branch_ct[32][2];
  int savings = 0;

  // Assuming max number of probabilities <= 32
  assert(n <= 32);
  av1_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i) {
    savings +=
        av1_cond_prob_diff_update_savings(&probs[i], branch_ct[i], probwt);
  }
  return savings;
}
Yaowu Xu17fd2f22016-11-17 18:23:28 -0800366#endif // CONFIG_EXT_INTER || CONFIG_EXT_TX || !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -0700367
368#if CONFIG_VAR_TX
// Recursively signals the variable transform-size partition tree for an
// inter block: at each node, one bit says whether the current tx_size is
// used (0) or split into four sub-transforms (1).  The above/left txfm
// context arrays are updated in lockstep with the decoder so later contexts
// match.
static void write_tx_size_vartx(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
                                int depth, int blk_row, int blk_col,
                                aom_writer *w) {
  const int tx_row = blk_row >> 1;
  const int tx_col = blk_col >> 1;
  const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
  const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);

  int ctx = txfm_partition_context(xd->above_txfm_context + tx_col,
                                   xd->left_txfm_context + tx_row,
                                   mbmi->sb_type, tx_size);

  // Blocks outside the visible frame are skipped entirely.
  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  // At maximum split depth no bit is sent; only the context is updated.
  if (depth == MAX_VARTX_DEPTH) {
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size, tx_size);
    return;
  }

  if (tx_size == mbmi->inter_tx_size[tx_row][tx_col]) {
    // No split: write a 0 bit and record this tx_size in the contexts.
    aom_write(w, 0, cm->fc->txfm_partition_prob[ctx]);
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size, tx_size);
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsl = tx_size_wide_unit[sub_txs];
    int i;

    aom_write(w, 1, cm->fc->txfm_partition_prob[ctx]);

    // An 8x8 split bottoms out at 4x4 with no further recursion.
    if (tx_size == TX_8X8) {
      txfm_partition_update(xd->above_txfm_context + tx_col,
                            xd->left_txfm_context + tx_row, TX_4X4, tx_size);
      return;
    }

    assert(bsl > 0);
    // Recurse into the four quadrants in raster order.
    for (i = 0; i < 4; ++i) {
      int offsetr = blk_row + (i >> 1) * bsl;
      int offsetc = blk_col + (i & 0x01) * bsl;
      write_tx_size_vartx(cm, xd, mbmi, sub_txs, depth + 1, offsetr, offsetc,
                          w);
    }
  }
}
416
// Writes conditional differential updates for every txfm-partition context
// probability, weighted by probwt.
static void update_txfm_partition_probs(AV1_COMMON *cm, aom_writer *w,
                                        FRAME_COUNTS *counts, int probwt) {
  int k;
  for (k = 0; k < TXFM_PARTITION_CONTEXTS; ++k)
    av1_cond_prob_diff_update(w, &cm->fc->txfm_partition_prob[k],
                              counts->txfm_partition[k], probwt);
}
424#endif
425
// Signals the transform size chosen for the current block as a tree-coded
// depth symbol, using a size category and context that depend on whether
// the block is inter or intra coded.
static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  // For sub8x8 blocks the tx_size symbol does not need to be sent
  if (bsize >= BLOCK_8X8) {
    const TX_SIZE tx_size = mbmi->tx_size;
    const int is_inter = is_inter_block(mbmi);
    const int tx_size_ctx = get_tx_size_context(xd);
    const int tx_size_cat = is_inter ? inter_tx_size_cat_lookup[bsize]
                                     : intra_tx_size_cat_lookup[bsize];
    // Rectangular sizes are mapped to their square equivalent for coding.
    const TX_SIZE coded_tx_size = txsize_sqr_up_map[tx_size];
    const int depth = tx_size_to_depth(coded_tx_size);

#if CONFIG_EXT_TX && CONFIG_RECT_TX
    assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));
    assert(
        IMPLIES(is_rect_tx(tx_size), tx_size == max_txsize_rect_lookup[bsize]));
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX

    av1_write_token(w, av1_tx_size_tree[tx_size_cat],
                    cm->fc->tx_size_probs[tx_size_cat][tx_size_ctx],
                    &tx_size_encodings[tx_size_cat][depth]);
  }
}
451
452#if CONFIG_REF_MV
// Writes conditional differential updates for all REF_MV inter-mode binary
// probabilities (newmv / zeromv / refmv / drl, plus new2mv under
// CONFIG_EXT_INTER), weighted by the number of tile groups when enabled.
static void update_inter_mode_probs(AV1_COMMON *cm, aom_writer *w,
                                    FRAME_COUNTS *counts) {
  int i;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->newmv_prob[i], counts->newmv_mode[i],
                              probwt);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->zeromv_prob[i],
                              counts->zeromv_mode[i], probwt);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->refmv_prob[i], counts->refmv_mode[i],
                              probwt);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->drl_prob[i], counts->drl_mode[i],
                              probwt);
#if CONFIG_EXT_INTER
  av1_cond_prob_diff_update(w, &cm->fc->new2mv_prob, counts->new2mv_mode,
                            probwt);
#endif  // CONFIG_EXT_INTER
}
478#endif
479
480#if CONFIG_EXT_INTER
// Decides (via a savings estimate) whether updating the compound inter-mode
// tree probabilities pays for itself; writes a one-bit do_update flag and,
// when set, the per-context differential updates.
static void update_inter_compound_mode_probs(AV1_COMMON *cm, int probwt,
                                             aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i;
  int savings = 0;
  int do_update = 0;
  // First pass: estimate the total savings across all mode contexts.
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
    savings += prob_diff_update_savings(
        av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
        cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    // Second pass: actually write the updates.
    for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
      prob_diff_update(
          av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
          cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES, probwt, w);
    }
  }
}
503#endif // CONFIG_EXT_INTER
504
Yaowu Xuf883b422016-08-30 14:01:10 -0700505static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
506 int segment_id, const MODE_INFO *mi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700507 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
508 return 1;
509 } else {
510 const int skip = mi->mbmi.skip;
Yaowu Xuf883b422016-08-30 14:01:10 -0700511 aom_write(w, skip, av1_get_skip_prob(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700512 return skip;
513 }
514}
515
Yue Chen69f18e12016-09-08 14:48:15 -0700516#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
// Signals the block's motion mode.  Nothing is written when only simple
// translation is allowed; when OBMC is the strongest allowed mode a single
// bit is sent, otherwise the full motion-mode tree token is written.
static void write_motion_mode(const AV1_COMMON *cm, const MB_MODE_INFO *mbmi,
                              aom_writer *w) {
  MOTION_MODE last_motion_mode_allowed = motion_mode_allowed(mbmi);

  if (last_motion_mode_allowed == SIMPLE_TRANSLATION) return;
#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
  if (last_motion_mode_allowed == OBMC_CAUSAL) {
    aom_write(w, mbmi->motion_mode == OBMC_CAUSAL,
              cm->fc->obmc_prob[mbmi->sb_type]);
  } else {
#endif  // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
    av1_write_token(w, av1_motion_mode_tree,
                    cm->fc->motion_mode_prob[mbmi->sb_type],
                    &motion_mode_encodings[mbmi->motion_mode]);
#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
  }
#endif  // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
}
535#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
536
Arild Fuldseth07441162016-08-15 15:07:52 +0200537#if CONFIG_DELTA_Q
// Signals a signed per-block q-index delta: the magnitude is coded as a
// context-adaptive unary prefix up to DELTA_Q_SMALL; larger magnitudes send
// a 3-bit length followed by the remainder as a literal.  A sign bit is
// appended whenever the delta is nonzero.
static void write_delta_qindex(const AV1_COMMON *cm, int delta_qindex,
                               aom_writer *w) {
  int sign = delta_qindex < 0;
  int abs = sign ? -delta_qindex : delta_qindex;
  int rem_bits, thr, i = 0;
  int smallval = abs < DELTA_Q_SMALL ? 1 : 0;

  // Unary prefix: one "continue" bit per step, capped at DELTA_Q_SMALL.
  while (i < DELTA_Q_SMALL && i <= abs) {
    int bit = (i < abs);
    aom_write(w, bit, cm->fc->delta_q_prob[i]);
    i++;
  }

  if (!smallval) {
    // Escape coding for large magnitudes: bit-length then remainder.
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}
Thomas Daviesf6936102016-09-05 16:51:31 +0100561
// Writes conditional differential updates for the delta-q unary-prefix
// probabilities, weighted by the tile-group count when enabled.
static void update_delta_q_probs(AV1_COMMON *cm, aom_writer *w,
                                 FRAME_COUNTS *counts) {
  int k;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (k = 0; k < DELTA_Q_CONTEXTS; ++k) {
    av1_cond_prob_diff_update(w, &cm->fc->delta_q_prob[k], counts->delta_q[k],
                              probwt);
  }
}
Arild Fuldseth07441162016-08-15 15:07:52 +0200575#endif
576
// Writes conditional differential updates for the per-context skip-flag
// probabilities, weighted by the tile-group count when enabled.
static void update_skip_probs(AV1_COMMON *cm, aom_writer *w,
                              FRAME_COUNTS *counts) {
  int k;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (k = 0; k < SKIP_CONTEXTS; ++k) {
    av1_cond_prob_diff_update(w, &cm->fc->skip_probs[k], counts->skip[k],
                              probwt);
  }
}
590
Nathan E. Eggebaaaa162016-10-24 09:50:52 -0400591#if !CONFIG_EC_ADAPT
// Writes tree-probability differential updates for the switchable
// interpolation filter, one per filter context.
static void update_switchable_interp_probs(AV1_COMMON *cm, aom_writer *w,
                                           FRAME_COUNTS *counts) {
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
#if CONFIG_TILE_GROUPS
    const int probwt = cm->num_tg;
#else
    const int probwt = 1;
#endif
    prob_diff_update(
        av1_switchable_interp_tree, cm->fc->switchable_interp_prob[j],
        counts->switchable_interp[j], SWITCHABLE_FILTERS, probwt, w);
  }
}
Thomas Davies6519beb2016-10-19 14:46:07 +0100606#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700607
608#if CONFIG_EXT_TX
// EXT_TX variant: decides, per extended-tx set, whether sending updated
// tx-type probabilities saves more bits than the gating cost, writes one
// gating bit per set, and if set, writes the probability-delta updates.
// Set 0 is skipped in both loops (loops start at s = 1); presumably set 0
// is fixed/implicit — TODO confirm against the ext-tx set definitions.
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  // Cost of signaling "update" vs "no update" with the gating bit.
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;
  int s;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;  // weight update cost by tile-group count
#else
  const int probwt = 1;
#endif
  // Inter tx-type probabilities: one gating bit per set s.
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    int savings = 0;
    int do_update = 0;
    // First pass: accumulate the rate saved by updating this set.
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_inter_ext_tx_for_txsize[s][i]) continue;
      savings += prob_diff_update_savings(
          av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
          cm->counts.inter_ext_tx[s][i], num_ext_tx_set_inter[s], probwt);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    // Second pass: emit the updates only if the gate was set.
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_inter_ext_tx_for_txsize[s][i]) continue;
        prob_diff_update(
            av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
            cm->counts.inter_ext_tx[s][i], num_ext_tx_set_inter[s], probwt, w);
      }
    }
  }

  // Intra tx-type probabilities: same two-pass scheme, but probabilities
  // are additionally conditioned on the intra prediction mode j.
  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    int savings = 0;
    int do_update = 0;
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_intra_ext_tx_for_txsize[s][i]) continue;
      for (j = 0; j < INTRA_MODES; ++j)
        savings += prob_diff_update_savings(
            av1_ext_tx_intra_tree[s], cm->fc->intra_ext_tx_prob[s][i][j],
            cm->counts.intra_ext_tx[s][i][j], num_ext_tx_set_intra[s], probwt);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_intra_ext_tx_for_txsize[s][i]) continue;
        for (j = 0; j < INTRA_MODES; ++j)
          prob_diff_update(av1_ext_tx_intra_tree[s],
                           cm->fc->intra_ext_tx_prob[s][i][j],
                           cm->counts.intra_ext_tx[s][i][j],
                           num_ext_tx_set_intra[s], probwt, w);
      }
    }
  }
}
664
665#else
Nathan E. Eggebaaaa162016-10-24 09:50:52 -0400666#if !CONFIG_EC_ADAPT
// Non-EXT_TX variant: writes one gating bit plus optional probability-delta
// updates for the intra tx-type probabilities (conditioned on tx size and
// intra mode), then repeats the same scheme for the inter tx-type
// probabilities (conditioned on tx size only).
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  // Cost of signaling "update" vs "no update" with the gating bit.
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;

  int savings = 0;
  int do_update = 0;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;  // weight update cost by tile-group count
#else
  const int probwt = 1;
#endif
  // Intra: first pass accumulates the rate saved by updating.
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    for (j = 0; j < TX_TYPES; ++j)
      savings += prob_diff_update_savings(
          av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
          cm->counts.intra_ext_tx[i][j], TX_TYPES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  // Second pass: emit updates only if the gate was set.
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      for (j = 0; j < TX_TYPES; ++j) {
        prob_diff_update(av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
                         cm->counts.intra_ext_tx[i][j], TX_TYPES, probwt, w);
      }
    }
  }

  // Inter: same gate-then-update scheme, per tx size.
  savings = 0;
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    savings +=
        prob_diff_update_savings(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                                 cm->counts.inter_ext_tx[i], TX_TYPES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      prob_diff_update(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                       cm->counts.inter_ext_tx[i], TX_TYPES, probwt, w);
    }
  }
}
#endif  // !CONFIG_EC_ADAPT
#endif  // CONFIG_EXT_TX
Urvang Joshib100db72016-10-12 16:28:56 -0700713#if CONFIG_PALETTE
Yaowu Xuf883b422016-08-30 14:01:10 -0700714static void pack_palette_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700715 int num) {
716 int i;
717 const TOKENEXTRA *p = *tp;
718
719 for (i = 0; i < num; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700720 av1_write_token(w, av1_palette_color_tree[n - 2], p->context_tree,
721 &palette_color_encodings[n - 2][p->token]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700722 ++p;
723 }
724
725 *tp = p;
726}
Urvang Joshib100db72016-10-12 16:28:56 -0700727#endif // CONFIG_PALETTE
Yushin Cho77bba8d2016-11-04 16:36:56 -0700728#if !CONFIG_PVQ
Yaowu Xuc27fc142016-08-22 16:08:15 -0700729#if CONFIG_SUPERTX
Thomas Davies80188d12016-10-26 16:08:35 -0700730static void update_supertx_probs(AV1_COMMON *cm, int probwt, aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700731 const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
732 av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700733 int i, j;
734 int savings = 0;
735 int do_update = 0;
736 for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
Jingning Han5b7706a2016-12-21 09:55:10 -0800737 for (j = TX_8X8; j < TX_SIZES; ++j) {
Thomas Davies80188d12016-10-26 16:08:35 -0700738 savings += av1_cond_prob_diff_update_savings(
739 &cm->fc->supertx_prob[i][j], cm->counts.supertx[i][j], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700740 }
741 }
742 do_update = savings > savings_thresh;
Yaowu Xuf883b422016-08-30 14:01:10 -0700743 aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700744 if (do_update) {
745 for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
Jingning Han5b7706a2016-12-21 09:55:10 -0800746 for (j = TX_8X8; j < TX_SIZES; ++j) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700747 av1_cond_prob_diff_update(w, &cm->fc->supertx_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -0700748 cm->counts.supertx[i][j], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700749 }
750 }
751 }
752}
753#endif // CONFIG_SUPERTX
754
// Writes the coefficient tokens of one block from the token stream *tp to
// the bitstream, stopping at `stop` or at an EOSB_TOKEN marker, and
// advances *tp past the consumed tokens. For each token it writes the
// tree/symbol part first, then any extra magnitude bits, then the sign bit.
// With CONFIG_VAR_TX the loop also terminates after an EOB token or after
// seg_eob tokens (the 2D size of the transform).
static void pack_mb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop,
                           aom_bit_depth_t bit_depth, const TX_SIZE tx_size,
                           TOKEN_STATS *token_stats) {
  const TOKENEXTRA *p = *tp;
#if CONFIG_VAR_TX
  int count = 0;
  const int seg_eob = tx_size_2d[tx_size];  // max tokens for this tx size
#endif
#if CONFIG_AOM_HIGHBITDEPTH
  // Extra-bit probability tables depend on bit depth (8/10/12).
  const av1_extra_bit *const extra_bits_table =
      (bit_depth == AOM_BITS_12)
          ? av1_extra_bits_high12
          : (bit_depth == AOM_BITS_10) ? av1_extra_bits_high10 : av1_extra_bits;
#else
  const av1_extra_bit *const extra_bits_table = av1_extra_bits;
  (void)bit_depth;
#endif  // CONFIG_AOM_HIGHBITDEPTH

  while (p < stop && p->token != EOSB_TOKEN) {
    const int token = p->token;
    // Used only as the extra-bits loop index below (declared here because
    // of the declaration-before-statement style of this file).
    aom_tree_index index = 0;
#if !CONFIG_EC_MULTISYMBOL
    const struct av1_token *const coef_encoding = &av1_coef_encodings[token];
    int coef_value = coef_encoding->value;
    int coef_length = coef_encoding->len;
#endif  // !CONFIG_EC_MULTISYMBOL
    const av1_extra_bit *const extra_bits = &extra_bits_table[token];

#if CONFIG_EC_MULTISYMBOL
    // EOB and ZERO are coded as binary flags; the remaining token values
    // are coded as one multisymbol with the per-block CDF.
    /* skip one or two nodes */
    if (!p->skip_eob_node)
      aom_write_record(w, token != EOB_TOKEN, p->context_tree[0], token_stats);

    if (token != EOB_TOKEN) {
      aom_write_record(w, token != ZERO_TOKEN, p->context_tree[1], token_stats);

      if (token != ZERO_TOKEN) {
        aom_write_symbol(w, token - ONE_TOKEN, *p->token_cdf,
                         CATEGORY6_TOKEN - ONE_TOKEN + 1);
      }
    }
#else
    // Binary-tree coding path: walk the coefficient tree bit by bit.
    /* skip one or two nodes */
    if (p->skip_eob_node)
      coef_length -= p->skip_eob_node;
    else
      aom_write_record(w, token != EOB_TOKEN, p->context_tree[0], token_stats);

    if (token != EOB_TOKEN) {
      aom_write_record(w, token != ZERO_TOKEN, p->context_tree[1], token_stats);

      if (token != ZERO_TOKEN) {
        aom_write_record(w, token != ONE_TOKEN, p->context_tree[2],
                         token_stats);

        if (token != ONE_TOKEN) {
          // Remaining tree nodes use the pareto-model probabilities derived
          // from the pivot-node context.
          const int unconstrained_len = UNCONSTRAINED_NODES - p->skip_eob_node;
          aom_write_tree_record(
              w, av1_coef_con_tree,
              av1_pareto8_full[p->context_tree[PIVOT_NODE] - 1], coef_value,
              coef_length - unconstrained_len, 0, token_stats);
        }
      }
    }
#endif  // CONFIG_EC_MULTISYMBOL

    if (extra_bits->base_val) {
      // p->extra holds the magnitude remainder in the high bits and the
      // sign in bit 0.
      const int bit_string = p->extra;
      const int bit_string_length = extra_bits->len;  // Length of extra bits to
                                                      // be written excluding
                                                      // the sign bit.
      // For CAT6 tokens, the leading bits implied by the transform size are
      // skipped (they are known to be zero; see the assert below).
      int skip_bits = (extra_bits->base_val == CAT6_MIN_VAL)
                          ? TX_SIZES - 1 - txsize_sqr_up_map[tx_size]
                          : 0;

      if (bit_string_length > 0) {
        const unsigned char *pb = extra_bits->prob;
        const int value = bit_string >> 1;
        const int num_bits = bit_string_length;  // number of bits in value
        assert(num_bits > 0);

        // Write the magnitude MSB-first.
        for (index = 0; index < num_bits; ++index) {
          const int shift = num_bits - index - 1;
          const int bb = (value >> shift) & 1;
          if (skip_bits) {
            --skip_bits;
            assert(!bb);
          } else {
            aom_write_record(w, bb, pb[index], token_stats);
          }
        }
      }

      // Sign bit last.
      aom_write_bit_record(w, bit_string & 1, token_stats);
    }
    ++p;

#if CONFIG_VAR_TX
    ++count;
    if (token == EOB_TOKEN || count == seg_eob) break;
#endif
  }

  *tp = p;
}
#endif  // !CONFIG_PVQ
Yaowu Xuc27fc142016-08-22 16:08:15 -0700862#if CONFIG_VAR_TX
// Recursively writes the coefficient tokens for one transform block under
// VAR_TX. If the coded transform size for this position matches tx_size,
// the tokens are emitted via pack_mb_tokens; otherwise the block is split
// into four sub-blocks of the next-smaller transform size and each is
// visited recursively. Sub-blocks outside the visible block area are
// skipped.
static void pack_txb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                            const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
                            MB_MODE_INFO *mbmi, int plane,
                            BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
                            int block, int blk_row, int blk_col,
                            TX_SIZE tx_size, TOKEN_STATS *token_stats) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  // Chroma positions are downscaled by the plane's subsampling.
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  TX_SIZE plane_tx_size;
  const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
  const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);

  // Nothing to write for positions beyond the visible block area.
  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  // The chroma transform size is derived from the corresponding luma
  // inter_tx_size entry via the lookup table.
  plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];

  if (tx_size == plane_tx_size) {
    // Leaf: write this transform block's tokens. Per-block stats are
    // gathered separately so RD debugging can attribute cost per position.
    TOKEN_STATS tmp_token_stats;
    init_token_stats(&tmp_token_stats);
    pack_mb_tokens(w, tp, tok_end, bit_depth, tx_size, &tmp_token_stats);
#if CONFIG_RD_DEBUG
    token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
    token_stats->cost += tmp_token_stats.cost;
#endif
  } else {
    // Split into four sub-transforms of the next smaller size.
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsl = tx_size_wide_unit[sub_txs];
    int i;

    assert(bsl > 0);

    for (i = 0; i < 4; ++i) {
      const int offsetr = blk_row + (i >> 1) * bsl;
      const int offsetc = blk_col + (i & 0x01) * bsl;
      const int step = tx_size_wide_unit[sub_txs] * tx_size_high_unit[sub_txs];

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      pack_txb_tokens(w, tp, tok_end, xd, mbmi, plane, plane_bsize, bit_depth,
                      block, offsetr, offsetc, sub_txs, token_stats);
      block += step;
    }
  }
}
911#endif
912
Yaowu Xuf883b422016-08-30 14:01:10 -0700913static void write_segment_id(aom_writer *w, const struct segmentation *seg,
Thomas9ac55082016-09-23 18:04:17 +0100914 struct segmentation_probs *segp, int segment_id) {
Nathan E. Eggef627e582016-08-19 20:06:51 -0400915 if (seg->enabled && seg->update_map) {
Nathan E. Egge31296062016-11-16 09:44:26 -0500916#if CONFIG_EC_MULTISYMBOL
Nathan E. Eggef627e582016-08-19 20:06:51 -0400917 aom_write_symbol(w, segment_id, segp->tree_cdf, MAX_SEGMENTS);
918#else
Nathan E. Eggeeeedc632016-06-19 12:02:33 -0400919 aom_write_tree(w, av1_segment_tree, segp->tree_probs, segment_id, 3, 0);
Nathan E. Eggef627e582016-08-19 20:06:51 -0400920#endif
921 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700922}
923
// Encodes the reference-frame selection for the current block. If the
// segment pins the reference frame, nothing is written (the decoder derives
// it from the segment data). Otherwise a compound/single flag may be
// written, followed by a cascade of bits that narrows down the reference
// combination; the exact tree differs between the EXT_REFS and non-EXT_REFS
// configurations.
static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int is_compound = has_second_ref(mbmi);
  const int segment_id = mbmi->segment_id;

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    // Reference frame is fixed by the segment: nothing is signaled.
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
           get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
  } else {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      aom_write(w, is_compound, av1_get_reference_mode_prob(cm, xd));
    } else {
      assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (is_compound) {
#if CONFIG_EXT_REFS
      // bit: forward ref is GOLDEN/LAST3 (vs LAST/LAST2);
      // bit_bwd: backward ref is ALTREF (vs BWDREF).
      const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
                       mbmi->ref_frame[0] == LAST3_FRAME);
      const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
#else   // CONFIG_EXT_REFS
      const int bit = mbmi->ref_frame[0] == GOLDEN_FRAME;
#endif  // CONFIG_EXT_REFS

      aom_write(w, bit, av1_get_pred_prob_comp_ref_p(cm, xd));

#if CONFIG_EXT_REFS
      if (!bit) {
        const int bit1 = mbmi->ref_frame[0] == LAST_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_comp_ref_p1(cm, xd));
      } else {
        const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
        aom_write(w, bit2, av1_get_pred_prob_comp_ref_p2(cm, xd));
      }
      aom_write(w, bit_bwd, av1_get_pred_prob_comp_bwdref_p(cm, xd));
#endif  // CONFIG_EXT_REFS
    } else {
#if CONFIG_EXT_REFS
      // Single-reference tree: first split {ALTREF,BWDREF} vs the rest,
      // then refine within each half.
      const int bit0 = (mbmi->ref_frame[0] == ALTREF_FRAME ||
                        mbmi->ref_frame[0] == BWDREF_FRAME);
      aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd));

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd));
      } else {
        const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
                          mbmi->ref_frame[0] == GOLDEN_FRAME);
        aom_write(w, bit2, av1_get_pred_prob_single_ref_p3(cm, xd));

        if (!bit2) {
          const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
          aom_write(w, bit3, av1_get_pred_prob_single_ref_p4(cm, xd));
        } else {
          const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
          aom_write(w, bit4, av1_get_pred_prob_single_ref_p5(cm, xd));
        }
      }
#else   // CONFIG_EXT_REFS
      const int bit0 = mbmi->ref_frame[0] != LAST_FRAME;
      aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd));

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] != GOLDEN_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd));
      }
#endif  // CONFIG_EXT_REFS
    }
  }
}
1001
hui su5db97432016-10-14 16:10:14 -07001002#if CONFIG_FILTER_INTRA
// Signals filter-intra usage for luma ([0]) and chroma ([1]). For each
// plane whose prediction mode is DC_PRED (and, with CONFIG_PALETTE, whose
// palette is unused), writes a use-flag and, when set, the filter-intra
// mode as a uniform code.
static void write_filter_intra_mode_info(const AV1_COMMON *const cm,
                                         const MB_MODE_INFO *const mbmi,
                                         aom_writer *w) {
  if (mbmi->mode == DC_PRED
#if CONFIG_PALETTE
      && mbmi->palette_mode_info.palette_size[0] == 0
#endif  // CONFIG_PALETTE
      ) {
    aom_write(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[0],
              cm->fc->filter_intra_probs[0]);
    if (mbmi->filter_intra_mode_info.use_filter_intra_mode[0]) {
      const FILTER_INTRA_MODE mode =
          mbmi->filter_intra_mode_info.filter_intra_mode[0];
      write_uniform(w, FILTER_INTRA_MODES, mode);
    }
  }

  // Chroma: same signaling, indexed with [1].
  if (mbmi->uv_mode == DC_PRED
#if CONFIG_PALETTE
      && mbmi->palette_mode_info.palette_size[1] == 0
#endif  // CONFIG_PALETTE
      ) {
    aom_write(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[1],
              cm->fc->filter_intra_probs[1]);
    if (mbmi->filter_intra_mode_info.use_filter_intra_mode[1]) {
      const FILTER_INTRA_MODE mode =
          mbmi->filter_intra_mode_info.filter_intra_mode[1];
      write_uniform(w, FILTER_INTRA_MODES, mode);
    }
  }
}
hui su5db97432016-10-14 16:10:14 -07001034#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001035
hui su5db97432016-10-14 16:10:14 -07001036#if CONFIG_EXT_INTRA
// Writes the intra prediction angle deltas for blocks >= 8x8. For a
// directional luma mode the delta is uniform-coded around the mode's base
// angle and, with CONFIG_INTRA_INTERP, a switchable intra interpolation
// filter may follow. A directional chroma mode gets its own uniform-coded
// delta.
static void write_intra_angle_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
#if CONFIG_INTRA_INTERP
  const int intra_filter_ctx = av1_get_pred_context_intra_interp(xd);
  int p_angle;
#endif  // CONFIG_INTRA_INTERP

  (void)cm;  // only used when CONFIG_INTRA_INTERP is on
  // Angle deltas are not signaled for sub-8x8 blocks.
  if (bsize < BLOCK_8X8) return;

  if (av1_is_directional_mode(mbmi->mode, bsize)) {
    // Uniform code in [0, 2 * max_angle_delta], centered on zero delta.
    const int max_angle_delta = av1_get_max_angle_delta(mbmi->sb_type, 0);
    write_uniform(w, 2 * max_angle_delta + 1,
                  max_angle_delta + mbmi->angle_delta[0]);
#if CONFIG_INTRA_INTERP
    // The interpolation filter is only signaled for angles where switching
    // is allowed.
    p_angle = mode_to_angle_map[mbmi->mode] +
              mbmi->angle_delta[0] * av1_get_angle_step(mbmi->sb_type, 0);
    if (av1_is_intra_filter_switchable(p_angle)) {
      av1_write_token(w, av1_intra_filter_tree,
                      cm->fc->intra_filter_probs[intra_filter_ctx],
                      &intra_filter_encodings[mbmi->intra_filter]);
    }
#endif  // CONFIG_INTRA_INTERP
  }

  if (av1_is_directional_mode(mbmi->uv_mode, bsize)) {
    write_uniform(w, 2 * MAX_ANGLE_DELTA_UV + 1,
                  MAX_ANGLE_DELTA_UV + mbmi->angle_delta[1]);
  }
}
1069#endif // CONFIG_EXT_INTRA
1070
// Signals the block's interpolation filter(s) when the frame-level filter
// is SWITCHABLE, and tallies the encoder's filter-selection statistics.
// With CONFIG_DUAL_FILTER a filter is written per direction (row/col), and
// nothing is written when no sub-pel interpolation is needed.
static void write_mb_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;

  if (cm->interp_filter == SWITCHABLE) {
#if CONFIG_DUAL_FILTER
    int dir;
    if (!av1_is_interp_needed(xd)) {
      // Filter is implied; encoder must have chosen the default.
      assert(mbmi->interp_filter[0] == EIGHTTAP_REGULAR);
      return;
    }

    for (dir = 0; dir < 2; ++dir) {
      // Signal only when this direction (or its second-ref counterpart,
      // dir + 2) actually has a sub-pel motion component.
      if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
          (mbmi->ref_frame[1] > INTRA_FRAME &&
           has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
        const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
        av1_write_token(w, av1_switchable_interp_tree,
                        cm->fc->switchable_interp_prob[ctx],
                        &switchable_interp_encodings[mbmi->interp_filter[dir]]);
        ++cpi->interp_filter_selected[0][mbmi->interp_filter[dir]];
      }
    }
#else
    {
      const int ctx = av1_get_pred_context_switchable_interp(xd);
#if CONFIG_EC_MULTISYMBOL
      aom_write_symbol(w, av1_switchable_interp_ind[mbmi->interp_filter],
                       cm->fc->switchable_interp_cdf[ctx], SWITCHABLE_FILTERS);
#else
      av1_write_token(w, av1_switchable_interp_tree,
                      cm->fc->switchable_interp_prob[ctx],
                      &switchable_interp_encodings[mbmi->interp_filter]);
#endif
      ++cpi->interp_filter_selected[0][mbmi->interp_filter];
    }
#endif  // CONFIG_DUAL_FILTER
  }
}
1111
Urvang Joshib100db72016-10-12 16:28:56 -07001112#if CONFIG_PALETTE
// Signals palette usage for the current block. For each plane whose mode is
// DC_PRED: a palette-present flag, then (if present) the palette size via a
// token tree, the palette colors as raw literals at the frame bit depth,
// and the first color index as a uniform code. The luma flag's probability
// is conditioned on whether the above/left neighbors used a palette; the
// chroma flag is conditioned on whether the luma palette is in use.
static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                    const MODE_INFO *const mi, aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
  int palette_ctx = 0;
  int n, i;

  if (mbmi->mode == DC_PRED) {
    n = pmi->palette_size[0];
    // Context: number of neighbors (0-2) that used a luma palette.
    if (above_mi)
      palette_ctx += (above_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    if (left_mi)
      palette_ctx += (left_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    aom_write(w, n > 0,
              av1_default_palette_y_mode_prob[bsize - BLOCK_8X8][palette_ctx]);
    if (n > 0) {
      // Size encodings are indexed by n - 2 (minimum palette size is 2).
      av1_write_token(w, av1_palette_size_tree,
                      av1_default_palette_y_size_prob[bsize - BLOCK_8X8],
                      &palette_size_encodings[n - 2]);
      for (i = 0; i < n; ++i)
        aom_write_literal(w, pmi->palette_colors[i], cm->bit_depth);
      write_uniform(w, n, pmi->palette_first_color_idx[0]);
    }
  }

  if (mbmi->uv_mode == DC_PRED) {
    n = pmi->palette_size[1];
    aom_write(w, n > 0,
              av1_default_palette_uv_mode_prob[pmi->palette_size[0] > 0]);
    if (n > 0) {
      av1_write_token(w, av1_palette_size_tree,
                      av1_default_palette_uv_size_prob[bsize - BLOCK_8X8],
                      &palette_size_encodings[n - 2]);
      // U colors live at offset PALETTE_MAX_SIZE, V at 2 * PALETTE_MAX_SIZE.
      for (i = 0; i < n; ++i) {
        aom_write_literal(w, pmi->palette_colors[PALETTE_MAX_SIZE + i],
                          cm->bit_depth);
        aom_write_literal(w, pmi->palette_colors[2 * PALETTE_MAX_SIZE + i],
                          cm->bit_depth);
      }
      write_uniform(w, n, pmi->palette_first_color_idx[1]);
    }
  }
}
Urvang Joshib100db72016-10-12 16:28:56 -07001159#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001160
// Signals the transform type for the current block, unless the tx type is
// fixed (FIXED_TX_TYPE), the block is skipped, qindex is 0 (lossless-like;
// no signaling), supertx is active, or the segment forces skip. With
// CONFIG_EXT_TX the type is coded within the extended-tx set chosen by
// (tx size, block size, inter/intra); set 0 needs no signaling. Without
// EXT_TX, only tx sizes below 32x32 signal a type, conditioned on tx size
// (inter) or on tx size and intra mode (intra).
static void write_tx_type(const AV1_COMMON *const cm,
                          const MB_MODE_INFO *const mbmi,
#if CONFIG_SUPERTX
                          const int supertx_enabled,
#endif
                          aom_writer *w) {
  const int is_inter = is_inter_block(mbmi);
#if CONFIG_VAR_TX
  // Under VAR_TX, inter blocks code the type against the minimum tx size
  // actually used within the block.
  const TX_SIZE tx_size = is_inter ? mbmi->min_tx_size : mbmi->tx_size;
#else
  const TX_SIZE tx_size = mbmi->tx_size;
#endif
  if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
    const BLOCK_SIZE bsize = mbmi->sb_type;
    if (get_ext_tx_types(tx_size, bsize, is_inter) > 1 && cm->base_qindex > 0 &&
        !mbmi->skip &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      int eset = get_ext_tx_set(tx_size, bsize, is_inter);
      if (is_inter) {
        assert(ext_tx_used_inter[eset][mbmi->tx_type]);
        // Set 0 has a single/implicit type; nothing to write.
        if (eset > 0)
          av1_write_token(
              w, av1_ext_tx_inter_tree[eset],
              cm->fc->inter_ext_tx_prob[eset][txsize_sqr_map[tx_size]],
              &ext_tx_inter_encodings[eset][mbmi->tx_type]);
      } else if (ALLOW_INTRA_EXT_TX) {
        if (eset > 0)
          av1_write_token(w, av1_ext_tx_intra_tree[eset],
                          cm->fc->intra_ext_tx_prob[eset][tx_size][mbmi->mode],
                          &ext_tx_intra_encodings[eset][mbmi->tx_type]);
      }
    }
#else
    if (tx_size < TX_32X32 && cm->base_qindex > 0 && !mbmi->skip &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      if (is_inter) {
#if CONFIG_EC_MULTISYMBOL
        aom_write_symbol(w, av1_ext_tx_ind[mbmi->tx_type],
                         cm->fc->inter_ext_tx_cdf[tx_size], TX_TYPES);
#else
        av1_write_token(w, av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[tx_size],
                        &ext_tx_encodings[mbmi->tx_type]);
#endif
      } else {
#if CONFIG_EC_MULTISYMBOL
        aom_write_symbol(
            w, av1_ext_tx_ind[mbmi->tx_type],
            cm->fc->intra_ext_tx_cdf[tx_size]
                                    [intra_mode_to_tx_type_context[mbmi->mode]],
            TX_TYPES);
#else
        av1_write_token(
            w, av1_ext_tx_tree,
            cm->fc
                ->intra_ext_tx_prob[tx_size]
                                   [intra_mode_to_tx_type_context[mbmi->mode]],
            &ext_tx_encodings[mbmi->tx_type]);
#endif
      }
    }
#endif  // CONFIG_EXT_TX
  }
}
1231
// Writes all mode information for one block of an inter (non-key) frame:
// segment id, skip flag, optional delta-q, intra/inter flag, tx size,
// prediction mode(s), motion vectors, interpolation filter and tx type.
// The order of the aom_write*() calls below *is* the bitstream syntax and
// must mirror the decoder's read side exactly.
static void pack_inter_mode_mvs(AV1_COMP *cpi, const MODE_INFO *mi,
#if CONFIG_SUPERTX
                                int supertx_enabled,
#endif
                                aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
#if !CONFIG_REF_MV
  nmv_context *nmvc = &cm->fc->nmvc;
#endif

#if CONFIG_DELTA_Q
  // Delta-q coding updates xd->prev_qindex, so xd cannot be const here.
  MACROBLOCK *const x = &cpi->td.mb;
  MACROBLOCKD *const xd = &x->e_mbd;
#else
  const MACROBLOCK *x = &cpi->td.mb;
  const MACROBLOCKD *xd = &x->e_mbd;
#endif
  const struct segmentation *const seg = &cm->seg;
  struct segmentation_probs *const segp = &cm->fc->seg;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
  const PREDICTION_MODE mode = mbmi->mode;
  const int segment_id = mbmi->segment_id;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;
  const int is_inter = is_inter_block(mbmi);
  const int is_compound = has_second_ref(mbmi);
  int skip, ref;
#if CONFIG_CB4X4
  // With CB4X4 every block size is coded through the unified (>=8x8) path.
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif

  // Segment id: either temporally predicted (flag + optional explicit id)
  // or always explicit.
  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mbmi->seg_id_predicted;
      aom_prob pred_prob = av1_get_pred_prob_seg_id(segp, xd);
      aom_write(w, pred_flag, pred_prob);
      if (!pred_flag) write_segment_id(w, seg, segp, segment_id);
    } else {
      write_segment_id(w, seg, segp, segment_id);
    }
  }

#if CONFIG_SUPERTX
  // Under supertx the skip flag is signalled at the supertx level, not here.
  if (supertx_enabled)
    skip = mbmi->skip;
  else
    skip = write_skip(cm, xd, segment_id, mi, w);
#else
  skip = write_skip(cm, xd, segment_id, mi, w);
#endif  // CONFIG_SUPERTX
#if CONFIG_DELTA_Q
  if (cm->delta_q_present_flag) {
    // Delta-q is coded once per superblock, at its top-left mi unit, and
    // only when the superblock is not entirely skipped.
    int mi_row = (-xd->mb_to_top_edge) >> (MI_SIZE_LOG2 + 3);
    int mi_col = (-xd->mb_to_left_edge) >> (MI_SIZE_LOG2 + 3);
    int super_block_upper_left =
        ((mi_row & MAX_MIB_MASK) == 0) && ((mi_col & MAX_MIB_MASK) == 0);
    if ((bsize != BLOCK_64X64 || skip == 0) && super_block_upper_left) {
      int reduced_delta_qindex =
          (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
      write_delta_qindex(cm, reduced_delta_qindex, w);
      xd->prev_qindex = mbmi->current_q_index;
    }
  }
#endif

#if CONFIG_SUPERTX
  if (!supertx_enabled)
#endif  // CONFIG_SUPERTX
    // The intra/inter flag is implied when the segment forces a reference
    // frame, so it is only coded otherwise.
    if (!segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
      aom_write(w, is_inter, av1_get_intra_inter_prob(cm, xd));

  if (cm->tx_mode == TX_MODE_SELECT &&
#if CONFIG_CB4X4 && CONFIG_VAR_TX
      (bsize >= BLOCK_8X8 || (bsize >= BLOCK_4X4 && is_inter && !skip)) &&
#else
      bsize >= BLOCK_8X8 &&
#endif
#if CONFIG_SUPERTX
      !supertx_enabled &&
#endif  // CONFIG_SUPERTX
      !(is_inter && skip) && !xd->lossless[segment_id]) {
#if CONFIG_VAR_TX
    if (is_inter) {  // This implies skip flag is 0.
      // Recursive transform partitioning: signal one tree per max-size
      // transform unit covering the block.
      const TX_SIZE max_tx_size = max_txsize_rect_lookup[bsize];
      const int bh = tx_size_high_unit[max_tx_size];
      const int bw = tx_size_wide_unit[max_tx_size];
      const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
      const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
      int idx, idy;
      for (idy = 0; idy < height; idy += bh)
        for (idx = 0; idx < width; idx += bw)
          write_tx_size_vartx(cm, xd, mbmi, max_tx_size, height != width, idy,
                              idx, w);
    } else {
      set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
      write_selected_tx_size(cm, xd, w);
    }
  } else {
    // No tx size signalled: still update the left/above tx contexts.
    set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
#else
    write_selected_tx_size(cm, xd, w);
#endif
  }

  if (!is_inter) {
    // ---- Intra block in an inter frame ----
    if (bsize >= BLOCK_8X8 || unify_bsize) {
#if CONFIG_EC_MULTISYMBOL
      aom_write_symbol(w, av1_intra_mode_ind[mode],
                       cm->fc->y_mode_cdf[size_group_lookup[bsize]],
                       INTRA_MODES);
#else
      write_intra_mode(w, mode, cm->fc->y_mode_prob[size_group_lookup[bsize]]);
#endif
    } else {
      // Sub-8x8: one luma mode per 4x4 sub-block.
      int idx, idy;
      const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_h) {
        for (idx = 0; idx < 2; idx += num_4x4_w) {
          const PREDICTION_MODE b_mode = mi->bmi[idy * 2 + idx].as_mode;
#if CONFIG_EC_MULTISYMBOL
          aom_write_symbol(w, av1_intra_mode_ind[b_mode], cm->fc->y_mode_cdf[0],
                           INTRA_MODES);
#else
          write_intra_mode(w, b_mode, cm->fc->y_mode_prob[0]);
#endif
        }
      }
    }
    // Chroma mode is conditioned on the luma mode.
#if CONFIG_EC_MULTISYMBOL
    aom_write_symbol(w, av1_intra_mode_ind[mbmi->uv_mode],
                     cm->fc->uv_mode_cdf[mode], INTRA_MODES);
#else
    write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mode]);
#endif
#if CONFIG_EXT_INTRA
    write_intra_angle_info(cm, xd, w);
#endif  // CONFIG_EXT_INTRA
#if CONFIG_PALETTE
    if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
      write_palette_mode_info(cm, xd, mi, w);
#endif  // CONFIG_PALETTE
#if CONFIG_FILTER_INTRA
    if (bsize >= BLOCK_8X8) write_filter_intra_mode_info(cm, mbmi, w);
#endif  // CONFIG_FILTER_INTRA
  } else {
    // ---- Inter block ----
    int16_t mode_ctx;
    write_ref_frames(cm, xd, w);

#if CONFIG_REF_MV
#if CONFIG_EXT_INTER
    if (is_compound)
      mode_ctx = mbmi_ext->compound_mode_context[mbmi->ref_frame[0]];
    else
#endif  // CONFIG_EXT_INTER
      mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
                                           mbmi->ref_frame, bsize, -1);
#else   // CONFIG_REF_MV
    mode_ctx = mbmi_ext->mode_context[mbmi->ref_frame[0]];
#endif  // CONFIG_REF_MV

    // If segment skip is not enabled code the mode.
    if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8 || unify_bsize) {
#if CONFIG_EXT_INTER
        if (is_inter_compound_mode(mode))
          write_inter_compound_mode(cm, w, mode, mode_ctx);
        else if (is_inter_singleref_mode(mode))
#endif  // CONFIG_EXT_INTER
          write_inter_mode(cm, w, mode,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                           is_compound,
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
                           mode_ctx);

#if CONFIG_REF_MV
        // Reference-MV index into the candidate stack for NEAR/NEW modes.
        if (mode == NEARMV || mode == NEWMV)
          write_drl_idx(cm, mbmi, mbmi_ext, w);
#endif
      }
    }

#if !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION
    // Without dual-filter/warped-motion the interp filter is coded here,
    // before the MVs; otherwise it is coded after (see end of this branch).
    write_mb_interp_filter(cpi, xd, w);
#endif  // !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION

    if (bsize < BLOCK_8X8 && !unify_bsize) {
      // Sub-8x8: each 4x4 sub-block codes its own mode and motion vectors.
      const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_h) {
        for (idx = 0; idx < 2; idx += num_4x4_w) {
          const int j = idy * 2 + idx;
          const PREDICTION_MODE b_mode = mi->bmi[j].as_mode;
#if CONFIG_REF_MV
#if CONFIG_EXT_INTER
          if (!is_compound)
#endif  // CONFIG_EXT_INTER
            mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
                                                 mbmi->ref_frame, bsize, j);
#endif
#if CONFIG_EXT_INTER
          if (is_inter_compound_mode(b_mode))
            write_inter_compound_mode(cm, w, b_mode, mode_ctx);
          else if (is_inter_singleref_mode(b_mode))
#endif  // CONFIG_EXT_INTER
            write_inter_mode(cm, w, b_mode,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                             has_second_ref(mbmi),
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
                             mode_ctx);

#if CONFIG_EXT_INTER
          if (b_mode == NEWMV || b_mode == NEWFROMNEARMV ||
              b_mode == NEW_NEWMV) {
#else
          if (b_mode == NEWMV) {
#endif  // CONFIG_EXT_INTER
            // NEW modes carry explicit MV residuals for each reference.
            for (ref = 0; ref < 1 + is_compound; ++ref) {
#if CONFIG_REF_MV
              int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
              int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                                        mbmi_ext->ref_mv_stack[rf_type], ref,
                                        mbmi->ref_mv_idx);
              nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
              av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[ref].as_mv,
#if CONFIG_EXT_INTER
                            &mi->bmi[j].ref_mv[ref].as_mv,
#if CONFIG_REF_MV
                            is_compound,
#endif
#else
#if CONFIG_REF_MV
                            &mi->bmi[j].pred_mv[ref].as_mv, is_compound,
#else
                            &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0].as_mv,
#endif  // CONFIG_REF_MV
#endif  // CONFIG_EXT_INTER
                            nmvc, allow_hp);
            }
          }
#if CONFIG_EXT_INTER
          // Mixed compound modes: only the NEW side codes an MV residual.
          else if (b_mode == NEAREST_NEWMV || b_mode == NEAR_NEWMV) {
#if CONFIG_REF_MV
            int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
            int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                                      mbmi_ext->ref_mv_stack[rf_type], 1,
                                      mbmi->ref_mv_idx);
            nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
            av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[1].as_mv,
                          &mi->bmi[j].ref_mv[1].as_mv,
#if CONFIG_REF_MV
                          is_compound,
#endif
                          nmvc, allow_hp);
          } else if (b_mode == NEW_NEARESTMV || b_mode == NEW_NEARMV) {
#if CONFIG_REF_MV
            int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
            int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                                      mbmi_ext->ref_mv_stack[rf_type], 0,
                                      mbmi->ref_mv_idx);
            nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
            av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[0].as_mv,
                          &mi->bmi[j].ref_mv[0].as_mv,
#if CONFIG_REF_MV
                          is_compound,
#endif
                          nmvc, allow_hp);
          }
#endif  // CONFIG_EXT_INTER
        }
      }
    } else {
      // Block-level (>=8x8 or unified) motion vector coding.
#if CONFIG_EXT_INTER
      if (mode == NEWMV || mode == NEWFROMNEARMV || mode == NEW_NEWMV) {
#else
      if (mode == NEWMV) {
#endif  // CONFIG_EXT_INTER
        int_mv ref_mv;
        for (ref = 0; ref < 1 + is_compound; ++ref) {
#if CONFIG_REF_MV
          int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
          int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                                    mbmi_ext->ref_mv_stack[rf_type], ref,
                                    mbmi->ref_mv_idx);
          nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
          ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0];
#if CONFIG_EXT_INTER
          // NEWFROMNEARMV predicts from the second (NEAR) candidate instead.
          if (mode == NEWFROMNEARMV)
            av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv,
                          &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][1].as_mv,
#if CONFIG_REF_MV
                          is_compound,
#endif
                          nmvc, allow_hp);
          else
#endif  // CONFIG_EXT_INTER
            av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv,
#if CONFIG_REF_MV
                          is_compound,
#endif
                          nmvc, allow_hp);
        }
#if CONFIG_EXT_INTER
      } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
#if CONFIG_REF_MV
        int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
        int nmv_ctx =
            av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                        mbmi_ext->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
        nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
        av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv,
                      &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv,
#if CONFIG_REF_MV
                      is_compound,
#endif
                      nmvc, allow_hp);
      } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
#if CONFIG_REF_MV
        int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
        int nmv_ctx =
            av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
                        mbmi_ext->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
        nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
#endif
        av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv,
                      &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv,
#if CONFIG_REF_MV
                      is_compound,
#endif
                      nmvc, allow_hp);
#endif  // CONFIG_EXT_INTER
      }
    }

#if CONFIG_EXT_INTER
    // Inter-intra: optional intra prediction blended into an inter block,
    // possibly wedge-partitioned.
    if (cpi->common.reference_mode != COMPOUND_REFERENCE &&
#if CONFIG_SUPERTX
        !supertx_enabled &&
#endif  // CONFIG_SUPERTX
        is_interintra_allowed(mbmi)) {
      const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
      const int bsize_group = size_group_lookup[bsize];
      aom_write(w, interintra, cm->fc->interintra_prob[bsize_group]);
      if (interintra) {
        write_interintra_mode(w, mbmi->interintra_mode,
                              cm->fc->interintra_mode_prob[bsize_group]);
        if (is_interintra_wedge_used(bsize)) {
          aom_write(w, mbmi->use_wedge_interintra,
                    cm->fc->wedge_interintra_prob[bsize]);
          if (mbmi->use_wedge_interintra) {
            aom_write_literal(w, mbmi->interintra_wedge_index,
                              get_wedge_bits_lookup(bsize));
            // Wedge sign is not signalled for interintra; it must be 0.
            assert(mbmi->interintra_wedge_sign == 0);
          }
        }
      }
    }
#endif  // CONFIG_EXT_INTER

#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_SUPERTX
    if (!supertx_enabled)
#endif  // CONFIG_SUPERTX
#if CONFIG_EXT_INTER
      if (mbmi->ref_frame[1] != INTRA_FRAME)
#endif  // CONFIG_EXT_INTER
        write_motion_mode(cm, mbmi, w);
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION

#if CONFIG_EXT_INTER
    // Compound prediction type (wedge / segment mask) for two-reference
    // blocks with simple translation.
    if (cpi->common.reference_mode != SINGLE_REFERENCE &&
        is_inter_compound_mode(mbmi->mode)
#if CONFIG_MOTION_VAR
        && mbmi->motion_mode == SIMPLE_TRANSLATION
#endif  // CONFIG_MOTION_VAR
        ) {
      av1_write_token(
          w, av1_compound_type_tree, cm->fc->compound_type_prob[bsize],
          &compound_type_encodings[mbmi->interinter_compound_data.type]);
      if (mbmi->interinter_compound_data.type == COMPOUND_WEDGE) {
        aom_write_literal(w, mbmi->interinter_compound_data.wedge_index,
                          get_wedge_bits_lookup(bsize));
        aom_write_bit(w, mbmi->interinter_compound_data.wedge_sign);
      }
#if CONFIG_COMPOUND_SEGMENT
      else if (mbmi->interinter_compound_data.type == COMPOUND_SEG) {
        aom_write_literal(w, mbmi->interinter_compound_data.mask_type,
                          MAX_SEG_MASK_BITS);
      }
#endif  // CONFIG_COMPOUND_SEGMENT
    }
#endif  // CONFIG_EXT_INTER

#if CONFIG_WARPED_MOTION
    // Warped blocks derive their filter; only signal it otherwise.
    if (mbmi->motion_mode != WARPED_CAUSAL)
#endif  // CONFIG_WARPED_MOTION
#if CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION
      write_mb_interp_filter(cpi, xd, w);
#endif  // CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION
  }

  write_tx_type(cm, mbmi,
#if CONFIG_SUPERTX
                supertx_enabled,
#endif
                w);
}
1648
// Writes the mode information for one block of a key (intra-only) frame:
// segment id, skip flag, optional delta-q, tx size, luma/chroma intra
// modes and tx type. The write order must mirror the decoder.
#if CONFIG_DELTA_Q
// Non-const xd: delta-q coding updates xd->prev_qindex.
static void write_mb_modes_kf(AV1_COMMON *cm, MACROBLOCKD *xd,
                              MODE_INFO **mi_8x8, aom_writer *w) {
  int skip;
#else
static void write_mb_modes_kf(AV1_COMMON *cm, const MACROBLOCKD *xd,
                              MODE_INFO **mi_8x8, aom_writer *w) {
#endif
  const struct segmentation *const seg = &cm->seg;
  struct segmentation_probs *const segp = &cm->fc->seg;
  const MODE_INFO *const mi = mi_8x8[0];
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
#if CONFIG_CB4X4
  // With CB4X4 every block size is coded through the unified (>=8x8) path.
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif

  if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id);

#if CONFIG_DELTA_Q
  skip = write_skip(cm, xd, mbmi->segment_id, mi, w);
  if (cm->delta_q_present_flag) {
    // Delta-q is coded once per superblock, at its top-left mi unit.
    // NOTE(review): shifts/masks here are hard-coded (>> 6, & 7) where the
    // inter path uses MI_SIZE_LOG2/MAX_MIB_MASK — presumably equivalent for
    // the default superblock size; verify they stay in sync.
    int mi_row = (-xd->mb_to_top_edge) >> 6;
    int mi_col = (-xd->mb_to_left_edge) >> 6;
    int super_block_upper_left = ((mi_row & 7) == 0) && ((mi_col & 7) == 0);
    if ((bsize != BLOCK_64X64 || skip == 0) && super_block_upper_left) {
      int reduced_delta_qindex =
          (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
      write_delta_qindex(cm, reduced_delta_qindex, w);
      xd->prev_qindex = mbmi->current_q_index;
    }
  }
#else
  write_skip(cm, xd, mbmi->segment_id, mi, w);
#endif

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !xd->lossless[mbmi->segment_id])
    write_selected_tx_size(cm, xd, w);

  if (bsize >= BLOCK_8X8 || unify_bsize) {
    // Luma mode, contexted on the above/left neighbors.
#if CONFIG_EC_MULTISYMBOL
    aom_write_symbol(w, av1_intra_mode_ind[mbmi->mode],
                     get_y_mode_cdf(cm, mi, above_mi, left_mi, 0), INTRA_MODES);
#else
    write_intra_mode(w, mbmi->mode,
                     get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
#endif
  } else {
    // Sub-8x8: one luma mode per 4x4 sub-block.
    const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
    const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
    int idx, idy;

    for (idy = 0; idy < 2; idy += num_4x4_h) {
      for (idx = 0; idx < 2; idx += num_4x4_w) {
        const int block = idy * 2 + idx;
#if CONFIG_EC_MULTISYMBOL
        aom_write_symbol(w, av1_intra_mode_ind[mi->bmi[block].as_mode],
                         get_y_mode_cdf(cm, mi, above_mi, left_mi, block),
                         INTRA_MODES);
#else
        write_intra_mode(w, mi->bmi[block].as_mode,
                         get_y_mode_probs(cm, mi, above_mi, left_mi, block));
#endif
      }
    }
  }
  // Chroma mode is conditioned on the luma mode.
#if CONFIG_EC_MULTISYMBOL
  aom_write_symbol(w, av1_intra_mode_ind[mbmi->uv_mode],
                   cm->fc->uv_mode_cdf[mbmi->mode], INTRA_MODES);
#else
  write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mbmi->mode]);
#endif
#if CONFIG_EXT_INTRA
  write_intra_angle_info(cm, xd, w);
#endif  // CONFIG_EXT_INTRA
#if CONFIG_PALETTE
  if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
    write_palette_mode_info(cm, xd, mi, w);
#endif  // CONFIG_PALETTE
#if CONFIG_FILTER_INTRA
  if (bsize >= BLOCK_8X8) write_filter_intra_mode_info(cm, mbmi, w);
#endif  // CONFIG_FILTER_INTRA

  // Key frames never use supertx, hence the constant 0.
  write_tx_type(cm, mbmi,
#if CONFIG_SUPERTX
                0,
#endif
                w);
}
1743
// Thin wrapper so call sites can pass supertx_enabled unconditionally:
// when CONFIG_SUPERTX is off, the argument is dropped before forwarding
// to write_modes_b().
#if CONFIG_SUPERTX
#define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                              mi_row, mi_col)                              \
  write_modes_b(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col)
#else
#define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                              mi_row, mi_col)                              \
  write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col)
#endif  // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001753
Angie Chiangd4022822016-11-02 18:30:25 -07001754#if CONFIG_RD_DEBUG
1755static void dump_mode_info(MODE_INFO *mi) {
1756 printf("\nmi->mbmi.mi_row == %d\n", mi->mbmi.mi_row);
1757 printf("&& mi->mbmi.mi_col == %d\n", mi->mbmi.mi_col);
1758 printf("&& mi->mbmi.sb_type == %d\n", mi->mbmi.sb_type);
1759 printf("&& mi->mbmi.tx_size == %d\n", mi->mbmi.tx_size);
1760 if (mi->mbmi.sb_type >= BLOCK_8X8) {
1761 printf("&& mi->mbmi.mode == %d\n", mi->mbmi.mode);
1762 } else {
1763 printf("&& mi->bmi[0].as_mode == %d\n", mi->bmi[0].as_mode);
1764 }
1765}
// Debug check: returns 1 (after printing a diagnostic) when the coefficient
// cost recorded during RD search disagrees with the cost accumulated while
// packing the bitstream for the given plane; returns 0 when they match.
static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats,
                                   int plane) {
  if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) {
#if CONFIG_VAR_TX
    int r, c;
#endif
    printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n",
           plane, rd_stats->txb_coeff_cost[plane], token_stats->cost);
#if CONFIG_VAR_TX
    // Dump both per-transform-block cost maps so the mismatching block can
    // be located by its (row, col) position.
    printf("rd txb_coeff_cost_map\n");
    for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
      for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
        printf("%d ", rd_stats->txb_coeff_cost_map[plane][r][c]);
      }
      printf("\n");
    }

    printf("pack txb_coeff_cost_map\n");
    for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
      for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
        printf("%d ", token_stats->txb_coeff_cost_map[r][c]);
      }
      printf("\n");
    }
#endif
    return 1;
  }
  return 0;
}
Angie Chiangd4022822016-11-02 18:30:25 -07001795#endif
1796
Yushin Cho77bba8d2016-11-04 16:36:56 -07001797#if CONFIG_PVQ
1798PVQ_INFO *get_pvq_block(PVQ_QUEUE *pvq_q) {
1799 PVQ_INFO *pvq;
1800
1801 assert(pvq_q->curr_pos <= pvq_q->last_pos);
1802 assert(pvq_q->curr_pos < pvq_q->buf_len);
1803
1804 pvq = pvq_q->buf + pvq_q->curr_pos;
1805 ++pvq_q->curr_pos;
1806
1807 return pvq;
1808}
1809#endif
1810
// Sets up the macroblockd context (mi pointer, mbmi_ext, row/col limits)
// for the block at (mi_row, mi_col), then dispatches to the key-frame or
// inter-frame mode-info writer.
static void write_mbmi_b(AV1_COMP *cpi, const TileInfo *const tile,
                         aom_writer *w,
#if CONFIG_SUPERTX
                         int supertx_enabled,
#endif
                         int mi_row, int mi_col) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  MODE_INFO *m;
  int bh, bw;
  xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
  m = xd->mi[0];

  assert(m->mbmi.sb_type <= cm->sb_size);

  // Block extent in mi units, used to clamp the usable context region.
  bh = mi_size_high[m->mbmi.sb_type];
  bw = mi_size_wide[m->mbmi.sb_type];

  cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);

  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cm, xd, xd->mi, w);
  } else {
#if CONFIG_VAR_TX
    // Point the left/above tx partition contexts at this block's position.
    xd->above_txfm_context = cm->above_txfm_context + mi_col;
    xd->left_txfm_context =
        xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
#endif
#if CONFIG_DUAL_FILTER
    // av1_is_interp_needed needs the ref frame buffers set up to look
    // up if they are scaled. av1_is_interp_needed is in turn needed by
    // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
    set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
#endif  // CONFIG_DUAL_FILTER
#if 0
    // NOTE(zoeliu): For debug
    if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
      const PREDICTION_MODE mode = m->mbmi.mode;
      const int segment_id = m->mbmi.segment_id;
      const BLOCK_SIZE bsize = m->mbmi.sb_type;

      // For sub8x8, simply dump out the first sub8x8 block info
      const PREDICTION_MODE b_mode =
          (bsize < BLOCK_8X8) ? m->bmi[0].as_mode : -1;
      const int mv_x = (bsize < BLOCK_8X8) ?
          m->bmi[0].as_mv[0].as_mv.row : m->mbmi.mv[0].as_mv.row;
      const int mv_y = (bsize < BLOCK_8X8) ?
          m->bmi[0].as_mv[0].as_mv.col : m->mbmi.mv[0].as_mv.col;

      printf("Before pack_inter_mode_mvs(): "
             "Frame=%d, (mi_row,mi_col)=(%d,%d), "
             "mode=%d, segment_id=%d, bsize=%d, b_mode=%d, "
             "mv[0]=(%d, %d), ref[0]=%d, ref[1]=%d\n",
             cm->current_video_frame, mi_row, mi_col,
             mode, segment_id, bsize, b_mode, mv_x, mv_y,
             m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
    }
#endif  // 0
    pack_inter_mode_mvs(cpi, m,
#if CONFIG_SUPERTX
                        supertx_enabled,
#endif
                        w);
  }
}
Yaowu Xuc27fc142016-08-22 16:08:15 -07001878
// Writes the residual tokens (or, when CONFIG_PVQ, the PVQ symbols) for the
// block at (mi_row, mi_col) into the bit writer `w`, consuming entries from
// the token buffer `*tok` (bounded by tok_end). The mode/MV syntax for the
// same block is written separately by write_mbmi_b().
static void write_tokens_b(AV1_COMP *cpi, const TileInfo *const tile,
                           aom_writer *w, const TOKENEXTRA **tok,
                           const TOKENEXTRA *const tok_end, int mi_row,
                           int mi_col) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  MODE_INFO *m;
  int plane;
  int bh, bw;
#if CONFIG_PVQ
  MB_MODE_INFO *mbmi;
  BLOCK_SIZE bsize;
  od_adapt_ctx *adapt;
  // The PVQ path reads symbols from its own queue, not the token buffer.
  (void)tok;
  (void)tok_end;
#endif
  // Point xd at the mode info for this block position.
  xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
  m = xd->mi[0];

  assert(m->mbmi.sb_type <= cm->sb_size);

  // Block dimensions in mode-info (mi) units.
  bh = mi_size_high[m->mbmi.sb_type];
  bw = mi_size_wide[m->mbmi.sb_type];

  cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);
#if CONFIG_PVQ
  mbmi = &m->mbmi;
  bsize = mbmi->sb_type;
  adapt = &cpi->td.mb.daala_enc.state.adapt;
#endif

#if !CONFIG_PVQ
#if CONFIG_PALETTE
  // Palette color-index tokens, for luma (plane 0) and chroma (plane 1).
  for (plane = 0; plane <= 1; ++plane) {
    if (m->mbmi.palette_mode_info.palette_size[plane] > 0) {
      const int rows =
          block_size_high[m->mbmi.sb_type] >> (xd->plane[plane].subsampling_y);
      const int cols =
          block_size_wide[m->mbmi.sb_type] >> (xd->plane[plane].subsampling_x);
      assert(*tok < tok_end);
      pack_palette_tokens(w, tok, m->mbmi.palette_mode_info.palette_size[plane],
                          rows * cols - 1);
      assert(*tok < tok_end + m->mbmi.skip);
    }
  }
#endif  // CONFIG_PALETTE

#if CONFIG_COEF_INTERLEAVE
  // Interleaved coefficient ordering: luma and chroma transform units are
  // written alternately (one luma TU, then one TU from each chroma plane).
  if (!m->mbmi.skip) {
    const struct macroblockd_plane *const pd_y = &xd->plane[0];
    const struct macroblockd_plane *const pd_c = &xd->plane[1];
    const TX_SIZE tx_log2_y = m->mbmi.tx_size;
    const TX_SIZE tx_log2_c = get_uv_tx_size(&m->mbmi, pd_c);
    const int tx_sz_y = (1 << tx_log2_y);
    const int tx_sz_c = (1 << tx_log2_c);

    const BLOCK_SIZE plane_bsize_y =
        get_plane_block_size(AOMMAX(m->mbmi.sb_type, 3), pd_y);
    const BLOCK_SIZE plane_bsize_c =
        get_plane_block_size(AOMMAX(m->mbmi.sb_type, 3), pd_c);

    const int num_4x4_w_y = num_4x4_blocks_wide_lookup[plane_bsize_y];
    const int num_4x4_w_c = num_4x4_blocks_wide_lookup[plane_bsize_c];
    const int num_4x4_h_y = num_4x4_blocks_high_lookup[plane_bsize_y];
    const int num_4x4_h_c = num_4x4_blocks_high_lookup[plane_bsize_c];

    // Clamp the 4x4 extents against the frame edges.
    const int max_4x4_w_y = get_max_4x4_size(num_4x4_w_y, xd->mb_to_right_edge,
                                             pd_y->subsampling_x);
    const int max_4x4_h_y = get_max_4x4_size(num_4x4_h_y, xd->mb_to_bottom_edge,
                                             pd_y->subsampling_y);
    const int max_4x4_w_c = get_max_4x4_size(num_4x4_w_c, xd->mb_to_right_edge,
                                             pd_c->subsampling_x);
    const int max_4x4_h_c = get_max_4x4_size(num_4x4_h_c, xd->mb_to_bottom_edge,
                                             pd_c->subsampling_y);

    // The max_4x4_w/h may be smaller than tx_sz under some corner cases,
    // i.e. when the SB is splitted by tile boundaries.
    const int tu_num_w_y = (max_4x4_w_y + tx_sz_y - 1) / tx_sz_y;
    const int tu_num_h_y = (max_4x4_h_y + tx_sz_y - 1) / tx_sz_y;
    const int tu_num_w_c = (max_4x4_w_c + tx_sz_c - 1) / tx_sz_c;
    const int tu_num_h_c = (max_4x4_h_c + tx_sz_c - 1) / tx_sz_c;
    const int tu_num_y = tu_num_w_y * tu_num_h_y;
    const int tu_num_c = tu_num_w_c * tu_num_h_c;

    int tu_idx_y = 0, tu_idx_c = 0;
    TOKEN_STATS token_stats;
    init_token_stats(&token_stats);

    assert(*tok < tok_end);

    while (tu_idx_y < tu_num_y) {
      // One luma TU ...
      pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_y, &token_stats);
      assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
      (*tok)++;
      tu_idx_y++;

      if (tu_idx_c < tu_num_c) {
        // ... followed by one TU from each of the two chroma planes.
        pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
        assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
        (*tok)++;

        pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
        assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
        (*tok)++;

        tu_idx_c++;
      }
    }

    // In 422 case, it's possible that Chroma has more TUs than Luma
    while (tu_idx_c < tu_num_c) {
      pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
      assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
      (*tok)++;

      pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
      assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
      (*tok)++;

      tu_idx_c++;
    }
  }
#else  // CONFIG_COEF_INTERLEAVE
  // Plane-sequential ordering: all TUs of plane 0, then plane 1, then plane 2.
  if (!m->mbmi.skip) {
    assert(*tok < tok_end);
    for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
      MB_MODE_INFO *mbmi = &m->mbmi;
#if CONFIG_VAR_TX
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      BLOCK_SIZE bsize = mbmi->sb_type;
#if CONFIG_CB4X4
      const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
#else
      const BLOCK_SIZE plane_bsize =
          get_plane_block_size(AOMMAX(bsize, BLOCK_8X8), pd);
#endif

      const int num_4x4_w =
          block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
      const int num_4x4_h =
          block_size_high[plane_bsize] >> tx_size_wide_log2[0];
      int row, col;
      TOKEN_STATS token_stats;
      init_token_stats(&token_stats);

      if (is_inter_block(mbmi)) {
        // Inter blocks may use recursive transform splits; walk the block in
        // max-TU steps and let pack_txb_tokens() handle the split tree.
        const TX_SIZE max_tx_size = max_txsize_rect_lookup[plane_bsize];
        int block = 0;
        const int step =
            tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
        const int bkw = tx_size_wide_unit[max_tx_size];
        const int bkh = tx_size_high_unit[max_tx_size];
        for (row = 0; row < num_4x4_h; row += bkh) {
          for (col = 0; col < num_4x4_w; col += bkw) {
            pack_txb_tokens(w, tok, tok_end, xd, mbmi, plane, plane_bsize,
                            cm->bit_depth, block, row, col, max_tx_size,
                            &token_stats);
            block += step;
          }
        }
#if CONFIG_RD_DEBUG
        if (mbmi->sb_type >= BLOCK_8X8 &&
            rd_token_stats_mismatch(&m->mbmi.rd_stats, &token_stats, plane)) {
          dump_mode_info(m);
          assert(0);
        }
#endif  // CONFIG_RD_DEBUG
      } else {
        // Intra blocks use a single uniform transform size per plane.
        TX_SIZE tx = plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane])
                           : m->mbmi.tx_size;
        const int bkw = tx_size_wide_unit[tx];
        const int bkh = tx_size_high_unit[tx];

        for (row = 0; row < num_4x4_h; row += bkh)
          for (col = 0; col < num_4x4_w; col += bkw)
            pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats);
      }
#else
      TX_SIZE tx =
          plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane]) : m->mbmi.tx_size;
      TOKEN_STATS token_stats;
      init_token_stats(&token_stats);
      pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats);
#if CONFIG_RD_DEBUG
      if (is_inter_block(mbmi) && mbmi->sb_type >= BLOCK_8X8 &&
          rd_token_stats_mismatch(&m->mbmi.rd_stats, &token_stats, plane)) {
        dump_mode_info(m);
        assert(0);
      }
#else
      (void)mbmi;
#endif  // CONFIG_RD_DEBUG
#endif  // CONFIG_VAR_TX

      // Each plane's token run is terminated by an EOSB (end-of-superblock)
      // marker token, which is consumed here but not written.
      assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
      (*tok)++;
    }
  }
#endif  // CONFIG_COEF_INTERLEAVE
#else
  // PVQ writes its tokens (i.e. symbols) here.
  if (!m->mbmi.skip) {
    for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
      PVQ_INFO *pvq;
      TX_SIZE tx_size =
          plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane]) : m->mbmi.tx_size;
      int idx, idy;
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      int num_4x4_w;
      int num_4x4_h;
      int max_blocks_wide;
      int max_blocks_high;
      int step = (1 << tx_size);
      const int step_xy = 1 << (tx_size << 1);
      int block = 0;

      if (tx_size == TX_4X4 && bsize <= BLOCK_8X8) {
        num_4x4_w = 2 >> xd->plane[plane].subsampling_x;
        num_4x4_h = 2 >> xd->plane[plane].subsampling_y;
      } else {
        num_4x4_w =
            num_4x4_blocks_wide_lookup[bsize] >> xd->plane[plane].subsampling_x;
        num_4x4_h =
            num_4x4_blocks_high_lookup[bsize] >> xd->plane[plane].subsampling_y;
      }
      // TODO: Do we need below for 4x4,4x8,8x4 cases as well?
      // Clamp against the frame edge (negative mb_to_*_edge means the block
      // extends past the frame boundary).
      max_blocks_wide =
          num_4x4_w + (xd->mb_to_right_edge >= 0
                           ? 0
                           : xd->mb_to_right_edge >> (5 + pd->subsampling_x));
      max_blocks_high =
          num_4x4_h + (xd->mb_to_bottom_edge >= 0
                           ? 0
                           : xd->mb_to_bottom_edge >> (5 + pd->subsampling_y));

      // TODO(yushin) Try to use av1_foreach_transformed_block_in_plane().
      // Logic like the mb_to_right_edge/mb_to_bottom_edge stuff should
      // really be centralized in one place.

      for (idy = 0; idy < max_blocks_high; idy += step) {
        for (idx = 0; idx < max_blocks_wide; idx += step) {
          const int is_keyframe = 0;
          const int encode_flip = 0;
          const int flip = 0;
          const int robust = 1;
          int i;
          const int has_dc_skip = 1;
          int *exg = &adapt->pvq.pvq_exg[plane][tx_size][0];
          int *ext = adapt->pvq.pvq_ext + tx_size * PVQ_MAX_PARTITIONS;
          generic_encoder *model = adapt->pvq.pvq_param_model;

          // Pop the next PVQ record produced during encoding.
          pvq = get_pvq_block(cpi->td.mb.pvq_q);

          // encode block skip info
          aom_encode_cdf_adapt(w, pvq->ac_dc_coded,
                               adapt->skip_cdf[2 * tx_size + (plane != 0)], 4,
                               adapt->skip_increment);

          // AC coeffs coded? (bit 1 of ac_dc_coded)
          if (pvq->ac_dc_coded & 0x02) {
            assert(pvq->bs <= tx_size);
            for (i = 0; i < pvq->nb_bands; i++) {
              if (i == 0 || (!pvq->skip_rest &&
                             !(pvq->skip_dir & (1 << ((i - 1) % 3))))) {
                pvq_encode_partition(
                    w, pvq->qg[i], pvq->theta[i], pvq->max_theta[i],
                    pvq->y + pvq->off[i], pvq->size[i], pvq->k[i], model, adapt,
                    exg + i, ext + i, robust || is_keyframe,
                    (plane != 0) * OD_TXSIZES * PVQ_MAX_PARTITIONS +
                        pvq->bs * PVQ_MAX_PARTITIONS + i,
                    is_keyframe, i == 0 && (i < pvq->nb_bands - 1),
                    pvq->skip_rest, encode_flip, flip);
              }
              if (i == 0 && !pvq->skip_rest && pvq->bs > 0) {
                aom_encode_cdf_adapt(
                    w, pvq->skip_dir,
                    &adapt->pvq
                         .pvq_skip_dir_cdf[(plane != 0) + 2 * (pvq->bs - 1)][0],
                    7, adapt->pvq.pvq_skip_dir_increment);
              }
            }
          }
          // Encode residue of DC coeff, if exist.
          if (!has_dc_skip || (pvq->ac_dc_coded & 1)) {  // DC coded?
            generic_encode(w, &adapt->model_dc[plane],
                           abs(pvq->dq_dc_residue) - has_dc_skip, -1,
                           &adapt->ex_dc[plane][pvq->bs][0], 2);
          }
          if ((pvq->ac_dc_coded & 1)) {  // DC coded?
            // Sign of the DC residue.
            aom_write_bit(w, pvq->dq_dc_residue < 0);
          }
          block += step_xy;
        }
      }  // for (idy = 0;
    }    // for (plane =
  }      // if (!m->mbmi.skip)
#endif
}
2179
// Writes one coded block: first its mode/MV syntax (write_mbmi_b), then its
// residual tokens (write_tokens_b). Under SUPERTX (non-PVQ builds), tokens
// are deferred when the block is covered by a supertx transform, since they
// are packed at the supertx level instead.
static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
                          aom_writer *w, const TOKENEXTRA **tok,
                          const TOKENEXTRA *const tok_end,
#if CONFIG_SUPERTX
                          int supertx_enabled,
#endif
                          int mi_row, int mi_col) {
  write_mbmi_b(cpi, tile, w,
#if CONFIG_SUPERTX
               supertx_enabled,
#endif
               mi_row, mi_col);
#if !CONFIG_PVQ && CONFIG_SUPERTX
  if (!supertx_enabled)
#endif
    write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
}
2197
// Signals the partition type `p` of the block at (mi_row, mi_col) with half
// block size `hbs` (in mi units). When the block extends past the right or
// bottom frame edge, only the partition choices that remain possible are
// coded (a single HORZ/VERT-vs-SPLIT bit), or nothing at all when both edges
// are crossed (SPLIT is implied).
static void write_partition(const AV1_COMMON *const cm,
                            const MACROBLOCKD *const xd, int hbs, int mi_row,
                            int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize,
                            aom_writer *w) {
  // Partitions are only signalled at 8x8 and above.
  const int is_partition_point = bsize >= BLOCK_8X8;
  const int ctx = is_partition_point
                      ? partition_plane_context(xd, mi_row, mi_col, bsize)
                      : 0;
  const aom_prob *const probs = cm->fc->partition_prob[ctx];
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (!is_partition_point) return;

  if (has_rows && has_cols) {
    // Fully inside the frame: all partition types are possible.
#if CONFIG_EXT_PARTITION_TYPES
    if (bsize <= BLOCK_8X8)
      av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]);
    else
      av1_write_token(w, av1_ext_partition_tree, probs,
                      &ext_partition_encodings[p]);
#else
#if CONFIG_EC_MULTISYMBOL
    aom_write_symbol(w, p, cm->fc->partition_cdf[ctx], PARTITION_TYPES);
#else
    av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]);
#endif
#endif  // CONFIG_EXT_PARTITION_TYPES
  } else if (!has_rows && has_cols) {
    // Bottom edge crossed: only HORZ or SPLIT remain; one bit suffices.
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    aom_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows && !has_cols) {
    // Right edge crossed: only VERT or SPLIT remain; one bit suffices.
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    aom_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    // Both edges crossed: SPLIT is the only possibility; nothing is coded.
    assert(p == PARTITION_SPLIT);
  }
}
2236
2237#if CONFIG_SUPERTX
2238#define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
2239 mi_row, mi_col, bsize) \
2240 write_modes_sb(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col, \
2241 bsize)
2242#else
2243#define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
2244 mi_row, mi_col, bsize) \
2245 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, bsize)
Alex Converseec6fb642016-10-19 11:31:48 -07002246#endif // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07002247
// Recursively writes the partition tree rooted at (mi_row, mi_col) of size
// `bsize`: the partition symbol, then the leaf blocks (modes + tokens) or the
// four/extended sub-partitions. Also emits, when enabled, the per-superblock
// SUPERTX flag and token data, the dering gain, and the CLPF block flags.
static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile,
                           aom_writer *const w, const TOKENEXTRA **tok,
                           const TOKENEXTRA *const tok_end,
#if CONFIG_SUPERTX
                           int supertx_enabled,
#endif
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  const AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  const int hbs = mi_size_wide[bsize] / 2;  // half block size in mi units
  const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
  const BLOCK_SIZE subsize = get_subsize(bsize, partition);
#if CONFIG_CB4X4
  const int unify_bsize = 1;
#else
  const int unify_bsize = 0;
#endif

#if CONFIG_SUPERTX
  const int mi_offset = mi_row * cm->mi_stride + mi_col;
  MB_MODE_INFO *mbmi;
  // Only the outermost supertx level packs the supertx tokens.
  const int pack_token = !supertx_enabled;
  TX_SIZE supertx_size;
  int plane;
#endif

  // Blocks entirely outside the frame are not coded.
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
#if CONFIG_SUPERTX
  mbmi = &cm->mi_grid_visible[mi_offset]->mbmi;
  xd->mi = cm->mi_grid_visible + mi_offset;
  set_mi_row_col(xd, tile, mi_row, mi_size_high[bsize], mi_col,
                 mi_size_wide[bsize], cm->mi_rows, cm->mi_cols);
  // Signal whether this partitioned block is coded with a single supertx
  // transform (only for inter frames, non-lossless, eligible sizes).
  if (!supertx_enabled && !frame_is_intra_only(cm) &&
      partition != PARTITION_NONE && bsize <= MAX_SUPERTX_BLOCK_SIZE &&
      !xd->lossless[0]) {
    aom_prob prob;
    supertx_size = max_txsize_lookup[bsize];
    prob = cm->fc->supertx_prob[partition_supertx_context_lookup[partition]]
                               [supertx_size];
    supertx_enabled = (xd->mi[0]->mbmi.tx_size == supertx_size);
    aom_write(w, supertx_enabled, prob);
  }
#endif  // CONFIG_SUPERTX
  if (subsize < BLOCK_8X8 && !unify_bsize) {
    // Sub-8x8 partitions are coded as a single leaf block.
    write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row,
                          mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        if (mi_row + hbs < cm->mi_rows)
          write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                                mi_row + hbs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        if (mi_col + hbs < cm->mi_cols)
          write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                                mi_row, mi_col + hbs);
        break;
      case PARTITION_SPLIT:
        // Recurse into the four quadrants.
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row, mi_col, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row, mi_col + hbs, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row + hbs, mi_col, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row + hbs, mi_col + hbs, subsize);
        break;
#if CONFIG_EXT_PARTITION_TYPES
      case PARTITION_HORZ_A:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        break;
      case PARTITION_HORZ_B:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col + hbs);
        break;
      case PARTITION_VERT_A:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        break;
      case PARTITION_VERT_B:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col + hbs);
        break;
#endif  // CONFIG_EXT_PARTITION_TYPES
      default: assert(0);
    }
  }
#if CONFIG_SUPERTX
  // Pack the supertx-level syntax (skip flag, tx type) and the tokens for the
  // whole supertx block, once, at the outermost supertx level.
  if (partition != PARTITION_NONE && supertx_enabled && pack_token) {
    int skip;
    const int bsw = mi_size_wide[bsize];
    const int bsh = mi_size_high[bsize];
    xd->mi = cm->mi_grid_visible + mi_offset;
    supertx_size = mbmi->tx_size;
    set_mi_row_col(xd, tile, mi_row, bsh, mi_col, bsw, cm->mi_rows,
                   cm->mi_cols);

    assert(IMPLIES(!cm->seg.enabled, mbmi->segment_id_supertx == 0));
    assert(mbmi->segment_id_supertx < MAX_SEGMENTS);

    skip = write_skip(cm, xd, mbmi->segment_id_supertx, xd->mi[0], w);
#if CONFIG_EXT_TX
    if (get_ext_tx_types(supertx_size, bsize, 1) > 1 && !skip) {
      int eset = get_ext_tx_set(supertx_size, bsize, 1);
      if (eset > 0) {
        av1_write_token(w, av1_ext_tx_inter_tree[eset],
                        cm->fc->inter_ext_tx_prob[eset][supertx_size],
                        &ext_tx_inter_encodings[eset][mbmi->tx_type]);
      }
    }
#else
    if (supertx_size < TX_32X32 && !skip) {
      av1_write_token(w, av1_ext_tx_tree,
                      cm->fc->inter_ext_tx_prob[supertx_size],
                      &ext_tx_encodings[mbmi->tx_type]);
    }
#endif  // CONFIG_EXT_TX

    if (!skip) {
      assert(*tok < tok_end);
      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        const struct macroblockd_plane *const pd = &xd->plane[plane];
        const int mbmi_txb_size = txsize_to_bsize[mbmi->tx_size];
        const BLOCK_SIZE plane_bsize = get_plane_block_size(mbmi_txb_size, pd);

        const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
        const int max_blocks_high = max_block_high(xd, plane_bsize, plane);

        int row, col;
        TX_SIZE tx =
            plane ? get_uv_tx_size(mbmi, &xd->plane[plane]) : mbmi->tx_size;
        BLOCK_SIZE txb_size = txsize_to_bsize[tx];

        const int stepr = tx_size_high_unit[txb_size];
        const int stepc = tx_size_wide_unit[txb_size];

        TOKEN_STATS token_stats;
        token_stats.cost = 0;
        for (row = 0; row < max_blocks_high; row += stepr)
          for (col = 0; col < max_blocks_wide; col += stepc)
            pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats);
        // Consume the per-plane end-of-superblock marker.
        assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
        (*tok)++;
      }
    }
#if CONFIG_VAR_TX
    // Keep the txfm-size contexts in sync for subsequent blocks.
    xd->above_txfm_context = cm->above_txfm_context + mi_col;
    xd->left_txfm_context =
        xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
    set_txfm_ctxs(xd->mi[0]->mbmi.tx_size, bsw, bsh, skip, xd);
#endif
  }
#endif  // CONFIG_SUPERTX

// update partition context
#if CONFIG_EXT_PARTITION_TYPES
  update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
#else
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
#endif  // CONFIG_EXT_PARTITION_TYPES

#if CONFIG_DERING
  // One dering gain per superblock (when dering is on and the SB is not
  // entirely skipped).
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128 && bsize == BLOCK_128X128 &&
      cm->dering_level != 0 && !sb_all_skip(cm, mi_row, mi_col)) {
    aom_write_literal(
        w,
        cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain,
        DERING_REFINEMENT_BITS);
  } else if (cm->sb_size == BLOCK_64X64 && bsize == BLOCK_64X64 &&
#else
  if (bsize == BLOCK_64X64 &&
#endif  // CONFIG_EXT_PARTITION
             cm->dering_level != 0 && !sb_all_skip(cm, mi_row, mi_col)) {
    aom_write_literal(
        w,
        cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain,
        DERING_REFINEMENT_BITS);
  }
#endif

#if CONFIG_CLPF
  // CLPF on/off flags, one bit per filter block covered by this superblock.
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128 && bsize == BLOCK_128X128 &&
      cm->clpf_blocks && cm->clpf_strength_y && cm->clpf_size != CLPF_NOSIZE) {
    const int tl = mi_row * MI_SIZE / MIN_FB_SIZE * cm->clpf_stride +
                   mi_col * MI_SIZE / MIN_FB_SIZE;
    if (cm->clpf_size == CLPF_128X128 && cm->clpf_blocks[tl] != CLPF_NOFLAG) {
      aom_write_literal(w, cm->clpf_blocks[tl], 1);
    } else if (cm->clpf_size == CLPF_64X64) {
      const int tr = tl + 2;
      const int bl = tl + 2 * cm->clpf_stride;
      const int br = tr + 2 * cm->clpf_stride;

      // Up to four bits per SB.
      if (cm->clpf_blocks[tl] != CLPF_NOFLAG)
        aom_write_literal(w, cm->clpf_blocks[tl], 1);

      if (mi_col + MI_SIZE < cm->mi_cols && cm->clpf_blocks[tr] != CLPF_NOFLAG)
        aom_write_literal(w, cm->clpf_blocks[tr], 1);

      if (mi_row + MI_SIZE < cm->mi_rows && cm->clpf_blocks[bl] != CLPF_NOFLAG)
        aom_write_literal(w, cm->clpf_blocks[bl], 1);

      if (mi_row + MI_SIZE < cm->mi_rows && mi_col + MI_SIZE < cm->mi_cols &&
          cm->clpf_blocks[br] != CLPF_NOFLAG)
        aom_write_literal(w, cm->clpf_blocks[br], 1);
    } else if (cm->clpf_size == CLPF_32X32) {
      int i, j;
      const int size = 32 / MI_SIZE;
      // Up to sixteen bits per SB.
      for (i = 0; i < 4; ++i)
        for (j = 0; j < 4; ++j) {
          const int index = tl + i * cm->clpf_stride + j;
          if (mi_row + i * size < cm->mi_rows &&
              mi_col + j * size < cm->mi_cols &&
              cm->clpf_blocks[index] != CLPF_NOFLAG)
            aom_write_literal(w, cm->clpf_blocks[index], 1);
        }
    }
  } else if (cm->sb_size == BLOCK_64X64 && bsize == BLOCK_64X64 &&
#else
  if (bsize == BLOCK_64X64 &&
#endif  // CONFIG_EXT_PARTITION
             cm->clpf_blocks && cm->clpf_strength_y &&
             cm->clpf_size != CLPF_NOSIZE) {
    const int tl = mi_row * MI_SIZE / MIN_FB_SIZE * cm->clpf_stride +
                   mi_col * MI_SIZE / MIN_FB_SIZE;
    const int tr = tl + 1;
    const int bl = tl + cm->clpf_stride;
    const int br = tr + cm->clpf_stride;

    // Up to four bits per SB.
    // When clpf_size indicates a size larger than the SB size
    // (CLPF_128X128), one bit for every fourth SB will be transmitted
    // regardless of skip blocks.
    if (cm->clpf_blocks[tl] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[tl], 1);

    if (mi_col + MI_SIZE / 2 < cm->mi_cols &&
        cm->clpf_blocks[tr] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[tr], 1);

    if (mi_row + MI_SIZE / 2 < cm->mi_rows &&
        cm->clpf_blocks[bl] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[bl], 1);

    if (mi_row + MI_SIZE / 2 < cm->mi_rows &&
        mi_col + MI_SIZE / 2 < cm->mi_cols &&
        cm->clpf_blocks[br] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[br], 1);
  }
#endif  // CONFIG_CLPF
}
2532
// Writes all superblocks of one tile in raster order, resetting the above
// context for the tile's columns once and the left context at the start of
// each superblock row.
static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile,
                        aom_writer *const w, const TOKENEXTRA **tok,
                        const TOKENEXTRA *const tok_end) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  const int mi_row_start = tile->mi_row_start;
  const int mi_row_end = tile->mi_row_end;
  const int mi_col_start = tile->mi_col_start;
  const int mi_col_end = tile->mi_col_end;
  int mi_row, mi_col;
  av1_zero_above_context(cm, mi_col_start, mi_col_end);
#if CONFIG_PVQ
  // The PVQ queue must be rewound before packing starts.
  assert(cpi->td.mb.pvq_q->curr_pos == 0);
#endif
#if CONFIG_DELTA_Q
  // Delta-q is coded relative to the previous qindex; start from base.
  if (cpi->common.delta_q_present_flag) {
    xd->prev_qindex = cpi->common.base_qindex;
  }
#endif

  for (mi_row = mi_row_start; mi_row < mi_row_end; mi_row += cm->mib_size) {
    av1_zero_left_context(xd);

    for (mi_col = mi_col_start; mi_col < mi_col_end; mi_col += cm->mib_size) {
      write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, 0, mi_row, mi_col,
                             cm->sb_size);
    }
  }
#if CONFIG_PVQ
  // Check that the number of PVQ blocks encoded and written to the bitstream
  // are the same
  assert(cpi->td.mb.pvq_q->curr_pos == cpi->td.mb.pvq_q->last_pos);
  // Reset curr_pos in case we repack the bitstream
  cpi->td.mb.pvq_q->curr_pos = 0;
#endif
}
2569
Yushin Cho77bba8d2016-11-04 16:36:56 -07002570#if !CONFIG_PVQ
Yaowu Xuf883b422016-08-30 14:01:10 -07002571static void build_tree_distribution(AV1_COMP *cpi, TX_SIZE tx_size,
2572 av1_coeff_stats *coef_branch_ct,
2573 av1_coeff_probs_model *coef_probs) {
2574 av1_coeff_count *coef_counts = cpi->td.rd_counts.coef_counts[tx_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002575 unsigned int(*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
2576 cpi->common.counts.eob_branch[tx_size];
2577 int i, j, k, l, m;
2578
2579 for (i = 0; i < PLANE_TYPES; ++i) {
2580 for (j = 0; j < REF_TYPES; ++j) {
2581 for (k = 0; k < COEF_BANDS; ++k) {
2582 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002583 av1_tree_probs_from_distribution(av1_coef_tree,
2584 coef_branch_ct[i][j][k][l],
2585 coef_counts[i][j][k][l]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002586 coef_branch_ct[i][j][k][l][0][1] =
2587 eob_branch_ct[i][j][k][l] - coef_branch_ct[i][j][k][l][0][0];
2588 for (m = 0; m < UNCONSTRAINED_NODES; ++m)
2589 coef_probs[i][j][k][l][m] =
2590 get_binary_prob(coef_branch_ct[i][j][k][l][m][0],
2591 coef_branch_ct[i][j][k][l][m][1]);
2592 }
2593 }
2594 }
2595 }
2596}
2597
// Writes the coefficient-probability updates for one transform size.
// Two strategies, selected by the speed feature `use_fast_coef_updates`:
//  - TWO_LOOP: first a dry run to measure total rate savings; only if the
//    updates pay for themselves is an "update present" bit (1) written,
//    followed by a per-node update flag (+ diff-coded new probability).
//  - ONE_LOOP_REDUCED: single pass that defers the leading "update present"
//    bit until the first actual update is found, then back-fills the skipped
//    zero flags.
// Side effect: accepted probabilities are written back into
// cpi->common.fc->coef_probs so encoder and decoder stay in sync.
static void update_coef_probs_common(aom_writer *const bc, AV1_COMP *cpi,
                                     TX_SIZE tx_size,
                                     av1_coeff_stats *frame_branch_ct,
                                     av1_coeff_probs_model *new_coef_probs) {
  av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
  const aom_prob upd = DIFF_UPDATE_PROB;
#if CONFIG_EC_ADAPT
  // With adaptive entropy coding the last node is not explicitly updated.
  const int entropy_nodes_update = UNCONSTRAINED_NODES - 1;
#else
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
#endif
  int i, j, k, l, t;
  int stepsize = cpi->sf.coeff_prob_appx_step;
#if CONFIG_TILE_GROUPS
  // Header probabilities are sent once per tile group, so weight the
  // cost/savings estimate by the number of tile groups.
  const int probwt = cpi->common.num_tg;
#else
  const int probwt = 1;
#endif

  switch (cpi->sf.use_fast_coef_updates) {
    case TWO_LOOP: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = { 0, 0 };
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                const aom_prob oldp = old_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;
                // The pivot node uses the model-based search that accounts
                // for the dependent (modeled) nodes.
                if (t == PIVOT_NODE)
                  s = av1_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0], oldp, &newp, upd,
                      stepsize, probwt);
                else
                  s = av1_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd, probwt);

                if (s > 0 && newp != oldp) u = 1;
                if (u)
                  savings += s - (int)(av1_cost_zero(upd));
                else
                  savings -= (int)(av1_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        aom_write_bit(bc, 0);
        return;
      }
      aom_write_bit(bc, 1);
      // Second pass: actually emit the per-node flags and diffs decided above.
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = av1_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0], *oldp, &newp, upd,
                      stepsize, probwt);
                else
                  s = av1_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd,
                      probwt);
                if (s > 0 && newp != *oldp) u = 1;
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case ONE_LOOP_REDUCED: {
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (t == PIVOT_NODE) {
                  s = av1_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0], *oldp, &newp, upd,
                      stepsize, probwt);
                } else {
                  s = av1_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd,
                      probwt);
                }

                if (s > 0 && newp != *oldp) u = 1;
                updates += u;
                // Until the first update is seen, nothing has been written;
                // just count the skipped flags.
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  aom_write_bit(bc, 1);
                  // Back-fill the zero flags for all nodes skipped so far.
                  for (v = 0; v < noupdates_before_first; ++v)
                    aom_write(bc, 0, upd);
                }
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        aom_write_bit(bc, 0);  // no updates
      }
      return;
    }
    default: assert(0);
  }
}
2745
2746#if CONFIG_ENTROPY
2747// Calculate the token counts between subsequent subframe updates.
// Computes the token/EOB counts accumulated *between* two subsequent subframe
// probability-update points. For `index == max_idx` the diff is taken against
// the running frame totals; otherwise against the next buffered snapshot.
// Results are written into `coef_counts` / `eob_counts`.
static void get_coef_counts_diff(AV1_COMP *cpi, int index,
                                 av1_coeff_count coef_counts[TX_SIZES]
                                                            [PLANE_TYPES],
                                 unsigned int eob_counts[TX_SIZES][PLANE_TYPES]
                                                        [REF_TYPES][COEF_BANDS]
                                                        [COEFF_CONTEXTS]) {
  int i, j, k, l, m, tx_size, val;
  const int max_idx = cpi->common.coef_probs_update_idx;
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const int max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  const SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats;

  assert(max_idx < COEF_PROBS_BUFS);

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    for (i = 0; i < PLANE_TYPES; ++i)
      for (j = 0; j < REF_TYPES; ++j)
        for (k = 0; k < COEF_BANDS; ++k)
          for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
            // EOB branch counts for this interval.
            if (index == max_idx) {
              val =
                  cpi->common.counts.eob_branch[tx_size][i][j][k][l] -
                  subframe_stats->eob_counts_buf[max_idx][tx_size][i][j][k][l];
            } else {
              val = subframe_stats->eob_counts_buf[index + 1][tx_size][i][j][k]
                                                 [l] -
                    subframe_stats->eob_counts_buf[index][tx_size][i][j][k][l];
            }
            // Snapshots are cumulative, so the interval diff must be >= 0.
            assert(val >= 0);
            eob_counts[tx_size][i][j][k][l] = val;

            // Token counts for this interval, per entropy token.
            for (m = 0; m < ENTROPY_TOKENS; ++m) {
              if (index == max_idx) {
                val = cpi->td.rd_counts.coef_counts[tx_size][i][j][k][l][m] -
                      subframe_stats->coef_counts_buf[max_idx][tx_size][i][j][k]
                                                     [l][m];
              } else {
                val = subframe_stats->coef_counts_buf[index + 1][tx_size][i][j]
                                                     [k][l][m] -
                      subframe_stats->coef_counts_buf[index][tx_size][i][j][k]
                                                     [l][m];
              }
              assert(val >= 0);
              coef_counts[tx_size][i][j][k][l][m] = val;
            }
          }
}
2795
// Subframe variant of update_coef_probs_common(): chooses and writes
// coefficient-probability updates using the per-subframe branch counts in
// `branch_ct` (one snapshot per update point, 0..coef_probs_update_idx).
// `this_branch_ct` gathers, for each tree node, the counts of every snapshot
// so the search routines can evaluate a candidate probability against all
// subframe intervals at once. The TWO_LOOP / ONE_LOOP_REDUCED emission
// strategies mirror update_coef_probs_common().
static void update_coef_probs_subframe(
    aom_writer *const bc, AV1_COMP *cpi, TX_SIZE tx_size,
    av1_coeff_stats branch_ct[COEF_PROBS_BUFS][TX_SIZES][PLANE_TYPES],
    av1_coeff_probs_model *new_coef_probs) {
  av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
  const aom_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  int stepsize = cpi->sf.coeff_prob_appx_step;
  const int max_idx = cpi->common.coef_probs_update_idx;
  int idx;
  // [node][snapshot][branch] counts for the context currently being coded.
  unsigned int this_branch_ct[ENTROPY_NODES][COEF_PROBS_BUFS][2];

  switch (cpi->sf.use_fast_coef_updates) {
    case TWO_LOOP: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = { 0, 0 };
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // Gather all subframe snapshots for this context.
              for (t = 0; t < ENTROPY_NODES; ++t) {
                for (idx = 0; idx <= max_idx; ++idx) {
                  memcpy(this_branch_ct[t][idx],
                         branch_ct[idx][tx_size][i][j][k][l][t],
                         2 * sizeof(this_branch_ct[t][idx][0]));
                }
              }
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                const aom_prob oldp = old_coef_probs[i][j][k][l][t];
                int s, u = 0;

                if (t == PIVOT_NODE)
                  s = av1_prob_update_search_model_subframe(
                      this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
                      stepsize, max_idx);
                else
                  s = av1_prob_update_search_subframe(this_branch_ct[t], oldp,
                                                     &newp, upd, max_idx);
                if (s > 0 && newp != oldp) u = 1;
                if (u)
                  savings += s - (int)(av1_cost_zero(upd));
                else
                  savings -= (int)(av1_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        aom_write_bit(bc, 0);
        return;
      }
      aom_write_bit(bc, 1);
      // Second pass: emit the flags/diffs decided in the dry run.
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < ENTROPY_NODES; ++t) {
                for (idx = 0; idx <= max_idx; ++idx) {
                  memcpy(this_branch_ct[t][idx],
                         branch_ct[idx][tx_size][i][j][k][l][t],
                         2 * sizeof(this_branch_ct[t][idx][0]));
                }
              }
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;

                if (t == PIVOT_NODE)
                  s = av1_prob_update_search_model_subframe(
                      this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
                      stepsize, max_idx);
                else
                  s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp,
                                                     &newp, upd, max_idx);
                if (s > 0 && newp != *oldp) u = 1;
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case ONE_LOOP_REDUCED: {
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < ENTROPY_NODES; ++t) {
                for (idx = 0; idx <= max_idx; ++idx) {
                  memcpy(this_branch_ct[t][idx],
                         branch_ct[idx][tx_size][i][j][k][l][t],
                         2 * sizeof(this_branch_ct[t][idx][0]));
                }
              }
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;

                if (t == PIVOT_NODE)
                  s = av1_prob_update_search_model_subframe(
                      this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
                      stepsize, max_idx);
                else
                  s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp,
                                                     &newp, upd, max_idx);
                if (s > 0 && newp != *oldp) u = 1;
                updates += u;
                // Defer the leading "update present" bit until the first
                // actual update; just count the skipped flags until then.
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  aom_write_bit(bc, 1);
                  // Back-fill zero flags for all contexts skipped so far.
                  for (v = 0; v < noupdates_before_first; ++v)
                    aom_write(bc, 0, upd);
                }
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        aom_write_bit(bc, 0);  // no updates
      }
      return;
    }
    default: assert(0);
  }
}
2953#endif // CONFIG_ENTROPY
2954
Yaowu Xuf883b422016-08-30 14:01:10 -07002955static void update_coef_probs(AV1_COMP *cpi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002956 const TX_MODE tx_mode = cpi->common.tx_mode;
2957 const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
2958 TX_SIZE tx_size;
Alex Converse1e4e29f2016-11-08 14:12:14 -08002959#if CONFIG_EC_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002960 int update = 0;
Alex Converse1e4e29f2016-11-08 14:12:14 -08002961#endif // CONFIG_EC_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002962#if CONFIG_ENTROPY
Yaowu Xuf883b422016-08-30 14:01:10 -07002963 AV1_COMMON *cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002964 SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002965 int i;
Yaowu Xuf883b422016-08-30 14:01:10 -07002966 av1_coeff_probs_model dummy_frame_coef_probs[PLANE_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002967
2968 if (cm->do_subframe_update &&
2969 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002970 av1_copy(cpi->common.fc->coef_probs,
2971 subframe_stats->enc_starting_coef_probs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002972 for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) {
2973 get_coef_counts_diff(cpi, i, cpi->wholeframe_stats.coef_counts_buf[i],
2974 cpi->wholeframe_stats.eob_counts_buf[i]);
2975 }
2976 }
2977#endif // CONFIG_ENTROPY
2978
Jingning Han83630632016-12-16 11:27:25 -08002979 for (tx_size = 0; tx_size <= max_tx_size; ++tx_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002980 av1_coeff_stats frame_branch_ct[PLANE_TYPES];
2981 av1_coeff_probs_model frame_coef_probs[PLANE_TYPES];
Jingning Hanc7ea7612017-01-11 15:01:30 -08002982 if (cpi->td.counts->tx_size_totals[tx_size] <= 20 || CONFIG_RD_DEBUG ||
Yaowu Xuc27fc142016-08-22 16:08:15 -07002983 (tx_size >= TX_16X16 && cpi->sf.tx_size_search_method == USE_TX_8X8)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002984 aom_write_bit(w, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002985 } else {
2986#if CONFIG_ENTROPY
2987 if (cm->do_subframe_update &&
2988 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
Urvang Joshi43e62812016-10-20 14:51:01 -07002989 unsigned int this_eob_counts_copy[PLANE_TYPES][REF_TYPES][COEF_BANDS]
2990 [COEFF_CONTEXTS];
Yaowu Xuf883b422016-08-30 14:01:10 -07002991 av1_coeff_count coef_counts_copy[PLANE_TYPES];
Urvang Joshi43e62812016-10-20 14:51:01 -07002992 av1_copy(this_eob_counts_copy, cpi->common.counts.eob_branch[tx_size]);
Yaowu Xuf883b422016-08-30 14:01:10 -07002993 av1_copy(coef_counts_copy, cpi->td.rd_counts.coef_counts[tx_size]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002994 build_tree_distribution(cpi, tx_size, frame_branch_ct,
2995 frame_coef_probs);
2996 for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002997 av1_copy(cpi->common.counts.eob_branch[tx_size],
2998 cpi->wholeframe_stats.eob_counts_buf[i][tx_size]);
2999 av1_copy(cpi->td.rd_counts.coef_counts[tx_size],
3000 cpi->wholeframe_stats.coef_counts_buf[i][tx_size]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003001 build_tree_distribution(cpi, tx_size, cpi->branch_ct_buf[i][tx_size],
3002 dummy_frame_coef_probs);
3003 }
Urvang Joshi43e62812016-10-20 14:51:01 -07003004 av1_copy(cpi->common.counts.eob_branch[tx_size], this_eob_counts_copy);
Yaowu Xuf883b422016-08-30 14:01:10 -07003005 av1_copy(cpi->td.rd_counts.coef_counts[tx_size], coef_counts_copy);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003006
3007 update_coef_probs_subframe(w, cpi, tx_size, cpi->branch_ct_buf,
3008 frame_coef_probs);
Alex Converse1e4e29f2016-11-08 14:12:14 -08003009#if CONFIG_EC_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003010 update = 1;
Alex Converse1e4e29f2016-11-08 14:12:14 -08003011#endif // CONFIG_EC_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003012 } else {
3013#endif // CONFIG_ENTROPY
3014 build_tree_distribution(cpi, tx_size, frame_branch_ct,
3015 frame_coef_probs);
3016 update_coef_probs_common(w, cpi, tx_size, frame_branch_ct,
3017 frame_coef_probs);
Alex Converse1e4e29f2016-11-08 14:12:14 -08003018#if CONFIG_EC_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003019 update = 1;
Alex Converse1e4e29f2016-11-08 14:12:14 -08003020#endif // CONFIG_EC_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003021#if CONFIG_ENTROPY
3022 }
3023#endif // CONFIG_ENTROPY
3024 }
3025 }
3026
3027#if CONFIG_ENTROPY
Yaowu Xuf883b422016-08-30 14:01:10 -07003028 av1_copy(cm->starting_coef_probs, cm->fc->coef_probs);
3029 av1_copy(subframe_stats->coef_probs_buf[0], cm->fc->coef_probs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003030 if (cm->do_subframe_update &&
3031 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
Urvang Joshi43e62812016-10-20 14:51:01 -07003032 unsigned int eob_counts_copy[TX_SIZES][PLANE_TYPES][REF_TYPES][COEF_BANDS]
3033 [COEFF_CONTEXTS];
Yaowu Xuf883b422016-08-30 14:01:10 -07003034 av1_copy(eob_counts_copy, cm->counts.eob_branch);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003035 for (i = 1; i <= cpi->common.coef_probs_update_idx; ++i) {
3036 for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
Yaowu Xuf883b422016-08-30 14:01:10 -07003037 av1_full_to_model_counts(cm->counts.coef[tx_size],
3038 subframe_stats->coef_counts_buf[i][tx_size]);
3039 av1_copy(cm->counts.eob_branch, subframe_stats->eob_counts_buf[i]);
3040 av1_partial_adapt_probs(cm, 0, 0);
3041 av1_copy(subframe_stats->coef_probs_buf[i], cm->fc->coef_probs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003042 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003043 av1_copy(cm->fc->coef_probs, subframe_stats->coef_probs_buf[0]);
3044 av1_copy(cm->counts.eob_branch, eob_counts_copy);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003045 }
3046#endif // CONFIG_ENTROPY
Alex Converse1e4e29f2016-11-08 14:12:14 -08003047#if CONFIG_EC_MULTISYMBOL
Yaowu Xuf883b422016-08-30 14:01:10 -07003048 if (update) av1_coef_pareto_cdfs(cpi->common.fc);
Alex Converse1e4e29f2016-11-08 14:12:14 -08003049#endif // CONFIG_EC_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07003050}
Yushin Cho77bba8d2016-11-04 16:36:56 -07003051#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003052
3053#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003054static void encode_restoration_mode(AV1_COMMON *cm,
3055 struct aom_write_bit_buffer *wb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003056 int p;
3057 RestorationInfo *rsi = &cm->rst_info[0];
3058 switch (rsi->frame_restoration_type) {
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003059 case RESTORE_NONE:
3060 aom_wb_write_bit(wb, 0);
3061 aom_wb_write_bit(wb, 0);
3062 break;
Debargha Mukherjee0e67b252016-12-08 09:22:44 -08003063 case RESTORE_WIENER:
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003064 aom_wb_write_bit(wb, 1);
Debargha Mukherjee0e67b252016-12-08 09:22:44 -08003065 aom_wb_write_bit(wb, 0);
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003066 break;
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003067 case RESTORE_SGRPROJ:
3068 aom_wb_write_bit(wb, 1);
Debargha Mukherjee0e67b252016-12-08 09:22:44 -08003069 aom_wb_write_bit(wb, 1);
Debargha Mukherjee3981be92016-11-21 09:35:44 -08003070 aom_wb_write_bit(wb, 0);
3071 break;
3072 case RESTORE_DOMAINTXFMRF:
3073 aom_wb_write_bit(wb, 1);
Debargha Mukherjee0e67b252016-12-08 09:22:44 -08003074 aom_wb_write_bit(wb, 1);
Debargha Mukherjee3981be92016-11-21 09:35:44 -08003075 aom_wb_write_bit(wb, 1);
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003076 break;
Debargha Mukherjee0e67b252016-12-08 09:22:44 -08003077 case RESTORE_SWITCHABLE:
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003078 aom_wb_write_bit(wb, 0);
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003079 aom_wb_write_bit(wb, 1);
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003080 break;
3081 default: assert(0);
3082 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003083 for (p = 1; p < MAX_MB_PLANE; ++p) {
3084 rsi = &cm->rst_info[p];
3085 switch (rsi->frame_restoration_type) {
3086 case RESTORE_NONE: aom_wb_write_bit(wb, 0); break;
3087 case RESTORE_WIENER: aom_wb_write_bit(wb, 1); break;
3088 default: assert(0);
3089 }
3090 }
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003091}
3092
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003093static void write_wiener_filter(WienerInfo *wiener_info, aom_writer *wb) {
3094 aom_write_literal(wb, wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV,
3095 WIENER_FILT_TAP0_BITS);
3096 aom_write_literal(wb, wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV,
3097 WIENER_FILT_TAP1_BITS);
3098 aom_write_literal(wb, wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV,
3099 WIENER_FILT_TAP2_BITS);
3100 aom_write_literal(wb, wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV,
3101 WIENER_FILT_TAP0_BITS);
3102 aom_write_literal(wb, wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV,
3103 WIENER_FILT_TAP1_BITS);
3104 aom_write_literal(wb, wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV,
3105 WIENER_FILT_TAP2_BITS);
3106}
3107
3108static void write_sgrproj_filter(SgrprojInfo *sgrproj_info, aom_writer *wb) {
3109 aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS);
3110 aom_write_literal(wb, sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
3111 SGRPROJ_PRJ_BITS);
3112 aom_write_literal(wb, sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
3113 SGRPROJ_PRJ_BITS);
3114}
3115
Debargha Mukherjee3981be92016-11-21 09:35:44 -08003116static void write_domaintxfmrf_filter(DomaintxfmrfInfo *domaintxfmrf_info,
3117 aom_writer *wb) {
3118 aom_write_literal(wb, domaintxfmrf_info->sigma_r, DOMAINTXFMRF_PARAMS_BITS);
3119}
3120
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003121static void encode_restoration(AV1_COMMON *cm, aom_writer *wb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003122 int i, p;
Debargha Mukherjeed7489142017-01-05 13:58:16 -08003123 const int ntiles =
3124 av1_get_rest_ntiles(cm->width, cm->height, NULL, NULL, NULL, NULL);
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003125 const int ntiles_uv = av1_get_rest_ntiles(cm->width >> cm->subsampling_x,
3126 cm->height >> cm->subsampling_y,
3127 NULL, NULL, NULL, NULL);
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003128 RestorationInfo *rsi = &cm->rst_info[0];
Debargha Mukherjee5d89a632016-09-17 13:16:58 -07003129 if (rsi->frame_restoration_type != RESTORE_NONE) {
3130 if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003131 // RESTORE_SWITCHABLE
Debargha Mukherjeed7489142017-01-05 13:58:16 -08003132 for (i = 0; i < ntiles; ++i) {
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003133 av1_write_token(
clang-formatbda8d612016-09-19 15:55:46 -07003134 wb, av1_switchable_restore_tree, cm->fc->switchable_restore_prob,
Debargha Mukherjee5d89a632016-09-17 13:16:58 -07003135 &switchable_restore_encodings[rsi->restoration_type[i]]);
Debargha Mukherjee0e67b252016-12-08 09:22:44 -08003136 if (rsi->restoration_type[i] == RESTORE_WIENER) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003137 write_wiener_filter(&rsi->wiener_info[i], wb);
3138 } else if (rsi->restoration_type[i] == RESTORE_SGRPROJ) {
3139 write_sgrproj_filter(&rsi->sgrproj_info[i], wb);
Debargha Mukherjee3981be92016-11-21 09:35:44 -08003140 } else if (rsi->restoration_type[i] == RESTORE_DOMAINTXFMRF) {
3141 write_domaintxfmrf_filter(&rsi->domaintxfmrf_info[i], wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003142 }
3143 }
Debargha Mukherjee5d89a632016-09-17 13:16:58 -07003144 } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
Debargha Mukherjeed7489142017-01-05 13:58:16 -08003145 for (i = 0; i < ntiles; ++i) {
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003146 aom_write(wb, rsi->restoration_type[i] != RESTORE_NONE,
3147 RESTORE_NONE_WIENER_PROB);
3148 if (rsi->restoration_type[i] != RESTORE_NONE) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003149 write_wiener_filter(&rsi->wiener_info[i], wb);
3150 }
3151 }
3152 } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
Debargha Mukherjeed7489142017-01-05 13:58:16 -08003153 for (i = 0; i < ntiles; ++i) {
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003154 aom_write(wb, rsi->restoration_type[i] != RESTORE_NONE,
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003155 RESTORE_NONE_SGRPROJ_PROB);
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003156 if (rsi->restoration_type[i] != RESTORE_NONE) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003157 write_sgrproj_filter(&rsi->sgrproj_info[i], wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003158 }
3159 }
Debargha Mukherjee3981be92016-11-21 09:35:44 -08003160 } else if (rsi->frame_restoration_type == RESTORE_DOMAINTXFMRF) {
Debargha Mukherjeed7489142017-01-05 13:58:16 -08003161 for (i = 0; i < ntiles; ++i) {
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003162 aom_write(wb, rsi->restoration_type[i] != RESTORE_NONE,
Debargha Mukherjee3981be92016-11-21 09:35:44 -08003163 RESTORE_NONE_DOMAINTXFMRF_PROB);
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003164 if (rsi->restoration_type[i] != RESTORE_NONE) {
Debargha Mukherjee3981be92016-11-21 09:35:44 -08003165 write_domaintxfmrf_filter(&rsi->domaintxfmrf_info[i], wb);
3166 }
3167 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003168 }
3169 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003170 for (p = 1; p < MAX_MB_PLANE; ++p) {
3171 rsi = &cm->rst_info[p];
3172 if (rsi->frame_restoration_type == RESTORE_WIENER) {
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003173 for (i = 0; i < ntiles_uv; ++i) {
3174 if (ntiles_uv > 1)
3175 aom_write(wb, rsi->restoration_type[i] != RESTORE_NONE,
3176 RESTORE_NONE_WIENER_PROB);
3177 if (rsi->restoration_type[i] != RESTORE_NONE) {
3178 write_wiener_filter(&rsi->wiener_info[i], wb);
3179 }
3180 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003181 } else if (rsi->frame_restoration_type != RESTORE_NONE) {
3182 assert(0);
3183 }
3184 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003185}
3186#endif // CONFIG_LOOP_RESTORATION
3187
Yaowu Xuf883b422016-08-30 14:01:10 -07003188static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003189 int i;
3190 struct loopfilter *lf = &cm->lf;
3191
3192 // Encode the loop filter level and type
Yaowu Xuf883b422016-08-30 14:01:10 -07003193 aom_wb_write_literal(wb, lf->filter_level, 6);
3194 aom_wb_write_literal(wb, lf->sharpness_level, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003195
3196 // Write out loop filter deltas applied at the MB level based on mode or
3197 // ref frame (if they are enabled).
Yaowu Xuf883b422016-08-30 14:01:10 -07003198 aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003199
3200 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003201 aom_wb_write_bit(wb, lf->mode_ref_delta_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003202 if (lf->mode_ref_delta_update) {
3203 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
3204 const int delta = lf->ref_deltas[i];
3205 const int changed = delta != lf->last_ref_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07003206 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003207 if (changed) {
3208 lf->last_ref_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07003209 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003210 }
3211 }
3212
3213 for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
3214 const int delta = lf->mode_deltas[i];
3215 const int changed = delta != lf->last_mode_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07003216 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003217 if (changed) {
3218 lf->last_mode_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07003219 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003220 }
3221 }
3222 }
3223 }
3224}
3225
3226#if CONFIG_CLPF
Yaowu Xuf883b422016-08-30 14:01:10 -07003227static void encode_clpf(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Steinar Midtskogenecf9a0c2016-09-13 16:37:13 +02003228 aom_wb_write_literal(wb, cm->clpf_strength_y, 2);
3229 aom_wb_write_literal(wb, cm->clpf_strength_u, 2);
3230 aom_wb_write_literal(wb, cm->clpf_strength_v, 2);
3231 if (cm->clpf_strength_y) {
Steinar Midtskogend06588a2016-05-06 13:48:20 +02003232 aom_wb_write_literal(wb, cm->clpf_size, 2);
Steinar Midtskogend06588a2016-05-06 13:48:20 +02003233 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003234}
3235#endif
3236
3237#if CONFIG_DERING
// Writes the deringing filter level as a fixed-width literal.
static void encode_dering(int level, struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, level, DERING_LEVEL_BITS);
}
3241#endif // CONFIG_DERING
3242
// Writes one quantizer delta: a presence bit, then (only when nonzero) the
// delta as an inverse-signed 6-bit literal.
static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) {
  const int present = (delta_q != 0);
  aom_wb_write_bit(wb, present);
  if (present) aom_wb_write_inv_signed_literal(wb, delta_q, 6);
}
3251
// Writes the quantization parameters to the uncompressed frame header:
// base q index, the three delta-q values (luma DC, chroma DC, chroma AC),
// and, when quantization matrices are compiled in, the qmatrix flag plus
// the min/max qm levels.
static void encode_quantization(const AV1_COMMON *const cm,
                                struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
#if CONFIG_AOM_QM
  aom_wb_write_bit(wb, cm->using_qmatrix);
  if (cm->using_qmatrix) {
    aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS);
    aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS);
  }
#endif
}
3266
// Writes the segmentation parameters to the uncompressed frame header:
// enabled flag, segment-map update mode (spatial vs. temporal prediction),
// and the per-segment feature data.
static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
                                struct aom_write_bit_buffer *wb) {
  int i, j;
  const struct segmentation *seg = &cm->seg;

  aom_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled) return;

  // Segmentation map
  if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
    aom_wb_write_bit(wb, seg->update_map);
  } else {
    // Intra-only / error-resilient frames cannot inherit the previous map,
    // so the map must always be refreshed and the bit is not coded.
    assert(seg->update_map == 1);
  }
  if (seg->update_map) {
    // Select the coding strategy (temporal or spatial)
    av1_choose_segmap_coding_method(cm, xd);

    // Write out the chosen coding method.
    if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
      aom_wb_write_bit(wb, seg->temporal_update);
    } else {
      // Temporal prediction requires a previous frame's map.
      assert(seg->temporal_update == 0);
    }
  }

  // Segmentation data
  aom_wb_write_bit(wb, seg->update_data);
  if (seg->update_data) {
    aom_wb_write_bit(wb, seg->abs_delta);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        const int active = segfeature_active(seg, i, j);
        aom_wb_write_bit(wb, active);
        if (active) {
          const int data = get_segdata(seg, i, j);
          const int data_max = av1_seg_feature_data_max(j);

          if (av1_is_segfeature_signed(j)) {
            // Signed features: magnitude first, then a sign bit.
            encode_unsigned_max(wb, abs(data), data_max);
            aom_wb_write_bit(wb, data < 0);
          } else {
            encode_unsigned_max(wb, data, data_max);
          }
        }
      }
    }
  }
}
3317
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04003318#if !CONFIG_EC_ADAPT
// Writes differential updates for the segmentation probabilities into the
// compressed header.  When temporal prediction of the segment map is in use,
// both the prediction probabilities and the tree probabilities (trained on
// mispredicted blocks) are updated; otherwise only the tree probabilities
// (trained on all blocks) are.
static void update_seg_probs(AV1_COMP *cpi, aom_writer *w) {
  AV1_COMMON *cm = &cpi->common;
#if CONFIG_TILE_GROUPS
  // With tile groups the header is replicated per group, so probability
  // updates are weighted by the number of groups.
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif

  // Nothing to code unless segmentation is on and the map is being updated.
  if (!cm->seg.enabled || !cm->seg.update_map) return;

  if (cm->seg.temporal_update) {
    int i;

    for (i = 0; i < PREDICTION_PROBS; i++)
      av1_cond_prob_diff_update(w, &cm->fc->seg.pred_probs[i],
                                cm->counts.seg.pred[i], probwt);

    prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs,
                     cm->counts.seg.tree_mispred, MAX_SEGMENTS, probwt, w);
  } else {
    prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs,
                     cm->counts.seg.tree_total, MAX_SEGMENTS, probwt, w);
  }
}
Thomas Davies6519beb2016-10-19 14:46:07 +01003343#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003344
// Writes the frame-level transform mode.  A first bit signals TX_MODE_SELECT
// (per-block selection); otherwise the fixed mode is coded as a 2-bit
// literal, with an extra bit for 64x64 support when CONFIG_TX64X64 is on.
static void write_tx_mode(TX_MODE mode, struct aom_write_bit_buffer *wb) {
#if CONFIG_TX64X64
  aom_wb_write_bit(wb, mode == TX_MODE_SELECT);
  if (mode != TX_MODE_SELECT) {
    // Code at most ALLOW_32X32 in 2 bits, then disambiguate 32x32 vs 64x64.
    aom_wb_write_literal(wb, AOMMIN(mode, ALLOW_32X32), 2);
    if (mode >= ALLOW_32X32) aom_wb_write_bit(wb, mode == ALLOW_64X64);
  }
#else
  aom_wb_write_bit(wb, mode == TX_MODE_SELECT);
  if (mode != TX_MODE_SELECT) aom_wb_write_literal(wb, mode, 2);
#endif  // CONFIG_TX64X64
}
3357
// Writes differential updates for the per-context transform size
// probabilities.  Only needed when the frame uses per-block transform size
// selection (TX_MODE_SELECT); the tree at depth i has i + 2 symbols.
static void update_txfm_probs(AV1_COMMON *cm, aom_writer *w,
                              FRAME_COUNTS *counts) {
#if CONFIG_TILE_GROUPS
  // Header is replicated per tile group; weight the updates accordingly.
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  if (cm->tx_mode == TX_MODE_SELECT) {
    int i, j;
    for (i = 0; i < MAX_TX_DEPTH; ++i)
      for (j = 0; j < TX_SIZE_CONTEXTS; ++j)
        prob_diff_update(av1_tx_size_tree[i], cm->fc->tx_size_probs[i][j],
                         counts->tx_size[i][j], i + 2, probwt, w);
  }
}
3373
Angie Chiang5678ad92016-11-21 09:38:40 -08003374static void write_frame_interp_filter(InterpFilter filter,
3375 struct aom_write_bit_buffer *wb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003376 aom_wb_write_bit(wb, filter == SWITCHABLE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003377 if (filter != SWITCHABLE)
Angie Chiang6305abe2016-10-24 12:24:44 -07003378 aom_wb_write_literal(wb, filter, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003379}
3380
Yaowu Xuf883b422016-08-30 14:01:10 -07003381static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003382 if (cm->interp_filter == SWITCHABLE) {
3383 // Check to see if only one of the filters is actually used
3384 int count[SWITCHABLE_FILTERS];
3385 int i, j, c = 0;
3386 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
3387 count[i] = 0;
3388 for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
3389 count[i] += counts->switchable_interp[j][i];
3390 c += (count[i] > 0);
3391 }
3392 if (c == 1) {
3393 // Only one filter is used. So set the filter at frame level
3394 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
3395 if (count[i]) {
3396 cm->interp_filter = i;
3397 break;
3398 }
3399 }
3400 }
3401 }
3402}
3403
// Writes the tiling configuration to the uncompressed frame header.
// With CONFIG_EXT_TILE the explicit tile width/height (in superblocks) are
// coded; otherwise the classic log2 column/row layout is coded with a
// unary prefix for the columns.
static void write_tile_info(const AV1_COMMON *const cm,
                            struct aom_write_bit_buffer *wb) {
#if CONFIG_EXT_TILE
  // Tile dimensions in superblock units, rounded up to the SB grid.
  const int tile_width =
      ALIGN_POWER_OF_TWO(cm->tile_width, cm->mib_size_log2) >>
      cm->mib_size_log2;
  const int tile_height =
      ALIGN_POWER_OF_TWO(cm->tile_height, cm->mib_size_log2) >>
      cm->mib_size_log2;

  assert(tile_width > 0);
  assert(tile_height > 0);

// Write the tile sizes
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128) {
    // 128x128 superblocks: at most 32 SBs per tile dimension -> 5 bits.
    assert(tile_width <= 32);
    assert(tile_height <= 32);
    aom_wb_write_literal(wb, tile_width - 1, 5);
    aom_wb_write_literal(wb, tile_height - 1, 5);
  } else
#endif  // CONFIG_EXT_PARTITION
  {
    assert(tile_width <= 64);
    assert(tile_height <= 64);
    aom_wb_write_literal(wb, tile_width - 1, 6);
    aom_wb_write_literal(wb, tile_height - 1, 6);
  }
#else
  int min_log2_tile_cols, max_log2_tile_cols, ones;
  av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns: unary code of (log2_tile_cols - min), terminated by a 0 bit
  // unless already at the maximum.
  ones = cm->log2_tile_cols - min_log2_tile_cols;
  while (ones--) aom_wb_write_bit(wb, 1);

  if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0);

  // rows: 0, 1 or 2 coded with up to two bits.
  aom_wb_write_bit(wb, cm->log2_tile_rows != 0);
  if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1);
#endif  // CONFIG_EXT_TILE

#if CONFIG_DEBLOCKING_ACROSS_TILES
  aom_wb_write_bit(wb, cm->loop_filter_across_tiles_enabled);
#endif  // CONFIG_DEBLOCKING_ACROSS_TILES
}
3451
// Builds the bitmask of reference frame buffers that the current frame will
// refresh; bit i corresponds to virtual reference buffer index i.
static int get_refresh_mask(AV1_COMP *cpi) {
  int refresh_mask = 0;

#if CONFIG_EXT_REFS
  // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
  // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
  // the 3 LAST reference frames will be updated accordingly, i.e.:
  // (1) The original virtual index for LAST3_FRAME will become the new virtual
  //     index for LAST_FRAME; and
  // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
  //     shifted and become the new virtual indexes for LAST2_FRAME and
  //     LAST3_FRAME.
  refresh_mask |=
      (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]);
  if (cpi->rc.is_bwd_ref_frame && cpi->num_extra_arfs) {
    // We have swapped the virtual indices
    refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->arf_map[0]);
  } else {
    refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
  }
#else
  refresh_mask |= (cpi->refresh_last_frame << cpi->lst_fb_idx);
#endif  // CONFIG_EXT_REFS

  if (av1_preserve_existing_gf(cpi)) {
    // We have decided to preserve the previously existing golden frame as our
    // new ARF frame. However, in the short term we leave it in the GF slot and,
    // if we're updating the GF with the current decoded frame, we save it
    // instead to the ARF slot.
    // Later, in the function av1_encoder.c:av1_update_reference_frames() we
    // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
    // there so that it can be done outside of the recode loop.
    // Note: This is highly specific to the use of ARF as a forward reference,
    // and this needs to be generalized as other uses are implemented
    // (like RTC/temporal scalability).
    return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx);
  } else {
    int arf_idx = cpi->alt_fb_idx;
#if CONFIG_EXT_REFS
    // With extended refs the ARF slot is looked up through the arf_map
    // using the GF group's update index.
    const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
    arf_idx = cpi->arf_map[gf_group->arf_update_idx[gf_group->index]];
#else
    if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
      const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
      arf_idx = gf_group->arf_update_idx[gf_group->index];
    }
#endif  // CONFIG_EXT_REFS
    return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
           (cpi->refresh_alt_ref_frame << arf_idx);
  }
}
3503
3504#if CONFIG_EXT_TILE
// Searches previously coded tiles for one whose bitstream is byte-identical
// to the current tile.  Returns the row offset (distance upward) of the
// matching tile, or 0 if none is found.  Used to emit "copy tiles" that
// reference earlier data instead of repeating it.
// Tile buffers are laid out as [4-byte header][payload]; the comparison
// skips the header.
static INLINE int find_identical_tile(
    const int tile_row, const int tile_col,
    TileBufferEnc (*const tile_buffers)[1024]) {
  const MV32 candidate_offset[1] = { { 1, 0 } };
  const uint8_t *const cur_tile_data =
      tile_buffers[tile_row][tile_col].data + 4;
  const unsigned int cur_tile_size = tile_buffers[tile_row][tile_col].size;

  int i;

  // The first row has no tile above it to copy from.
  if (tile_row == 0) return 0;

  // (TODO: yunqingwang) For now, only above tile is checked and used.
  // More candidates such as left tile can be added later.
  for (i = 0; i < 1; i++) {
    int row_offset = candidate_offset[0].row;
    int col_offset = candidate_offset[0].col;
    int row = tile_row - row_offset;
    int col = tile_col - col_offset;
    uint8_t tile_hdr;
    const uint8_t *tile_data;
    TileBufferEnc *candidate;

    if (row < 0 || col < 0) continue;

    tile_hdr = *(tile_buffers[row][col].data);

    // Read out tcm bit
    if ((tile_hdr >> 7) == 1) {
      // The candidate is a copy tile itself: follow it to the real source
      // so the offset we return stays within the 7-bit range check below.
      row_offset += tile_hdr & 0x7f;
      row = tile_row - row_offset;
    }

    candidate = &tile_buffers[row][col];

    // The copy offset must fit in 7 bits; a size mismatch rules out a copy.
    if (row_offset >= 128 || candidate->size != cur_tile_size) continue;

    tile_data = candidate->data + 4;

    if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;

    // Identical tile found
    assert(row_offset > 0);
    return row_offset;
  }

  // No identical tile found
  return 0;
}
3555#endif // CONFIG_EXT_TILE
3556
// Packs every tile's bitstream into the output buffer and returns the total
// number of bytes written.  Also reports the largest tile size and (for
// EXT_TILE) the largest tile-column size so headers can be compacted later
// by remux_tiles.
// With CONFIG_TILE_GROUPS this function additionally writes the frame
// headers itself (via wb) and may replicate them in front of later tile
// groups when a group exceeds the configured MTU.
#if CONFIG_TILE_GROUPS
static uint32_t write_tiles(AV1_COMP *const cpi,
                            struct aom_write_bit_buffer *wb,
                            unsigned int *max_tile_size,
                            unsigned int *max_tile_col_size) {
#else
static uint32_t write_tiles(AV1_COMP *const cpi, uint8_t *const dst,
                            unsigned int *max_tile_size,
                            unsigned int *max_tile_col_size) {
#endif
  const AV1_COMMON *const cm = &cpi->common;
#if CONFIG_ANS
  struct BufAnsCoder *buf_ans = &cpi->buf_ans;
#else
  aom_writer mode_bc;
#endif  // CONFIG_ANS
  int tile_row, tile_col;
  TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
  TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
  size_t total_size = 0;
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;
  unsigned int tile_size = 0;
#if CONFIG_TILE_GROUPS
  const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
  const int have_tiles = n_log2_tiles > 0;
  size_t comp_hdr_size;
  // Fixed size tile groups for the moment
  const int num_tg_hdrs = cm->num_tg;
  const int tg_size = (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
  int tile_count = 0;
  int uncompressed_hdr_size = 0;
  uint8_t *dst = NULL;
  struct aom_write_bit_buffer comp_hdr_len_wb;
  struct aom_write_bit_buffer tg_params_wb;
  int saved_offset;
  int mtu_size = cpi->oxcf.mtu;
  int curr_tg_data_size = 0;
  int hdr_size;
#endif
#if CONFIG_EXT_TILE
  const int have_tiles = tile_cols * tile_rows > 1;
#endif  // CONFIG_EXT_TILE

  *max_tile_size = 0;
  *max_tile_col_size = 0;

// All tile size fields are output on 4 bytes. A call to remux_tiles will
// later compact the data if smaller headers are adequate.

#if CONFIG_EXT_TILE
  // EXT_TILE layout: column-major, each non-final column prefixed with a
  // 4-byte column size, each tile prefixed with a 4-byte header that is
  // either its size or a copy-tile offset.
  for (tile_col = 0; tile_col < tile_cols; tile_col++) {
    TileInfo tile_info;
    const int is_last_col = (tile_col == tile_cols - 1);
    const size_t col_offset = total_size;

    av1_tile_set_col(&tile_info, cm, tile_col);

    // The last column does not have a column header
    if (!is_last_col) total_size += 4;

    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
      TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
      const int data_offset = have_tiles ? 4 : 0;

      av1_tile_set_row(&tile_info, cm, tile_row);

      buf->data = dst + total_size;

      // Is CONFIG_EXT_TILE = 1, every tile in the row has a header,
      // even for the last one, unless no tiling is used at all.
      total_size += data_offset;
#if !CONFIG_ANS
      aom_start_encode(&mode_bc, buf->data + data_offset);
      write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
      assert(tok == tok_end);
      aom_stop_encode(&mode_bc);
      tile_size = mode_bc.pos;
#else
      buf_ans_write_init(buf_ans, buf->data + data_offset);
      write_modes(cpi, &tile_info, buf_ans, &tok, tok_end);
      assert(tok == tok_end);
      aom_buf_ans_flush(buf_ans);
      tile_size = buf_ans_write_end(buf_ans);
#endif  // !CONFIG_ANS

      buf->size = tile_size;

      // Record the maximum tile size we see, so we can compact headers later.
      *max_tile_size = AOMMAX(*max_tile_size, tile_size);

      if (have_tiles) {
        // tile header: size of this tile, or copy offset
        uint32_t tile_header = tile_size;

        // Check if this tile is a copy tile.
        // Very low chances to have copy tiles on the key frames, so don't
        // search on key frames to reduce unnecessary search.
        if (cm->frame_type != KEY_FRAME) {
          const int idendical_tile_offset =
              find_identical_tile(tile_row, tile_col, tile_buffers);

          if (idendical_tile_offset > 0) {
            // Copy tile: payload is dropped, header carries the offset with
            // the top bit of the top byte set as the copy marker.
            tile_size = 0;
            tile_header = idendical_tile_offset | 0x80;
            tile_header <<= 24;
          }
        }

        mem_put_le32(buf->data, tile_header);
      }

      total_size += tile_size;
    }

    if (!is_last_col) {
      size_t col_size = total_size - col_offset - 4;
      mem_put_le32(dst + col_offset, col_size);

      // If it is not final packing, record the maximum tile column size we see,
      // otherwise, check if the tile size is out of the range.
      *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
    }
  }
#else
#if CONFIG_TILE_GROUPS
  write_uncompressed_header(cpi, wb);

  // Write the tile length code. Use full 32 bit length fields for the moment
  aom_wb_write_literal(wb, 3, 2);

  /* Write a placeholder for the number of tiles in each tile group */
  tg_params_wb = *wb;
  saved_offset = wb->bit_offset;
  if (have_tiles) aom_wb_write_literal(wb, 0, n_log2_tiles * 2);

  /* Write a placeholder for the compressed header length */
  comp_hdr_len_wb = *wb;
  aom_wb_write_literal(wb, 0, 16);

  uncompressed_hdr_size = aom_wb_bytes_written(wb);
  dst = wb->bit_buffer;
  comp_hdr_size = write_compressed_header(cpi, dst + uncompressed_hdr_size);
  aom_wb_overwrite_literal(&comp_hdr_len_wb, (int)(comp_hdr_size), 16);
  hdr_size = uncompressed_hdr_size + comp_hdr_size;
  total_size += hdr_size;
#endif

  for (tile_row = 0; tile_row < tile_rows; tile_row++) {
    TileInfo tile_info;
#if !CONFIG_TILE_GROUPS
    const int is_last_row = (tile_row == tile_rows - 1);
#endif
    av1_tile_set_row(&tile_info, cm, tile_row);

    for (tile_col = 0; tile_col < tile_cols; tile_col++) {
      const int tile_idx = tile_row * tile_cols + tile_col;
      TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
#if CONFIG_PVQ
      TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
#endif
      const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
#if !CONFIG_TILE_GROUPS
      const int is_last_col = (tile_col == tile_cols - 1);
      const int is_last_tile = is_last_col && is_last_row;
      (void)tile_idx;
#else
      // All tiles in a tile group have a length
      const int is_last_tile = 0;

      // Start a new tile group either when the fixed group size is exceeded
      // (no MTU configured) or when the accumulated data reaches the MTU.
      if ((!mtu_size && tile_count > tg_size) ||
          (mtu_size && tile_count && curr_tg_data_size >= mtu_size)) {
        // We've exceeded the packet size
        if (tile_count > 1) {
          /* The last tile exceeded the packet size. The tile group size
             should therefore be tile_count-1.
             Move the last tile and insert headers before it
           */
          int old_total_size = total_size - tile_size - 4;
          memmove(dst + old_total_size + hdr_size, dst + old_total_size,
                  (tile_size + 4) * sizeof(uint8_t));
          // Copy uncompressed header
          memmove(dst + old_total_size, dst,
                  uncompressed_hdr_size * sizeof(uint8_t));
          // Write the number of tiles in the group into the last uncompressed
          // header before the one we've just inserted
          aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
                                   n_log2_tiles);
          aom_wb_overwrite_literal(&tg_params_wb, tile_count - 2, n_log2_tiles);
          // Update the pointer to the last TG params
          tg_params_wb.bit_offset = saved_offset + 8 * old_total_size;
          // Copy compressed header
          memmove(dst + old_total_size + uncompressed_hdr_size,
                  dst + uncompressed_hdr_size, comp_hdr_size * sizeof(uint8_t));
          total_size += hdr_size;
          tile_count = 1;
          curr_tg_data_size = hdr_size + tile_size + 4;

        } else {
          // We exceeded the packet size in just one tile
          // Copy uncompressed header
          memmove(dst + total_size, dst,
                  uncompressed_hdr_size * sizeof(uint8_t));
          // Write the number of tiles in the group into the last uncompressed
          // header
          aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
                                   n_log2_tiles);
          aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1, n_log2_tiles);
          tg_params_wb.bit_offset = saved_offset + 8 * total_size;
          // Copy compressed header
          memmove(dst + total_size + uncompressed_hdr_size,
                  dst + uncompressed_hdr_size, comp_hdr_size * sizeof(uint8_t));
          total_size += hdr_size;
          tile_count = 0;
          curr_tg_data_size = hdr_size;
        }
      }
      tile_count++;
#endif
      av1_tile_set_col(&tile_info, cm, tile_col);

      buf->data = dst + total_size;

      // The last tile does not have a header.
      if (!is_last_tile) total_size += 4;

#if CONFIG_ANS
      buf_ans_write_init(buf_ans, dst + total_size);
      write_modes(cpi, &tile_info, buf_ans, &tok, tok_end);
      assert(tok == tok_end);
      aom_buf_ans_flush(buf_ans);
      tile_size = buf_ans_write_end(buf_ans);
#else
      aom_start_encode(&mode_bc, dst + total_size);
#if CONFIG_PVQ
      // NOTE: This will not work with CONFIG_ANS turned on.
      od_adapt_ctx_reset(&cpi->td.mb.daala_enc.state.adapt, 0);
      cpi->td.mb.pvq_q = &this_tile->pvq_q;
#endif
      write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
      assert(tok == tok_end);
      aom_stop_encode(&mode_bc);
      tile_size = mode_bc.pos;
#endif  // CONFIG_ANS
#if CONFIG_PVQ
      cpi->td.mb.pvq_q = NULL;
#endif

      assert(tile_size > 0);

#if CONFIG_TILE_GROUPS
      curr_tg_data_size += tile_size + 4;
#endif
      buf->size = tile_size;

      if (!is_last_tile) {
        *max_tile_size = AOMMAX(*max_tile_size, tile_size);
        // size of this tile
        mem_put_le32(buf->data, tile_size);
      }

      total_size += tile_size;
    }
  }
#if CONFIG_TILE_GROUPS
  // Write the final tile group size
  if (n_log2_tiles) {
    aom_wb_write_literal(&tg_params_wb, (1 << n_log2_tiles) - tile_count,
                         n_log2_tiles);
    aom_wb_write_literal(&tg_params_wb, tile_count - 1, n_log2_tiles);
  }
#endif
#endif  // CONFIG_EXT_TILE
  return (uint32_t)total_size;
}
3835
Yaowu Xuf883b422016-08-30 14:01:10 -07003836static void write_render_size(const AV1_COMMON *cm,
3837 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003838 const int scaling_active =
3839 cm->width != cm->render_width || cm->height != cm->render_height;
Yaowu Xuf883b422016-08-30 14:01:10 -07003840 aom_wb_write_bit(wb, scaling_active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003841 if (scaling_active) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003842 aom_wb_write_literal(wb, cm->render_width - 1, 16);
3843 aom_wb_write_literal(wb, cm->render_height - 1, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003844 }
3845}
3846
// Writes the coded frame size (width/height minus one, 16 bits each),
// followed by the render size.
static void write_frame_size(const AV1_COMMON *cm,
                             struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, cm->width - 1, 16);
  aom_wb_write_literal(wb, cm->height - 1, 16);

  write_render_size(cm, wb);
}
3854
// Writes the frame size relative to the reference frames: for each active
// reference, one bit says whether the current coded size and render size
// match that reference's buffer.  If a match is found coding stops there;
// otherwise the explicit frame size is written.
static void write_frame_size_with_refs(AV1_COMP *cpi,
                                       struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  int found = 0;

  MV_REFERENCE_FRAME ref_frame;
  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);

    if (cfg != NULL) {
      // Both the coded dimensions and the render dimensions must match.
      found =
          cm->width == cfg->y_crop_width && cm->height == cfg->y_crop_height;
      found &= cm->render_width == cfg->render_width &&
               cm->render_height == cfg->render_height;
    }
    aom_wb_write_bit(wb, found);
    if (found) {
      break;
    }
  }

  if (!found) {
    aom_wb_write_literal(wb, cm->width - 1, 16);
    aom_wb_write_literal(wb, cm->height - 1, 16);
    write_render_size(cm, wb);
  }
}
3882
Yaowu Xuf883b422016-08-30 14:01:10 -07003883static void write_sync_code(struct aom_write_bit_buffer *wb) {
3884 aom_wb_write_literal(wb, AV1_SYNC_CODE_0, 8);
3885 aom_wb_write_literal(wb, AV1_SYNC_CODE_1, 8);
3886 aom_wb_write_literal(wb, AV1_SYNC_CODE_2, 8);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003887}
3888
3889static void write_profile(BITSTREAM_PROFILE profile,
Yaowu Xuf883b422016-08-30 14:01:10 -07003890 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003891 switch (profile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003892 case PROFILE_0: aom_wb_write_literal(wb, 0, 2); break;
3893 case PROFILE_1: aom_wb_write_literal(wb, 2, 2); break;
3894 case PROFILE_2: aom_wb_write_literal(wb, 1, 2); break;
3895 case PROFILE_3: aom_wb_write_literal(wb, 6, 3); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003896 default: assert(0);
3897 }
3898}
3899
// Writes the bit-depth, color-space and chroma-subsampling fields of the
// uncompressed header. Profiles >= 2 carry a 10- vs 12-bit flag; non-sRGB
// streams additionally carry the color range and, for profiles 1/3, the
// explicit subsampling flags. Must match the decoder's reading order.
static void write_bitdepth_colorspace_sampling(
    AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
  if (cm->profile >= PROFILE_2) {
    // High-bit-depth profiles: 0 => 10-bit, 1 => 12-bit.
    assert(cm->bit_depth > AOM_BITS_8);
    aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1);
  }
  aom_wb_write_literal(wb, cm->color_space, 3);
  if (cm->color_space != AOM_CS_SRGB) {
    // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
    aom_wb_write_bit(wb, cm->color_range);
    if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
      // Profiles 1/3 allow non-4:2:0 sampling, so signal it explicitly.
      assert(cm->subsampling_x != 1 || cm->subsampling_y != 1);
      aom_wb_write_bit(wb, cm->subsampling_x);
      aom_wb_write_bit(wb, cm->subsampling_y);
      aom_wb_write_bit(wb, 0);  // unused
    } else {
      // Profiles 0/2 are fixed 4:2:0 — nothing to signal.
      assert(cm->subsampling_x == 1 && cm->subsampling_y == 1);
    }
  } else {
    // sRGB implies 4:4:4, which is only legal in profiles 1 and 3.
    assert(cm->profile == PROFILE_1 || cm->profile == PROFILE_3);
    aom_wb_write_bit(wb, 0);  // unused
  }
}
3923
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01003924#if CONFIG_REFERENCE_BUFFER
3925void write_sequence_header(SequenceHeader *seq_params) {
3926 /* Placeholder for actually writing to the bitstream */
3927 seq_params->frame_id_numbers_present_flag = FRAME_ID_NUMBERS_PRESENT_FLAG;
3928 seq_params->frame_id_length_minus7 = FRAME_ID_LENGTH_MINUS7;
3929 seq_params->delta_frame_id_length_minus2 = DELTA_FRAME_ID_LENGTH_MINUS2;
3930}
3931#endif
3932
// Writes the uncompressed (raw-bit) portion of the frame header: frame
// marker, profile, the show-existing-frame shortcut, frame type / show /
// error-resilience flags, frame-id fields, sync code, size and reference
// signaling, in-loop filter parameters, quantization, segmentation, tx
// mode, reference mode and tile layout.  The write order here must match
// the decoder's read_uncompressed_header bit-for-bit.
static void write_uncompressed_header(AV1_COMP *cpi,
                                      struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;

#if CONFIG_REFERENCE_BUFFER
  /* TODO: Move outside frame loop or inside key-frame branch */
  write_sequence_header(&cpi->seq_params);
#endif

  aom_wb_write_literal(wb, AOM_FRAME_MARKER, 2);

  write_profile(cm->profile, wb);

#if CONFIG_EXT_REFS
  // NOTE: By default all coded frames to be used as a reference
  cm->is_reference_frame = 1;

  if (cm->show_existing_frame) {
    // Short header path: directly display an already-decoded reference
    // frame instead of coding a new one.
    RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
    const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];

    if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
      aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                         "Buffer %d does not contain a reconstructed frame",
                         frame_to_show);
    }
    ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);

    aom_wb_write_bit(wb, 1);  // show_existing_frame
    aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);

#if CONFIG_REFERENCE_BUFFER
    if (cpi->seq_params.frame_id_numbers_present_flag) {
      int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7;
      int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
      aom_wb_write_literal(wb, display_frame_id, frame_id_len);
      /* Add a zero byte to prevent emulation of superframe marker */
      /* Same logic as when terminating the entropy coder */
      /* Consider to have this logic only one place */
      aom_wb_write_literal(wb, 0, 8);
    }
#endif

    // Nothing else is coded for a show-existing frame.
    return;
  } else {
#endif  // CONFIG_EXT_REFS
    aom_wb_write_bit(wb, 0);  // show_existing_frame
#if CONFIG_EXT_REFS
  }
#endif  // CONFIG_EXT_REFS

  aom_wb_write_bit(wb, cm->frame_type);
  aom_wb_write_bit(wb, cm->show_frame);
  aom_wb_write_bit(wb, cm->error_resilient_mode);

#if CONFIG_REFERENCE_BUFFER
  cm->invalid_delta_frame_id_minus1 = 0;
  if (cpi->seq_params.frame_id_numbers_present_flag) {
    int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7;
    aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
  }
#endif

  if (cm->frame_type == KEY_FRAME) {
    write_sync_code(wb);
    write_bitdepth_colorspace_sampling(cm, wb);
    write_frame_size(cm, wb);
#if CONFIG_PALETTE
    aom_wb_write_bit(wb, cm->allow_screen_content_tools);
#endif  // CONFIG_PALETTE
  } else {
    // Inter or intra-only frame.
    if (!cm->show_frame) aom_wb_write_bit(wb, cm->intra_only);
#if CONFIG_PALETTE
    if (cm->intra_only) aom_wb_write_bit(wb, cm->allow_screen_content_tools);
#endif  // CONFIG_PALETTE
    if (!cm->error_resilient_mode) {
      // Signal how much of the frame context to reset before decoding.
      if (cm->intra_only) {
        aom_wb_write_bit(wb,
                         cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
      } else {
        aom_wb_write_bit(wb,
                         cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
        if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
          aom_wb_write_bit(wb,
                           cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
      }
    }

#if CONFIG_EXT_REFS
    cpi->refresh_frame_mask = get_refresh_mask(cpi);
#endif  // CONFIG_EXT_REFS

    if (cm->intra_only) {
      write_sync_code(wb);
      write_bitdepth_colorspace_sampling(cm, wb);

#if CONFIG_EXT_REFS
      aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
#else
      aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
#endif  // CONFIG_EXT_REFS
      write_frame_size(cm, wb);
    } else {
      MV_REFERENCE_FRAME ref_frame;

#if CONFIG_EXT_REFS
      aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
#else
      aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
#endif  // CONFIG_EXT_REFS

#if CONFIG_EXT_REFS
      if (!cpi->refresh_frame_mask) {
        // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
        //       will not be used as a reference
        cm->is_reference_frame = 0;
      }
#endif  // CONFIG_EXT_REFS

      // For each active reference: its slot index, sign bias and
      // (optionally) its frame-id delta from the current frame.
      for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
        assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
        aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
                             REF_FRAMES_LOG2);
        aom_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
#if CONFIG_REFERENCE_BUFFER
        if (cpi->seq_params.frame_id_numbers_present_flag) {
          int i = get_ref_frame_map_idx(cpi, ref_frame);
          int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7;
          int diff_len = cpi->seq_params.delta_frame_id_length_minus2 + 2;
          // Delta is computed modulo the frame-id range; flag it invalid if
          // it cannot be represented in diff_len bits.
          int delta_frame_id_minus1 =
              ((cm->current_frame_id - cm->ref_frame_id[i] +
                (1 << frame_id_len)) %
               (1 << frame_id_len)) -
              1;
          if (delta_frame_id_minus1 < 0 ||
              delta_frame_id_minus1 >= (1 << diff_len))
            cm->invalid_delta_frame_id_minus1 = 1;
          aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
        }
#endif
      }

#if CONFIG_FRAME_SIZE
      if (cm->error_resilient_mode == 0) {
        write_frame_size_with_refs(cpi, wb);
      } else {
        write_frame_size(cm, wb);
      }
#else
      write_frame_size_with_refs(cpi, wb);
#endif

      aom_wb_write_bit(wb, cm->allow_high_precision_mv);

      fix_interp_filter(cm, cpi->td.counts);
      write_frame_interp_filter(cm->interp_filter, wb);
    }
  }

#if CONFIG_REFERENCE_BUFFER
  cm->refresh_mask = cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
#endif

  if (!cm->error_resilient_mode) {
    aom_wb_write_bit(
        wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
  }

  aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);

  assert(cm->mib_size == mi_size_wide[cm->sb_size]);
  assert(cm->mib_size == 1 << cm->mib_size_log2);
#if CONFIG_EXT_PARTITION
  assert(cm->sb_size == BLOCK_128X128 || cm->sb_size == BLOCK_64X64);
  aom_wb_write_bit(wb, cm->sb_size == BLOCK_128X128 ? 1 : 0);
#else
  assert(cm->sb_size == BLOCK_64X64);
#endif  // CONFIG_EXT_PARTITION

  // In-loop filtering / restoration tool parameters.
  encode_loopfilter(cm, wb);
#if CONFIG_DERING
  encode_dering(cm->dering_level, wb);
#endif  // CONFIG_DERING
#if CONFIG_CLPF
  encode_clpf(cm, wb);
#endif
#if CONFIG_LOOP_RESTORATION
  encode_restoration_mode(cm, wb);
#endif  // CONFIG_LOOP_RESTORATION
  encode_quantization(cm, wb);
  encode_segmentation(cm, xd, wb);
#if CONFIG_DELTA_Q
  {
    int i;
    struct segmentation *const seg = &cm->seg;
    int segment_quantizer_active = 0;
    // Delta-Q is only signalled when no segment overrides the quantizer.
    for (i = 0; i < MAX_SEGMENTS; i++) {
      if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
        segment_quantizer_active = 1;
      }
    }
    if (segment_quantizer_active == 0) {
      cm->delta_q_present_flag = cpi->oxcf.aq_mode == DELTA_AQ;
      aom_wb_write_bit(wb, cm->delta_q_present_flag);
      if (cm->delta_q_present_flag) {
        aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
        xd->prev_qindex = cm->base_qindex;
      }
    }
  }
#endif

  // Lossless frames without segmentation are forced to 4x4 transforms and
  // signal no tx mode.
  if (!cm->seg.enabled && xd->lossless[0])
    cm->tx_mode = ONLY_4X4;
  else
    write_tx_mode(cm->tx_mode, wb);

  if (cpi->allow_comp_inter_inter) {
    const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
    const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE;

    aom_wb_write_bit(wb, use_hybrid_pred);
    if (!use_hybrid_pred) aom_wb_write_bit(wb, use_compound_pred);
  }

  write_tile_info(cm, wb);
}
4161
4162#if CONFIG_GLOBAL_MOTION
// Writes one global motion model: first the model type as a token, then its
// parameters.  The switch deliberately cascades: HOMOGRAPHY writes its two
// row-3 parameters and falls through to the AFFINE/ROTZOOM alpha
// parameters, which in turn fall through to the TRANSLATION parameters, so
// each model emits exactly the parameter superset its type requires.
static void write_global_motion_params(WarpedMotionParams *params,
                                       aom_prob *probs, aom_writer *w) {
  TransformationType type = params->wmtype;
  av1_write_token(w, av1_global_motion_types_tree, probs,
                  &global_motion_types_encodings[type]);
  switch (type) {
    case HOMOGRAPHY:
      // Row-3 (perspective) terms, centered at zero.
      aom_write_primitive_symmetric(
          w, (params->wmmat[6] >> GM_ROW3HOMO_PREC_DIFF), GM_ABS_ROW3HOMO_BITS);
      aom_write_primitive_symmetric(
          w, (params->wmmat[7] >> GM_ROW3HOMO_PREC_DIFF), GM_ABS_ROW3HOMO_BITS);
    // fallthrough intended
    case AFFINE:
    case ROTZOOM:
      // Alpha terms; wmmat[2]/wmmat[5] are coded relative to the identity
      // value (1 << GM_ALPHA_PREC_BITS).
      aom_write_primitive_symmetric(
          w,
          (params->wmmat[2] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS),
          GM_ABS_ALPHA_BITS);
      aom_write_primitive_symmetric(w, (params->wmmat[3] >> GM_ALPHA_PREC_DIFF),
                                    GM_ABS_ALPHA_BITS);
      if (type == AFFINE || type == HOMOGRAPHY) {
        // ROTZOOM derives these two, so only AFFINE/HOMOGRAPHY code them.
        aom_write_primitive_symmetric(
            w, (params->wmmat[4] >> GM_ALPHA_PREC_DIFF), GM_ABS_ALPHA_BITS);
        aom_write_primitive_symmetric(w,
                                      (params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
                                          (1 << GM_ALPHA_PREC_BITS),
                                      GM_ABS_ALPHA_BITS);
      }
    // fallthrough intended
    case TRANSLATION:
      aom_write_primitive_symmetric(w, (params->wmmat[0] >> GM_TRANS_PREC_DIFF),
                                    GM_ABS_TRANS_BITS);
      aom_write_primitive_symmetric(w, (params->wmmat[1] >> GM_TRANS_PREC_DIFF),
                                    GM_ABS_TRANS_BITS);
      break;
    case IDENTITY: break;  // Identity model has no parameters.
    default: assert(0);
  }
}
4202
// Writes the global motion model for every inter reference frame into the
// compressed header.
static void write_global_motion(AV1_COMP *cpi, aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  int frame;
  for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
#if !CONFIG_REF_MV
    // With ref-mv, clearing unused global motion models here is
    // unsafe, and we need to rely on the recode loop to do it
    // instead. See av1_find_mv_refs for details.
    if (!cpi->global_motion_used[frame][0]) {
      set_default_gmparams(&cm->global_motion[frame]);
    }
#endif
    write_global_motion_params(&cm->global_motion[frame],
                               cm->fc->global_motion_types_prob, w);
    // Debug trace, kept for development:
    /*
    printf("Frame %d/%d: Enc Ref %d (used %d/%d): %d %d %d %d\n",
           cm->current_video_frame, cm->show_frame, frame,
           cpi->global_motion_used[frame][0], cpi->global_motion_used[frame][1],
           cm->global_motion[frame].wmmat[0], cm->global_motion[frame].wmmat[1],
           cm->global_motion[frame].wmmat[2],
           cm->global_motion[frame].wmmat[3]);
    */
  }
}
4227#endif
4228
// Writes the compressed (entropy-coded) header: the forward probability /
// CDF updates for every symbol type the current frame adapts, using either
// the buffered ANS writer (CONFIG_ANS) or the boolean coder.  Returns the
// compressed-header size in bytes; callers rely on it fitting in 16 bits.
static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data) {
  AV1_COMMON *const cm = &cpi->common;
#if CONFIG_SUPERTX
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
#endif  // CONFIG_SUPERTX
  FRAME_CONTEXT *const fc = cm->fc;
  FRAME_COUNTS *counts = cpi->td.counts;
  aom_writer *header_bc;
  int i, j;

#if CONFIG_TILE_GROUPS
  // Probability updates are weighted by the number of tile groups, since
  // each tile group repeats the header.
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif

#if CONFIG_ANS
  int header_size;
  header_bc = &cpi->buf_ans;
  buf_ans_write_init(header_bc, data);
#else
  aom_writer real_header_bc;
  header_bc = &real_header_bc;
  aom_start_encode(header_bc, data);
#endif

#if CONFIG_LOOP_RESTORATION
  encode_restoration(cm, header_bc);
#endif  // CONFIG_LOOP_RESTORATION
  update_txfm_probs(cm, header_bc, counts);
#if !CONFIG_PVQ
  update_coef_probs(cpi, header_bc);
#endif
#if CONFIG_VAR_TX
  update_txfm_partition_probs(cm, header_bc, counts, probwt);
#endif

  update_skip_probs(cm, header_bc, counts);
#if CONFIG_DELTA_Q
  update_delta_q_probs(cm, header_bc, counts);
#endif
#if !CONFIG_EC_ADAPT
  update_seg_probs(cpi, header_bc);

  for (i = 0; i < INTRA_MODES; ++i) {
    prob_diff_update(av1_intra_mode_tree, fc->uv_mode_prob[i],
                     counts->uv_mode[i], INTRA_MODES, probwt, header_bc);
  }

#if CONFIG_EXT_PARTITION_TYPES
  // Context 0 never uses the extended partition types.
  prob_diff_update(av1_partition_tree, fc->partition_prob[0],
                   counts->partition[0], PARTITION_TYPES, probwt, header_bc);
  for (i = 1; i < PARTITION_CONTEXTS; ++i)
    prob_diff_update(av1_ext_partition_tree, fc->partition_prob[i],
                     counts->partition[i], EXT_PARTITION_TYPES, probwt,
                     header_bc);
#else
  for (i = 0; i < PARTITION_CONTEXTS; ++i) {
    prob_diff_update(av1_partition_tree, fc->partition_prob[i],
                     counts->partition[i], PARTITION_TYPES, probwt, header_bc);
  }
#endif  // CONFIG_EXT_PARTITION_TYPES

#if CONFIG_EXT_INTRA
#if CONFIG_INTRA_INTERP
  for (i = 0; i < INTRA_FILTERS + 1; ++i)
    prob_diff_update(av1_intra_filter_tree, fc->intra_filter_probs[i],
                     counts->intra_filter[i], INTRA_FILTERS, probwt, header_bc);
#endif  // CONFIG_INTRA_INTERP
#endif  // CONFIG_EXT_INTRA
#endif  // CONFIG_EC_ADAPT, CONFIG_DAALA_EC
  if (frame_is_intra_only(cm)) {
    av1_copy(cm->kf_y_prob, av1_kf_y_mode_prob);
#if CONFIG_EC_MULTISYMBOL
    av1_copy(cm->kf_y_cdf, av1_kf_y_mode_cdf);
#endif

#if !CONFIG_EC_ADAPT
    for (i = 0; i < INTRA_MODES; ++i)
      for (j = 0; j < INTRA_MODES; ++j)
        prob_diff_update(av1_intra_mode_tree, cm->kf_y_prob[i][j],
                         counts->kf_y_mode[i][j], INTRA_MODES, probwt,
                         header_bc);
#endif  // CONFIG_EC_ADAPT
  } else {
    // Inter frame: update all inter-prediction related probabilities.
#if CONFIG_REF_MV
    update_inter_mode_probs(cm, header_bc, counts);
#else
#if !CONFIG_EC_ADAPT
    for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
      prob_diff_update(av1_inter_mode_tree, cm->fc->inter_mode_probs[i],
                       counts->inter_mode[i], INTER_MODES, probwt, header_bc);
    }
#endif
#endif
#if CONFIG_EXT_INTER
    update_inter_compound_mode_probs(cm, probwt, header_bc);

    if (cm->reference_mode != COMPOUND_REFERENCE) {
      for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
        if (is_interintra_allowed_bsize_group(i)) {
          av1_cond_prob_diff_update(header_bc, &fc->interintra_prob[i],
                                    cm->counts.interintra[i], probwt);
        }
      }
      for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
        prob_diff_update(
            av1_interintra_mode_tree, cm->fc->interintra_mode_prob[i],
            counts->interintra_mode[i], INTERINTRA_MODES, probwt, header_bc);
      }
      for (i = 0; i < BLOCK_SIZES; i++) {
        if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i))
          av1_cond_prob_diff_update(header_bc, &fc->wedge_interintra_prob[i],
                                    cm->counts.wedge_interintra[i], probwt);
      }
    }
    if (cm->reference_mode != SINGLE_REFERENCE) {
      for (i = 0; i < BLOCK_SIZES; i++)
        prob_diff_update(av1_compound_type_tree, fc->compound_type_prob[i],
                         cm->counts.compound_interinter[i], COMPOUND_TYPES,
                         probwt, header_bc);
    }
#endif  // CONFIG_EXT_INTER

#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
    for (i = BLOCK_8X8; i < BLOCK_SIZES; ++i)
      prob_diff_update(av1_motion_mode_tree, fc->motion_mode_prob[i],
                       counts->motion_mode[i], MOTION_MODES, probwt, header_bc);
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if !CONFIG_EC_ADAPT
    if (cm->interp_filter == SWITCHABLE)
      update_switchable_interp_probs(cm, header_bc, counts);
#endif

    for (i = 0; i < INTRA_INTER_CONTEXTS; i++)
      av1_cond_prob_diff_update(header_bc, &fc->intra_inter_prob[i],
                                counts->intra_inter[i], probwt);

    if (cpi->allow_comp_inter_inter) {
      const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
      if (use_hybrid_pred)
        for (i = 0; i < COMP_INTER_CONTEXTS; i++)
          av1_cond_prob_diff_update(header_bc, &fc->comp_inter_prob[i],
                                    counts->comp_inter[i], probwt);
    }

    if (cm->reference_mode != COMPOUND_REFERENCE) {
      for (i = 0; i < REF_CONTEXTS; i++) {
        for (j = 0; j < (SINGLE_REFS - 1); j++) {
          av1_cond_prob_diff_update(header_bc, &fc->single_ref_prob[i][j],
                                    counts->single_ref[i][j], probwt);
        }
      }
    }
    if (cm->reference_mode != SINGLE_REFERENCE) {
      for (i = 0; i < REF_CONTEXTS; i++) {
#if CONFIG_EXT_REFS
        // Forward and backward compound references are coded separately.
        for (j = 0; j < (FWD_REFS - 1); j++) {
          av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
                                    counts->comp_ref[i][j], probwt);
        }
        for (j = 0; j < (BWD_REFS - 1); j++) {
          av1_cond_prob_diff_update(header_bc, &fc->comp_bwdref_prob[i][j],
                                    counts->comp_bwdref[i][j], probwt);
        }
#else
        for (j = 0; j < (COMP_REFS - 1); j++) {
          av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
                                    counts->comp_ref[i][j], probwt);
        }
#endif  // CONFIG_EXT_REFS
      }
    }

#if !CONFIG_EC_ADAPT
    for (i = 0; i < BLOCK_SIZE_GROUPS; ++i) {
      prob_diff_update(av1_intra_mode_tree, cm->fc->y_mode_prob[i],
                       counts->y_mode[i], INTRA_MODES, probwt, header_bc);
    }
#endif

    av1_write_nmv_probs(cm, cm->allow_high_precision_mv, header_bc,
#if CONFIG_REF_MV
                        counts->mv);
#else
                        &counts->mv);
#endif
#if !CONFIG_EC_ADAPT
    update_ext_tx_probs(cm, header_bc);
#endif
#if CONFIG_SUPERTX
    if (!xd->lossless[0]) update_supertx_probs(cm, probwt, header_bc);
#endif  // CONFIG_SUPERTX
#if CONFIG_GLOBAL_MOTION
    write_global_motion(cpi, header_bc);
#endif  // CONFIG_GLOBAL_MOTION
  }
#if CONFIG_EC_MULTISYMBOL
  // Rebuild the CDF tables from the (possibly updated) probabilities.
  av1_coef_pareto_cdfs(fc);
#if CONFIG_REF_MV
  for (i = 0; i < NMV_CONTEXTS; ++i) av1_set_mv_cdfs(&fc->nmvc[i]);
#else
  av1_set_mv_cdfs(&fc->nmvc);
#endif
  // NOTE(review): this inner guard duplicates the enclosing
  // CONFIG_EC_MULTISYMBOL check and is redundant.
#if CONFIG_EC_MULTISYMBOL
  av1_set_mode_cdfs(cm);
#endif
#endif
#if CONFIG_ANS
  aom_buf_ans_flush(header_bc);
  header_size = buf_ans_write_end(header_bc);
  assert(header_size <= 0xffff);
  return header_size;
#else
  aom_stop_encode(header_bc);
  assert(header_bc->pos <= 0xffff);
  return header_bc->pos;
#endif  // CONFIG_ANS
}
4448
Thomas Davies80188d12016-10-26 16:08:35 -07004449#if !CONFIG_TILE_GROUPS
// Returns the minimum number of bytes (1..4) needed to represent 'size'
// while keeping the top 'spare_msbs' bits of a 32-bit field free for other
// use, or -1 if 'size' already occupies those spare bits.
static int choose_size_bytes(uint32_t size, int spare_msbs) {
  // Reject values whose top 'spare_msbs' bits are already in use.
  if (spare_msbs > 0 && (size >> (32 - spare_msbs)) != 0) return -1;

  // Normalise to a full 32-bit value, then count the occupied bytes.
  const uint32_t shifted = size << spare_msbs;
  int nbytes = 1;
  while (nbytes < 4 && (shifted >> (8 * nbytes)) != 0) ++nbytes;
  return nbytes;
}
4469
// Write 'val' to 'dst' as a little-endian integer occupying exactly 'sz'
// bytes (1..4). Any other 'sz' is a programming error and trips the assert.
static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) {
  if (sz == 1) {
    dst[0] = (uint8_t)(val & 0xff);
  } else if (sz == 2) {
    mem_put_le16(dst, val);
  } else if (sz == 3) {
    mem_put_le24(dst, val);
  } else if (sz == 4) {
    mem_put_le32(dst, val);
  } else {
    assert("Invalid size" && 0);
  }
}
// Compact the tile data in 'dst' in place: write_tiles() emitted every tile
// (and, with CONFIG_EXT_TILE, tile-column) size field as a full 4 bytes;
// here the minimal field widths are chosen and the payload is shifted down
// to close the gaps. Returns the new total size of the data in 'dst' and
// reports the chosen field widths through 'tile_size_bytes' /
// 'tile_col_size_bytes' so the caller can record them in the header.
// NOTE(review): assumes 'dst'/'data_size' are exactly the output of
// write_tiles() with >1 tile (single-tile callers skip this; the final
// assert(rpos > wpos) would fire otherwise) — confirm against caller.
static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
                       const uint32_t data_size, const uint32_t max_tile_size,
                       const uint32_t max_tile_col_size,
                       int *const tile_size_bytes,
                       int *const tile_col_size_bytes) {
// Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
#if CONFIG_EXT_TILE
  // The top bit in the tile size field indicates tile copy mode, so we
  // have 1 less bit to code the tile size
  const int tsb = choose_size_bytes(max_tile_size, 1);
  const int tcsb = choose_size_bytes(max_tile_col_size, 0);
#else
  const int tsb = choose_size_bytes(max_tile_size, 0);
  const int tcsb = 4;  // This is ignored
  (void)max_tile_col_size;
#endif  // CONFIG_EXT_TILE

  assert(tsb > 0);
  assert(tcsb > 0);

  *tile_size_bytes = tsb;
  *tile_col_size_bytes = tcsb;

  if (tsb == 4 && tcsb == 4) {
    // Every field already occupies its minimal width; nothing to move.
    return data_size;
  } else {
    // Read cursor (rpos) walks the original layout, write cursor (wpos)
    // builds the compacted layout; wpos never overtakes rpos because
    // fields only shrink.
    uint32_t wpos = 0;
    uint32_t rpos = 0;

#if CONFIG_EXT_TILE
    int tile_row;
    int tile_col;

    for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
      // All but the last column has a column header
      if (tile_col < cm->tile_cols - 1) {
        uint32_t tile_col_size = mem_get_le32(dst + rpos);
        rpos += 4;

        // Adjust the tile column size by the number of bytes removed
        // from the tile size fields.
        tile_col_size -= (4 - tsb) * cm->tile_rows;

        mem_put_varsize(dst + wpos, tcsb, tile_col_size);
        wpos += tcsb;
      }

      for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
        // All, including the last row has a header
        uint32_t tile_header = mem_get_le32(dst + rpos);
        rpos += 4;

        // If this is a copy tile, we need to shift the MSB to the
        // top bit of the new width, and there is no data to copy.
        if (tile_header >> 31 != 0) {
          if (tsb < 4) tile_header >>= 32 - 8 * tsb;
          mem_put_varsize(dst + wpos, tsb, tile_header);
          wpos += tsb;
        } else {
          mem_put_varsize(dst + wpos, tsb, tile_header);
          wpos += tsb;

          // Slide the tile payload down over the bytes freed by the
          // narrower size fields (regions may overlap, hence memmove).
          memmove(dst + wpos, dst + rpos, tile_header);
          rpos += tile_header;
          wpos += tile_header;
        }
      }
    }
#else
    const int n_tiles = cm->tile_cols * cm->tile_rows;
    int n;

    for (n = 0; n < n_tiles; n++) {
      int tile_size;

      if (n == n_tiles - 1) {
        // The last tile carries no size field; it extends to the end.
        tile_size = data_size - rpos;
      } else {
        tile_size = mem_get_le32(dst + rpos);
        rpos += 4;
        mem_put_varsize(dst + wpos, tsb, tile_size);
        wpos += tsb;
      }

      memmove(dst + wpos, dst + rpos, tile_size);

      rpos += tile_size;
      wpos += tile_size;
    }
#endif  // CONFIG_EXT_TILE

    assert(rpos > wpos);
    assert(rpos == data_size);

    return wpos;
  }
}
Thomas Davies80188d12016-10-26 16:08:35 -07004576#endif // CONFIG_TILE_GROUPS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004577
// Serialize one coded frame into 'dst': uncompressed header, compressed
// header, then the tile data, with '*size' set to the total byte count.
// Without CONFIG_TILE_GROUPS the header is written with placeholder
// size fields, the tiles are remuxed to minimal size-field widths, and
// the placeholders are back-patched via 'saved_wb'; with
// CONFIG_TILE_GROUPS the headers are emitted inside write_tiles().
// NOTE(review): 'dst' must be large enough for the whole frame — no
// bounds are checked here; confirm the caller sizes the buffer.
void av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) {
  uint8_t *data = dst;
#if !CONFIG_TILE_GROUPS
  uint32_t compressed_header_size;
  uint32_t uncompressed_header_size;
  struct aom_write_bit_buffer saved_wb;
#endif
  uint32_t data_size;
  struct aom_write_bit_buffer wb = { data, 0 };

  unsigned int max_tile_size;
  unsigned int max_tile_col_size;

#if !CONFIG_TILE_GROUPS
  int tile_size_bytes;
  int tile_col_size_bytes;
  AV1_COMMON *const cm = &cpi->common;
  const int have_tiles = cm->tile_cols * cm->tile_rows > 1;

#if CONFIG_BITSTREAM_DEBUG
  bitstream_queue_reset_write();
#endif

  // Write the uncompressed header
  write_uncompressed_header(cpi, &wb);

#if CONFIG_EXT_REFS
  if (cm->show_existing_frame) {
    // A show-existing frame is just the uncompressed header; no tile data.
    *size = aom_wb_bytes_written(&wb);
    return;
  }
#endif  // CONFIG_EXT_REFS

  // We do not know these in advance. Output placeholder bits.
  saved_wb = wb;
  // Write tile size magnitudes
  if (have_tiles) {
// Note that the last item in the uncompressed header is the data
// describing tile configuration.
#if CONFIG_EXT_TILE
    // Number of bytes in tile column size - 1
    aom_wb_write_literal(&wb, 0, 2);
#endif  // CONFIG_EXT_TILE
    // Number of bytes in tile size - 1
    aom_wb_write_literal(&wb, 0, 2);
  }
  // Size of compressed header
  aom_wb_write_literal(&wb, 0, 16);

  uncompressed_header_size = (uint32_t)aom_wb_bytes_written(&wb);
  data += uncompressed_header_size;

  aom_clear_system_state();

  // Write the compressed header
  compressed_header_size = write_compressed_header(cpi, data);
  data += compressed_header_size;

  // Write the encoded tile data
  data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
#else
  data_size = write_tiles(cpi, &wb, &max_tile_size, &max_tile_col_size);
#endif
#if !CONFIG_TILE_GROUPS
  if (have_tiles) {
    // Shrink the 4-byte placeholder size fields to their minimal widths.
    data_size =
        remux_tiles(cm, data, data_size, max_tile_size, max_tile_col_size,
                    &tile_size_bytes, &tile_col_size_bytes);
  }

  data += data_size;

  // Now fill in the gaps in the uncompressed header.
  if (have_tiles) {
#if CONFIG_EXT_TILE
    assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
    aom_wb_write_literal(&saved_wb, tile_col_size_bytes - 1, 2);
#endif  // CONFIG_EXT_TILE
    assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
    aom_wb_write_literal(&saved_wb, tile_size_bytes - 1, 2);
  }
  // TODO(jbb): Figure out what to do if compressed_header_size > 16 bits.
  assert(compressed_header_size <= 0xffff);
  aom_wb_write_literal(&saved_wb, compressed_header_size, 16);
#else
  data += data_size;
#endif
#if CONFIG_ANS && ANS_REVERSE
  // Avoid aliasing the superframe index
  *data++ = 0;
#endif
  *size = data - dst;
}