blob: 7cc159ff5e601902bada724156a5281dad0aa3c2 [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <assert.h>
13#include <limits.h>
14#include <stdio.h>
15
Yaowu Xuf883b422016-08-30 14:01:10 -070016#include "aom/aom_encoder.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070017#include "aom_dsp/bitwriter_buffer.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070018#include "aom_dsp/aom_dsp_common.h"
19#include "aom_mem/aom_mem.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070020#include "aom_ports/mem_ops.h"
21#include "aom_ports/system_state.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070022#if CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070023#include "aom_util/debug_util.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070024#endif // CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070025
26#if CONFIG_CLPF
27#include "av1/common/clpf.h"
28#endif
29#if CONFIG_DERING
30#include "av1/common/dering.h"
31#endif // CONFIG_DERING
32#include "av1/common/entropy.h"
33#include "av1/common/entropymode.h"
34#include "av1/common/entropymv.h"
35#include "av1/common/mvref_common.h"
36#include "av1/common/pred_common.h"
37#include "av1/common/reconinter.h"
38#include "av1/common/seg_common.h"
39#include "av1/common/tile_common.h"
40
41#if CONFIG_ANS
Alex Converse1ac1ae72016-09-17 15:11:16 -070042#include "aom_dsp/buf_ans.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070043#endif // CONFIG_ANS
44#include "av1/encoder/bitstream.h"
45#include "av1/encoder/cost.h"
46#include "av1/encoder/encodemv.h"
47#include "av1/encoder/mcomp.h"
48#include "av1/encoder/segmentation.h"
49#include "av1/encoder/subexp.h"
50#include "av1/encoder/tokenize.h"
51
// Static variable-length code tables used by the bitstream packer.  Each
// av1_token is a { value, length } pair consumed by av1_write_token():
// `length` bits taken from `value` encode the symbol along the matching
// probability tree.  Table layouts mirror the symbol enums they index.
static const struct av1_token intra_mode_encodings[INTRA_MODES] = {
  { 0, 1 }, { 6, 3 }, { 28, 5 }, { 30, 5 }, { 58, 6 },
  { 59, 6 }, { 126, 7 }, { 127, 7 }, { 62, 6 }, { 2, 2 }
};
#if CONFIG_EXT_INTERP
static const struct av1_token switchable_interp_encodings[SWITCHABLE_FILTERS] =
    { { 0, 1 }, { 4, 3 }, { 6, 3 }, { 5, 3 }, { 7, 3 } };
#else
static const struct av1_token switchable_interp_encodings[SWITCHABLE_FILTERS] =
    { { 0, 1 }, { 2, 2 }, { 3, 2 } };
#endif  // CONFIG_EXT_INTERP
#if CONFIG_EXT_PARTITION_TYPES
static const struct av1_token ext_partition_encodings[EXT_PARTITION_TYPES] = {
  { 0, 1 }, { 4, 3 }, { 12, 4 }, { 7, 3 },
  { 10, 4 }, { 11, 4 }, { 26, 5 }, { 27, 5 }
};
#endif
static const struct av1_token partition_encodings[PARTITION_TYPES] = {
  { 0, 1 }, { 2, 2 }, { 6, 3 }, { 7, 3 }
};
#if !CONFIG_REF_MV
// Indexed by INTER_OFFSET(mode); only used when REF_MV signalling is off.
static const struct av1_token inter_mode_encodings[INTER_MODES] =
#if CONFIG_EXT_INTER
    { { 2, 2 }, { 6, 3 }, { 0, 1 }, { 14, 4 }, { 15, 4 } };
#else
    { { 2, 2 }, { 6, 3 }, { 0, 1 }, { 7, 3 } };
#endif  // CONFIG_EXT_INTER
#endif
#if CONFIG_EXT_INTER
static const struct av1_token
    inter_compound_mode_encodings[INTER_COMPOUND_MODES] = {
      { 2, 2 }, { 50, 6 }, { 51, 6 }, { 24, 5 }, { 52, 6 },
      { 53, 6 }, { 54, 6 }, { 55, 6 }, { 0, 1 }, { 7, 3 }
    };
#endif  // CONFIG_EXT_INTER
#if CONFIG_PALETTE
static const struct av1_token palette_size_encodings[] = {
  { 0, 1 }, { 2, 2 }, { 6, 3 }, { 14, 4 }, { 30, 5 }, { 62, 6 }, { 63, 6 },
};
// First index: palette size - 2; second index: color token within the
// palette.  Rows grow one entry per extra color.
static const struct av1_token
    palette_color_encodings[PALETTE_MAX_SIZE - 1][PALETTE_MAX_SIZE] = {
      { { 0, 1 }, { 1, 1 } },                                  // 2 colors
      { { 0, 1 }, { 2, 2 }, { 3, 2 } },                        // 3 colors
      { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 7, 3 } },              // 4 colors
      { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 14, 4 }, { 15, 4 } },  // 5 colors
      { { 0, 1 },
        { 2, 2 },
        { 6, 3 },
        { 14, 4 },
        { 30, 5 },
        { 31, 5 } },  // 6 colors
      { { 0, 1 },
        { 2, 2 },
        { 6, 3 },
        { 14, 4 },
        { 30, 5 },
        { 62, 6 },
        { 63, 6 } },  // 7 colors
      { { 0, 1 },
        { 2, 2 },
        { 6, 3 },
        { 14, 4 },
        { 30, 5 },
        { 62, 6 },
        { 126, 7 },
        { 127, 7 } },  // 8 colors
    };
#endif  // CONFIG_PALETTE

// First index: maximum tx size category - 1; second: the coded tx size.
static const struct av1_token tx_size_encodings[TX_SIZES - 1][TX_SIZES] = {
  { { 0, 1 }, { 1, 1 } },                      // Max tx_size is 8X8
  { { 0, 1 }, { 2, 2 }, { 3, 2 } },            // Max tx_size is 16X16
  { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 7, 3 } },  // Max tx_size is 32X32
};
126
#if CONFIG_EXT_INTRA || CONFIG_PALETTE
// Writes v, assumed to lie in [0, n), with a truncated binary code: with
// l = ceil(log2(n)) bits available and m = 2^l - n, the first m symbols use
// l - 1 bits and the remaining n - m symbols use l bits (an l - 1 bit prefix
// plus one extra bit).  Writes nothing when n <= 1 (l == 0).
static INLINE void write_uniform(aom_writer *w, int n, int v) {
  int l = get_unsigned_bits(n);
  int m = (1 << l) - n;
  if (l == 0) return;
  if (v < m) {
    // Short codeword: v itself in l - 1 bits.
    aom_write_literal(w, v, l - 1);
  } else {
    // Long codeword: shared prefix, then the low bit of (v - m).
    aom_write_literal(w, m + ((v - m) >> 1), l - 1);
    aom_write_literal(w, (v - m) & 1, 1);
  }
}
#endif  // CONFIG_EXT_INTRA || CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -0700140
// Token tables that cannot be static initializers: they are derived from
// probability trees at runtime by av1_encode_token_init() below.
#if CONFIG_EXT_TX
static struct av1_token ext_tx_inter_encodings[EXT_TX_SETS_INTER][TX_TYPES];
static struct av1_token ext_tx_intra_encodings[EXT_TX_SETS_INTRA][TX_TYPES];
#else
static struct av1_token ext_tx_encodings[TX_TYPES];
#endif  // CONFIG_EXT_TX
#if CONFIG_GLOBAL_MOTION
static struct av1_token global_motion_types_encodings[GLOBAL_MOTION_TYPES];
#endif  // CONFIG_GLOBAL_MOTION
#if CONFIG_EXT_INTRA
static struct av1_token intra_filter_encodings[INTRA_FILTERS];
#endif  // CONFIG_EXT_INTRA
#if CONFIG_EXT_INTER
static struct av1_token interintra_mode_encodings[INTERINTRA_MODES];
#endif  // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
static struct av1_token motion_mode_encodings[MOTION_MODES];
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_LOOP_RESTORATION
static struct av1_token switchable_restore_encodings[RESTORE_SWITCHABLE_TYPES];
#endif  // CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -0700162
// One-time encoder initialization: fills the runtime token tables above from
// their probability trees, and (for the Daala entropy coder) builds the
// index/inverse-index maps needed where symbol values are not consecutive in
// tree order.  Must run before any packing function that uses these tables.
void av1_encode_token_init(void) {
#if CONFIG_EXT_TX
  int s;
  // Set 0 in each group is the trivial single-type set; start at 1.
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    av1_tokens_from_tree(ext_tx_inter_encodings[s], av1_ext_tx_inter_tree[s]);
  }
  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    av1_tokens_from_tree(ext_tx_intra_encodings[s], av1_ext_tx_intra_tree[s]);
  }
#else
  av1_tokens_from_tree(ext_tx_encodings, av1_ext_tx_tree);
#endif  // CONFIG_EXT_TX
#if CONFIG_EXT_INTRA
  av1_tokens_from_tree(intra_filter_encodings, av1_intra_filter_tree);
#endif  // CONFIG_EXT_INTRA
#if CONFIG_EXT_INTER
  av1_tokens_from_tree(interintra_mode_encodings, av1_interintra_mode_tree);
#endif  // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
  av1_tokens_from_tree(motion_mode_encodings, av1_motion_mode_tree);
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_GLOBAL_MOTION
  av1_tokens_from_tree(global_motion_types_encodings,
                       av1_global_motion_types_tree);
#endif  // CONFIG_GLOBAL_MOTION
#if CONFIG_LOOP_RESTORATION
  av1_tokens_from_tree(switchable_restore_encodings,
                       av1_switchable_restore_tree);
#endif  // CONFIG_LOOP_RESTORATION

#if CONFIG_DAALA_EC
  /* This hack is necessary when CONFIG_EXT_INTERP is enabled because the five
     SWITCHABLE_FILTERS are not consecutive, e.g., 0, 1, 2, 3, 4, when doing
     an in-order traversal of the av1_switchable_interp_tree structure. */
  av1_indices_from_tree(av1_switchable_interp_ind, av1_switchable_interp_inv,
                        SWITCHABLE_FILTERS, av1_switchable_interp_tree);
  /* This hack is necessary because the four TX_TYPES are not consecutive,
     e.g., 0, 1, 2, 3, when doing an in-order traversal of the av1_ext_tx_tree
     structure. */
  av1_indices_from_tree(av1_ext_tx_ind, av1_ext_tx_inv, TX_TYPES,
                        av1_ext_tx_tree);
#endif
}
206
// Codes an intra prediction mode against the intra-mode tree using the
// caller-supplied context probabilities.
static void write_intra_mode(aom_writer *w, PREDICTION_MODE mode,
                             const aom_prob *probs) {
  av1_write_token(w, av1_intra_mode_tree, probs, &intra_mode_encodings[mode]);
}
211
#if CONFIG_EXT_INTER
// Codes an inter-intra prediction mode against its tree with the supplied
// context probabilities.
static void write_interintra_mode(aom_writer *w, INTERINTRA_MODE mode,
                                  const aom_prob *probs) {
  av1_write_token(w, av1_interintra_mode_tree, probs,
                  &interintra_mode_encodings[mode]);
}
#endif  // CONFIG_EXT_INTER
219
// Signals an inter prediction mode.  With CONFIG_REF_MV the mode is coded as
// a cascade of binary flags (is-not-NEWMV, is-not-ZEROMV, is-not-NEARESTMV),
// each with its own context derived from bitfields packed into mode_ctx;
// without it, the mode is coded as a single tree token.  The emitted flags
// must mirror the decoder's read order exactly.
static void write_inter_mode(AV1_COMMON *cm, aom_writer *w,
                             PREDICTION_MODE mode,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                             int is_compound,
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
                             const int16_t mode_ctx) {
#if CONFIG_REF_MV
  // Low bits of mode_ctx select the NEWMV flag context.
  const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
  const aom_prob newmv_prob = cm->fc->newmv_prob[newmv_ctx];
#if CONFIG_EXT_INTER
  aom_write(w, mode != NEWMV && mode != NEWFROMNEARMV, newmv_prob);

  // Single-reference NEWMV-class modes need one more bit to distinguish
  // NEWMV from NEWFROMNEARMV.
  if (!is_compound && (mode == NEWMV || mode == NEWFROMNEARMV))
    aom_write(w, mode == NEWFROMNEARMV, cm->fc->new2mv_prob);

  if (mode != NEWMV && mode != NEWFROMNEARMV) {
#else
  aom_write(w, mode != NEWMV, newmv_prob);

  if (mode != NEWMV) {
#endif  // CONFIG_EXT_INTER
    const int16_t zeromv_ctx = (mode_ctx >> ZEROMV_OFFSET) & ZEROMV_CTX_MASK;
    const aom_prob zeromv_prob = cm->fc->zeromv_prob[zeromv_ctx];

    // When all candidate references are zero-MV the decoder infers ZEROMV;
    // nothing further is written.
    if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) {
      assert(mode == ZEROMV);
      return;
    }

    aom_write(w, mode != ZEROMV, zeromv_prob);

    if (mode != ZEROMV) {
      int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
      aom_prob refmv_prob;

      // Skip flags in mode_ctx override the derived REFMV context.
      if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
      if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
      if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;

      refmv_prob = cm->fc->refmv_prob[refmv_ctx];
      aom_write(w, mode != NEARESTMV, refmv_prob);
    }
  }
#else
  const aom_prob *const inter_probs = cm->fc->inter_mode_probs[mode_ctx];
  assert(is_inter_mode(mode));
  av1_write_token(w, av1_inter_mode_tree, inter_probs,
                  &inter_mode_encodings[INTER_OFFSET(mode)]);
#endif
}
270
#if CONFIG_REF_MV
// Signals the dynamic-reference-list (DRL) index: which entry of the ref-MV
// stack the block's NEWMV or NEARMV mode uses.  A unary-style sequence of
// "keep going" bits is written, each against a context derived from the
// stack; bits are only written while the stack holds further candidates.
static void write_drl_idx(const AV1_COMMON *cm, const MB_MODE_INFO *mbmi,
                          const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

  assert(mbmi->ref_mv_idx < 3);

  if (mbmi->mode == NEWMV) {
    int idx;
    // NEWMV chooses among stack entries 0..2; at most two bits are coded.
    for (idx = 0; idx < 2; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];

        aom_write(w, mbmi->ref_mv_idx != idx, drl_prob);
        if (mbmi->ref_mv_idx == idx) return;
      }
    }
    return;
  }

  if (mbmi->mode == NEARMV) {
    int idx;
    // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
    for (idx = 1; idx < 3; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];

        // NEARMV stack entries start one past NEARESTMV, hence idx - 1.
        aom_write(w, mbmi->ref_mv_idx != (idx - 1), drl_prob);
        if (mbmi->ref_mv_idx == (idx - 1)) return;
      }
    }
    return;
  }
}
#endif
310
#if CONFIG_EXT_INTER
// Codes a compound (two-reference) inter mode as a tree token using the
// probabilities selected by mode_ctx.
static void write_inter_compound_mode(AV1_COMMON *cm, aom_writer *w,
                                      PREDICTION_MODE mode,
                                      const int16_t mode_ctx) {
  const aom_prob *const inter_compound_probs =
      cm->fc->inter_compound_mode_probs[mode_ctx];

  assert(is_inter_compound_mode(mode));
  av1_write_token(w, av1_inter_compound_mode_tree, inter_compound_probs,
                  &inter_compound_mode_encodings[INTER_COMPOUND_OFFSET(mode)]);
}
#endif  // CONFIG_EXT_INTER
323
// Writes `data`, assumed to lie in [0, max], as a fixed-width literal; the
// width is the number of bits needed to represent `max`.
static void encode_unsigned_max(struct aom_write_bit_buffer *wb, int data,
                                int max) {
  const int nbits = get_unsigned_bits(max);
  aom_wb_write_literal(wb, data, nbits);
}
328
Yaowu Xuf883b422016-08-30 14:01:10 -0700329static void prob_diff_update(const aom_tree_index *tree,
330 aom_prob probs[/*n - 1*/],
Yaowu Xuc27fc142016-08-22 16:08:15 -0700331 const unsigned int counts[/*n - 1*/], int n,
Yaowu Xuf883b422016-08-30 14:01:10 -0700332 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700333 int i;
334 unsigned int branch_ct[32][2];
335
336 // Assuming max number of probabilities <= 32
337 assert(n <= 32);
338
Yaowu Xuf883b422016-08-30 14:01:10 -0700339 av1_tree_probs_from_distribution(tree, branch_ct, counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700340 for (i = 0; i < n - 1; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -0700341 av1_cond_prob_diff_update(w, &probs[i], branch_ct[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700342}
343
Yaowu Xuf883b422016-08-30 14:01:10 -0700344static int prob_diff_update_savings(const aom_tree_index *tree,
345 aom_prob probs[/*n - 1*/],
Yaowu Xuc27fc142016-08-22 16:08:15 -0700346 const unsigned int counts[/*n - 1*/],
347 int n) {
348 int i;
349 unsigned int branch_ct[32][2];
350 int savings = 0;
351
352 // Assuming max number of probabilities <= 32
353 assert(n <= 32);
Yaowu Xuf883b422016-08-30 14:01:10 -0700354 av1_tree_probs_from_distribution(tree, branch_ct, counts);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700355 for (i = 0; i < n - 1; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700356 savings += av1_cond_prob_diff_update_savings(&probs[i], branch_ct[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700357 }
358 return savings;
359}
360
#if CONFIG_VAR_TX
// Recursively signals the variable transform-size partition tree for an
// inter block: at each node a single bit says whether the current tx_size is
// used (0) or split into four quadrants (1).  Blocks outside the visible
// frame area are skipped; the above/left txfm contexts are updated to match
// what the decoder will derive.
static void write_tx_size_vartx(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
                                int blk_row, int blk_col, aom_writer *w) {
  const int tx_row = blk_row >> 1;
  const int tx_col = blk_col >> 1;
  int max_blocks_high = num_4x4_blocks_high_lookup[mbmi->sb_type];
  int max_blocks_wide = num_4x4_blocks_wide_lookup[mbmi->sb_type];
  int ctx = txfm_partition_context(xd->above_txfm_context + tx_col,
                                   xd->left_txfm_context + tx_row, tx_size);

  // Clamp the block extent at frame boundaries (edges are in 1/8-pel-ish
  // units; >> 5 converts to 4x4-block units -- NOTE(review): verify scale).
  if (xd->mb_to_bottom_edge < 0) max_blocks_high += xd->mb_to_bottom_edge >> 5;
  if (xd->mb_to_right_edge < 0) max_blocks_wide += xd->mb_to_right_edge >> 5;

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (tx_size == mbmi->inter_tx_size[tx_row][tx_col]) {
    // No split: signal 0 and record this tx size in the contexts.
    aom_write(w, 0, cm->fc->txfm_partition_prob[ctx]);
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size);
  } else {
    const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
    int bsl = b_width_log2_lookup[bsize];
    int i;
    aom_write(w, 1, cm->fc->txfm_partition_prob[ctx]);

    // TX_8X8 splits directly to TX_4X4 leaves; no recursion needed.
    if (tx_size == TX_8X8) {
      txfm_partition_update(xd->above_txfm_context + tx_col,
                            xd->left_txfm_context + tx_row, TX_4X4);
      return;
    }

    assert(bsl > 0);
    --bsl;
    // Recurse into the four quadrants at the next smaller tx size.
    for (i = 0; i < 4; ++i) {
      int offsetr = blk_row + ((i >> 1) << bsl);
      int offsetc = blk_col + ((i & 0x01) << bsl);
      write_tx_size_vartx(cm, xd, mbmi, tx_size - 1, offsetr, offsetc, w);
    }
  }
}
402
Yaowu Xuf883b422016-08-30 14:01:10 -0700403static void update_txfm_partition_probs(AV1_COMMON *cm, aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700404 FRAME_COUNTS *counts) {
405 int k;
406 for (k = 0; k < TXFM_PARTITION_CONTEXTS; ++k)
Yaowu Xuf883b422016-08-30 14:01:10 -0700407 av1_cond_prob_diff_update(w, &cm->fc->txfm_partition_prob[k],
408 counts->txfm_partition[k]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700409}
410#endif
411
// Signals the transform size chosen for the current block.  Sub-8x8 blocks
// carry no tx_size symbol; otherwise the (square-mapped) tx size is coded as
// a tree token whose tree and probabilities depend on the block-size category
// and the neighboring-tx-size context.
static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  // For sub8x8 blocks the tx_size symbol does not need to be sent
  if (bsize >= BLOCK_8X8) {
    const TX_SIZE tx_size = mbmi->tx_size;
    const int is_inter = is_inter_block(mbmi);
    const int tx_size_ctx = get_tx_size_context(xd);
    // Inter and intra blocks use different size-category tables.
    const int tx_size_cat = is_inter ? inter_tx_size_cat_lookup[bsize]
                                     : intra_tx_size_cat_lookup[bsize];
    // Rectangular tx sizes are coded via their square superset.
    const TX_SIZE coded_tx_size = txsize_sqr_up_map[tx_size];

#if CONFIG_EXT_TX && CONFIG_RECT_TX
    assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));
    assert(
        IMPLIES(is_rect_tx(tx_size), tx_size == max_txsize_rect_lookup[bsize]));
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX

    av1_write_token(w, av1_tx_size_tree[tx_size_cat],
                    cm->fc->tx_size_probs[tx_size_cat][tx_size_ctx],
                    &tx_size_encodings[tx_size_cat][coded_tx_size]);
  }
}
436
#if CONFIG_REF_MV
// Conditionally writes diff updates for every REF_MV inter-mode binary
// probability (NEWMV, ZEROMV, REFMV and DRL contexts, plus the EXT_INTER
// new2mv flag) from the collected frame counts.
static void update_inter_mode_probs(AV1_COMMON *cm, aom_writer *w,
                                    FRAME_COUNTS *counts) {
  int ctx;
  for (ctx = 0; ctx < NEWMV_MODE_CONTEXTS; ++ctx) {
    av1_cond_prob_diff_update(w, &cm->fc->newmv_prob[ctx],
                              counts->newmv_mode[ctx]);
  }
  for (ctx = 0; ctx < ZEROMV_MODE_CONTEXTS; ++ctx) {
    av1_cond_prob_diff_update(w, &cm->fc->zeromv_prob[ctx],
                              counts->zeromv_mode[ctx]);
  }
  for (ctx = 0; ctx < REFMV_MODE_CONTEXTS; ++ctx) {
    av1_cond_prob_diff_update(w, &cm->fc->refmv_prob[ctx],
                              counts->refmv_mode[ctx]);
  }
  for (ctx = 0; ctx < DRL_MODE_CONTEXTS; ++ctx) {
    av1_cond_prob_diff_update(w, &cm->fc->drl_prob[ctx],
                              counts->drl_mode[ctx]);
  }
#if CONFIG_EXT_INTER
  av1_cond_prob_diff_update(w, &cm->fc->new2mv_prob, counts->new2mv_mode);
#endif  // CONFIG_EXT_INTER
}
#endif
455
#if CONFIG_EXT_INTER
// Group-updates the compound inter-mode probabilities: first estimates the
// total diff-update savings across all mode contexts, then writes a single
// flag (coded with GROUP_DIFF_UPDATE_PROB) saying whether updates follow,
// and only then the per-context updates.  The savings pass and the update
// pass must visit contexts in the same order.
static void update_inter_compound_mode_probs(AV1_COMMON *cm, aom_writer *w) {
  // Updating is only worthwhile if it saves more than the flag itself costs.
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i;
  int savings = 0;
  int do_update = 0;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
    savings += prob_diff_update_savings(
        av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
        cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
      prob_diff_update(
          av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
          cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES, w);
    }
  }
}
#endif  // CONFIG_EXT_INTER
479
Yaowu Xuf883b422016-08-30 14:01:10 -0700480static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
481 int segment_id, const MODE_INFO *mi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700482 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
483 return 1;
484 } else {
485 const int skip = mi->mbmi.skip;
Yaowu Xuf883b422016-08-30 14:01:10 -0700486 aom_write(w, skip, av1_get_skip_prob(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -0700487 return skip;
488 }
489}
490
Yaowu Xuf883b422016-08-30 14:01:10 -0700491static void update_skip_probs(AV1_COMMON *cm, aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700492 FRAME_COUNTS *counts) {
493 int k;
494
495 for (k = 0; k < SKIP_CONTEXTS; ++k)
Yaowu Xuf883b422016-08-30 14:01:10 -0700496 av1_cond_prob_diff_update(w, &cm->fc->skip_probs[k], counts->skip[k]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700497}
498
// Writes diff updates for the switchable interpolation-filter probabilities,
// one tree per filter context.  Under CONFIG_DAALA_EC the matching CDF table
// is rebuilt from the freshly updated probabilities so the Daala entropy
// coder stays in sync.
static void update_switchable_interp_probs(AV1_COMMON *cm, aom_writer *w,
                                           FRAME_COUNTS *counts) {
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
    prob_diff_update(av1_switchable_interp_tree,
                     cm->fc->switchable_interp_prob[j],
                     counts->switchable_interp[j], SWITCHABLE_FILTERS, w);
#if CONFIG_DAALA_EC
    av1_tree_to_cdf(av1_switchable_interp_tree,
                    cm->fc->switchable_interp_prob[j],
                    cm->fc->switchable_interp_cdf[j]);
#endif
  }
}
513
#if CONFIG_EXT_TX
// Group-updates the extended transform-type probabilities.  For each inter
// and intra tx set (set 0 is trivial and skipped), the total diff-update
// savings across the applicable transform sizes (and intra modes) is
// estimated first; one flag coded with GROUP_DIFF_UPDATE_PROB then says
// whether the per-tree updates follow.  Savings and update passes must walk
// the same elements in the same order.
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;
  int s;
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    int savings = 0;
    int do_update = 0;
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_inter_ext_tx_for_txsize[s][i]) continue;
      savings += prob_diff_update_savings(
          av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
          cm->counts.inter_ext_tx[s][i], num_ext_tx_set_inter[s]);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_inter_ext_tx_for_txsize[s][i]) continue;
        prob_diff_update(
            av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
            cm->counts.inter_ext_tx[s][i], num_ext_tx_set_inter[s], w);
      }
    }
  }

  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    int savings = 0;
    int do_update = 0;
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_intra_ext_tx_for_txsize[s][i]) continue;
      // Intra tx-type probabilities are additionally conditioned on the
      // intra prediction mode.
      for (j = 0; j < INTRA_MODES; ++j)
        savings += prob_diff_update_savings(
            av1_ext_tx_intra_tree[s], cm->fc->intra_ext_tx_prob[s][i][j],
            cm->counts.intra_ext_tx[s][i][j], num_ext_tx_set_intra[s]);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_intra_ext_tx_for_txsize[s][i]) continue;
        for (j = 0; j < INTRA_MODES; ++j)
          prob_diff_update(
              av1_ext_tx_intra_tree[s], cm->fc->intra_ext_tx_prob[s][i][j],
              cm->counts.intra_ext_tx[s][i][j], num_ext_tx_set_intra[s], w);
      }
    }
  }
}

#else

// Non-EXT_TX variant: one grouped update for the intra tx-type trees (per
// size, per intra mode), then one for the inter trees (per size).  Under
// CONFIG_DAALA_EC the intra CDFs are rebuilt right after each update so the
// Daala entropy coder stays in sync with the new probabilities.
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;

  int savings = 0;
  int do_update = 0;
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    for (j = 0; j < TX_TYPES; ++j)
      savings += prob_diff_update_savings(
          av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
          cm->counts.intra_ext_tx[i][j], TX_TYPES);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      for (j = 0; j < TX_TYPES; ++j) {
        prob_diff_update(av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
                         cm->counts.intra_ext_tx[i][j], TX_TYPES, w);
#if CONFIG_DAALA_EC
        av1_tree_to_cdf(av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
                        cm->fc->intra_ext_tx_cdf[i][j]);
#endif
      }
    }
  }
  savings = 0;
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    savings +=
        prob_diff_update_savings(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                                 cm->counts.inter_ext_tx[i], TX_TYPES);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      prob_diff_update(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                       cm->counts.inter_ext_tx[i], TX_TYPES, w);
    }
  }
}
#endif  // CONFIG_EXT_TX
610
#if CONFIG_PALETTE
// Packs `num` palette color-index tokens for a palette of `n` colors,
// advancing *tp past the consumed tokens.  Each token is coded against the
// color tree for its palette size with the token's own context probabilities.
static void pack_palette_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
                                int num) {
  const TOKENEXTRA *tok = *tp;
  int idx;

  for (idx = 0; idx < num; ++idx, ++tok) {
    av1_write_token(w, av1_palette_color_tree[n - 2], tok->context_tree,
                    &palette_color_encodings[n - 2][tok->token]);
  }

  *tp = tok;
}
#endif  // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -0700626
#if CONFIG_SUPERTX
// Group-updates the supertx probabilities: sums the per-probability diff
// update savings over all partition contexts and tx sizes (size 0 skipped),
// writes one flag coded with GROUP_DIFF_UPDATE_PROB, and emits the updates
// only when the flag is set.  Both passes iterate in the same order.
static void update_supertx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;
  int savings = 0;
  int do_update = 0;
  for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
    for (j = 1; j < TX_SIZES; ++j) {
      savings += av1_cond_prob_diff_update_savings(&cm->fc->supertx_prob[i][j],
                                                   cm->counts.supertx[i][j]);
    }
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
      for (j = 1; j < TX_SIZES; ++j) {
        av1_cond_prob_diff_update(w, &cm->fc->supertx_prob[i][j],
                                  cm->counts.supertx[i][j]);
      }
    }
  }
}
#endif  // CONFIG_SUPERTX
652
// Serialize coefficient tokens from *tp into the bit writer, stopping at
// 'stop' or at the first EOSB_TOKEN (or, with CONFIG_VAR_TX, after an EOB
// token / after get_tx2d_size(tx) coefficients).  'bit_depth' selects the
// extra-bit probability tables; 'tx' bounds how many high CAT6 extra bits
// are known-zero and can be skipped.  *tp is advanced past the consumed
// tokens.  The emission order here must mirror the decoder exactly.
static void pack_mb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop,
                           aom_bit_depth_t bit_depth, const TX_SIZE tx) {
  const TOKENEXTRA *p = *tp;
#if CONFIG_VAR_TX
  int count = 0;
  const int seg_eob = get_tx2d_size(tx);  // max coefficients in this tx block
#endif

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
#if !CONFIG_ANS
    // Tree-coded value/length for this token (not used by the ANS coder).
    const struct av1_token *const a = &av1_coef_encodings[t];
    int v = a->value;
    int n = a->len;
#endif  // !CONFIG_ANS
#if CONFIG_AOM_HIGHBITDEPTH
    // Extra-bit tables depend on the coded bit depth.
    const av1_extra_bit *b;
    if (bit_depth == AOM_BITS_12)
      b = &av1_extra_bits_high12[t];
    else if (bit_depth == AOM_BITS_10)
      b = &av1_extra_bits_high10[t];
    else
      b = &av1_extra_bits[t];
#else
    const av1_extra_bit *const b = &av1_extra_bits[t];
    (void)bit_depth;
#endif  // CONFIG_AOM_HIGHBITDEPTH

#if CONFIG_ANS
    /* skip one or two nodes */
    // EOB flag is omitted when the tokenizer marked it as skippable.
    if (!p->skip_eob_node) aom_write(w, t != EOB_TOKEN, p->context_tree[0]);

    if (t != EOB_TOKEN) {
      aom_write(w, t != ZERO_TOKEN, p->context_tree[1]);

      if (t != ZERO_TOKEN) {
        // Remaining magnitude classes are coded as one multi-symbol CDF.
        aom_write_symbol(w, t - ONE_TOKEN, *p->token_cdf,
                         CATEGORY6_TOKEN - ONE_TOKEN + 1);
      }
    }
#else
    /* skip one or two nodes */
    if (p->skip_eob_node)
      n -= p->skip_eob_node;
    else
      aom_write(w, t != EOB_TOKEN, p->context_tree[0]);

    if (t != EOB_TOKEN) {
      aom_write(w, t != ZERO_TOKEN, p->context_tree[1]);

      if (t != ZERO_TOKEN) {
        aom_write(w, t != ONE_TOKEN, p->context_tree[2]);

        if (t != ONE_TOKEN) {
          // Code the constrained part of the token tree with Pareto-derived
          // probabilities selected by the pivot-node context.
          int len = UNCONSTRAINED_NODES - p->skip_eob_node;
          aom_write_tree(w, av1_coef_con_tree,
                         av1_pareto8_full[p->context_tree[PIVOT_NODE] - 1], v,
                         n - len, 0);
        }
      }
    }
#endif  // CONFIG_ANS

    if (b->base_val) {
      // Token carries extra magnitude bits (categories): 'e' holds the
      // residual magnitude plus the sign in its low bit.
      const int e = p->extra, l = b->len;
      // For CAT6, high-order extra bits beyond what this transform size can
      // produce are known to be zero (the assert below relies on this) and
      // are not written.
      int skip_bits = (b->base_val == CAT6_MIN_VAL)
                          ? TX_SIZES - 1 - txsize_sqr_up_map[tx]
                          : 0;

      if (l) {
        const unsigned char *pb = b->prob;
        int v = e >> 1;
        int n = l; /* number of bits in v, assumed nonzero */
        int i = 0;

        // Walk the extra-bit tree from MSB to LSB.
        do {
          const int bb = (v >> --n) & 1;
          if (skip_bits) {
            skip_bits--;
            assert(!bb);
          } else {
            aom_write(w, bb, pb[i >> 1]);
          }
          i = b->tree[i + bb];
        } while (n);
      }

      // Sign bit, coded as a raw bit.
      aom_write_bit(w, e & 1);
    }
    ++p;

#if CONFIG_VAR_TX
    // Variable tx: a single call packs exactly one transform block.
    ++count;
    if (t == EOB_TOKEN || count == seg_eob) break;
#endif
  }

  *tp = p;
}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700753
#if CONFIG_VAR_TX
// Recursively pack the tokens of one transform block at (blk_row, blk_col)
// in 4x4 units within 'plane_bsize'.  If 'tx_size' matches the size coded
// for this position (mbmi->inter_tx_size, mapped through uv_txsize_lookup
// for chroma), the tokens are emitted directly; otherwise the block is
// split into four quadrants one tx size smaller and recursed.  Positions
// outside the visible frame (mb_to_bottom/right_edge adjustments) are
// skipped.
static void pack_txb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                            const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
                            MB_MODE_INFO *mbmi, int plane,
                            BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
                            int block, int blk_row, int blk_col,
                            TX_SIZE tx_size) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  // Map luma 4x4-block coordinates to this plane's subsampled grid.
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  TX_SIZE plane_tx_size;
  int max_blocks_high = num_4x4_blocks_high_lookup[plane_bsize];
  int max_blocks_wide = num_4x4_blocks_wide_lookup[plane_bsize];

  // Clip the block extent at the frame boundary (edges are negative when
  // the superblock overhangs the frame).
  if (xd->mb_to_bottom_edge < 0)
    max_blocks_high += xd->mb_to_bottom_edge >> (5 + pd->subsampling_y);
  if (xd->mb_to_right_edge < 0)
    max_blocks_wide += xd->mb_to_right_edge >> (5 + pd->subsampling_x);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];

  if (tx_size == plane_tx_size) {
    // Reached the coded transform size: emit this block's tokens.
    pack_mb_tokens(w, tp, tok_end, bit_depth, tx_size);
  } else {
    // Split into four quadrants of the next-smaller transform size.
    int bsl = b_width_log2_lookup[bsize];
    int i;

    assert(bsl > 0);
    --bsl;

    for (i = 0; i < 4; ++i) {
      const int offsetr = blk_row + ((i >> 1) << bsl);
      const int offsetc = blk_col + ((i & 0x01) << bsl);
      int step = num_4x4_blocks_txsize_lookup[tx_size - 1];

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      pack_txb_tokens(w, tp, tok_end, xd, mbmi, plane, plane_bsize, bit_depth,
                      block + i * step, offsetr, offsetc, tx_size - 1);
    }
  }
}
#endif
802
Yaowu Xuf883b422016-08-30 14:01:10 -0700803static void write_segment_id(aom_writer *w, const struct segmentation *seg,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700804 const struct segmentation_probs *segp,
805 int segment_id) {
806 if (seg->enabled && seg->update_map)
Nathan E. Eggeeeedc632016-06-19 12:02:33 -0400807 aom_write_tree(w, av1_segment_tree, segp->tree_probs, segment_id, 3, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700808}
809
// This function encodes the reference frame
// selection for an inter block: first the compound-vs-single decision
// (when the frame allows REFERENCE_MODE_SELECT), then a small binary tree
// of bits identifying the exact reference(s).  When the segment pins the
// reference frame (SEG_LVL_REF_FRAME) nothing is coded.  The bit layout
// differs between the CONFIG_EXT_REFS and baseline reference sets and must
// mirror the decoder's read order exactly.
static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int is_compound = has_second_ref(mbmi);
  const int segment_id = mbmi->segment_id;

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    // Segment dictates the reference: nothing is written, but the encoder
    // must have chosen consistently with the segment data.
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
           get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
  } else {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      aom_write(w, is_compound, av1_get_reference_mode_prob(cm, xd));
    } else {
      assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (is_compound) {
#if CONFIG_EXT_REFS
      // First bit splits {GOLDEN, LAST3} from {LAST, LAST2} for the forward
      // reference; bit_bwd selects the backward reference.
      const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
                       mbmi->ref_frame[0] == LAST3_FRAME);
      const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
#else   // CONFIG_EXT_REFS
      const int bit = mbmi->ref_frame[0] == GOLDEN_FRAME;
#endif  // CONFIG_EXT_REFS

      aom_write(w, bit, av1_get_pred_prob_comp_ref_p(cm, xd));

#if CONFIG_EXT_REFS
      if (!bit) {
        const int bit1 = mbmi->ref_frame[0] == LAST_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_comp_ref_p1(cm, xd));
      } else {
        const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
        aom_write(w, bit2, av1_get_pred_prob_comp_ref_p2(cm, xd));
      }
      aom_write(w, bit_bwd, av1_get_pred_prob_comp_bwdref_p(cm, xd));
#endif  // CONFIG_EXT_REFS
    } else {
#if CONFIG_EXT_REFS
      // Single reference: binary tree over
      // {LAST, LAST2, LAST3, GOLDEN, BWDREF, ALTREF}.
      const int bit0 = (mbmi->ref_frame[0] == ALTREF_FRAME ||
                        mbmi->ref_frame[0] == BWDREF_FRAME);
      aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd));

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd));
      } else {
        const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
                          mbmi->ref_frame[0] == GOLDEN_FRAME);
        aom_write(w, bit2, av1_get_pred_prob_single_ref_p3(cm, xd));

        if (!bit2) {
          const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
          aom_write(w, bit3, av1_get_pred_prob_single_ref_p4(cm, xd));
        } else {
          const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
          aom_write(w, bit4, av1_get_pred_prob_single_ref_p5(cm, xd));
        }
      }
#else   // CONFIG_EXT_REFS
      const int bit0 = mbmi->ref_frame[0] != LAST_FRAME;
      aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd));

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] != GOLDEN_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd));
      }
#endif  // CONFIG_EXT_REFS
    }
  }
}
887
888#if CONFIG_EXT_INTRA
Yaowu Xuf883b422016-08-30 14:01:10 -0700889static void write_ext_intra_mode_info(const AV1_COMMON *const cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700890 const MB_MODE_INFO *const mbmi,
Yaowu Xuf883b422016-08-30 14:01:10 -0700891 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700892#if !ALLOW_FILTER_INTRA_MODES
893 return;
894#endif
Urvang Joshib100db72016-10-12 16:28:56 -0700895 if (mbmi->mode == DC_PRED
896#if CONFIG_PALETTE
897 && mbmi->palette_mode_info.palette_size[0] == 0
898#endif // CONFIG_PALETTE
899 ) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700900 aom_write(w, mbmi->ext_intra_mode_info.use_ext_intra_mode[0],
901 cm->fc->ext_intra_probs[0]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700902 if (mbmi->ext_intra_mode_info.use_ext_intra_mode[0]) {
903 EXT_INTRA_MODE mode = mbmi->ext_intra_mode_info.ext_intra_mode[0];
904 write_uniform(w, FILTER_INTRA_MODES, mode);
905 }
906 }
907
Urvang Joshib100db72016-10-12 16:28:56 -0700908 if (mbmi->uv_mode == DC_PRED
909#if CONFIG_PALETTE
910 && mbmi->palette_mode_info.palette_size[1] == 0
911#endif // CONFIG_PALETTE
912 ) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700913 aom_write(w, mbmi->ext_intra_mode_info.use_ext_intra_mode[1],
914 cm->fc->ext_intra_probs[1]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700915 if (mbmi->ext_intra_mode_info.use_ext_intra_mode[1]) {
916 EXT_INTRA_MODE mode = mbmi->ext_intra_mode_info.ext_intra_mode[1];
917 write_uniform(w, FILTER_INTRA_MODES, mode);
918 }
919 }
920}
921
// Signal the extra intra-prediction angle information for blocks >= 8x8:
// for a directional luma mode (anything but DC/TM) the angle delta is
// written as a uniform code, followed — when the resulting prediction
// angle permits switching — by the intra interpolation filter choice;
// a directional chroma mode gets its own angle delta.
static void write_intra_angle_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const int intra_filter_ctx = av1_get_pred_context_intra_interp(xd);
  int p_angle;

  // Sub-8x8 blocks carry no angle information.
  if (bsize < BLOCK_8X8) return;

  if (mbmi->mode != DC_PRED && mbmi->mode != TM_PRED) {
    // Angle delta is offset by MAX_ANGLE_DELTAS to make it non-negative.
    write_uniform(w, 2 * MAX_ANGLE_DELTAS + 1,
                  MAX_ANGLE_DELTAS + mbmi->angle_delta[0]);
    p_angle = mode_to_angle_map[mbmi->mode] + mbmi->angle_delta[0] * ANGLE_STEP;
    if (av1_is_intra_filter_switchable(p_angle)) {
      av1_write_token(w, av1_intra_filter_tree,
                      cm->fc->intra_filter_probs[intra_filter_ctx],
                      &intra_filter_encodings[mbmi->intra_filter]);
    }
  }

  if (mbmi->uv_mode != DC_PRED && mbmi->uv_mode != TM_PRED) {
    write_uniform(w, 2 * MAX_ANGLE_DELTAS + 1,
                  MAX_ANGLE_DELTAS + mbmi->angle_delta[1]);
  }
}
947#endif // CONFIG_EXT_INTRA
948
Yaowu Xuf883b422016-08-30 14:01:10 -0700949static void write_switchable_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd,
950 aom_writer *w) {
951 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700952 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
953#if CONFIG_DUAL_FILTER
954 int dir;
955#endif
956 if (cm->interp_filter == SWITCHABLE) {
957#if CONFIG_EXT_INTERP
958#if CONFIG_DUAL_FILTER
Yaowu Xuf883b422016-08-30 14:01:10 -0700959 if (!av1_is_interp_needed(xd)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700960 assert(mbmi->interp_filter[0] == EIGHTTAP_REGULAR);
961 return;
962 }
963#else
Yaowu Xuf883b422016-08-30 14:01:10 -0700964 if (!av1_is_interp_needed(xd)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700965#if CONFIG_DUAL_FILTER
966 assert(mbmi->interp_filter[0] == EIGHTTAP_REGULAR);
967 assert(mbmi->interp_filter[1] == EIGHTTAP_REGULAR);
968#else
969 assert(mbmi->interp_filter == EIGHTTAP_REGULAR);
970#endif
971 return;
972 }
973#endif // CONFIG_DUAL_FILTER
974#endif // CONFIG_EXT_INTERP
975#if CONFIG_DUAL_FILTER
976 for (dir = 0; dir < 2; ++dir) {
977 if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
978 (mbmi->ref_frame[1] > INTRA_FRAME &&
979 has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700980 const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
981 av1_write_token(w, av1_switchable_interp_tree,
982 cm->fc->switchable_interp_prob[ctx],
983 &switchable_interp_encodings[mbmi->interp_filter[dir]]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700984 ++cpi->interp_filter_selected[0][mbmi->interp_filter[dir]];
985 }
986 }
987#else
988 {
Yaowu Xuf883b422016-08-30 14:01:10 -0700989 const int ctx = av1_get_pred_context_switchable_interp(xd);
Nathan E. Egge4947c292016-04-26 11:37:06 -0400990#if CONFIG_DAALA_EC
991 aom_write_tree_cdf(w, av1_switchable_interp_ind[mbmi->interp_filter],
992 cm->fc->switchable_interp_cdf[ctx],
993 SWITCHABLE_FILTERS);
994#else
Yaowu Xuf883b422016-08-30 14:01:10 -0700995 av1_write_token(w, av1_switchable_interp_tree,
996 cm->fc->switchable_interp_prob[ctx],
997 &switchable_interp_encodings[mbmi->interp_filter]);
Nathan E. Egge4947c292016-04-26 11:37:06 -0400998#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700999 ++cpi->interp_filter_selected[0][mbmi->interp_filter];
1000 }
1001#endif
1002 }
1003}
1004
#if CONFIG_PALETTE
// Signal palette usage for a block: for luma (when the mode is DC_PRED) a
// context-coded on/off flag — context counts how many of the above/left
// neighbors use a luma palette — then, if on, the palette size, the raw
// color values at the frame bit depth, and the first color index.  Chroma
// (when uv_mode is DC_PRED) is coded similarly, with its on/off probability
// conditioned on whether the luma palette is in use, and two color
// components per palette entry.
static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                    const MODE_INFO *const mi, aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
  int palette_ctx = 0;
  int n, i;

  if (mbmi->mode == DC_PRED) {
    n = pmi->palette_size[0];
    // Context: number of neighbors (above/left) that use a luma palette.
    if (above_mi)
      palette_ctx += (above_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    if (left_mi)
      palette_ctx += (left_mi->mbmi.palette_mode_info.palette_size[0] > 0);
    aom_write(w, n > 0,
              av1_default_palette_y_mode_prob[bsize - BLOCK_8X8][palette_ctx]);
    if (n > 0) {
      // Palette size n >= 2, hence the 'n - 2' encoding index.
      av1_write_token(w, av1_palette_size_tree,
                      av1_default_palette_y_size_prob[bsize - BLOCK_8X8],
                      &palette_size_encodings[n - 2]);
      for (i = 0; i < n; ++i)
        aom_write_literal(w, pmi->palette_colors[i], cm->bit_depth);
      write_uniform(w, n, pmi->palette_first_color_idx[0]);
    }
  }

  if (mbmi->uv_mode == DC_PRED) {
    n = pmi->palette_size[1];
    // Chroma on/off probability is conditioned on luma palette usage.
    aom_write(w, n > 0,
              av1_default_palette_uv_mode_prob[pmi->palette_size[0] > 0]);
    if (n > 0) {
      av1_write_token(w, av1_palette_size_tree,
                      av1_default_palette_uv_size_prob[bsize - BLOCK_8X8],
                      &palette_size_encodings[n - 2]);
      // Chroma palette entries store U then V in separate strides of the
      // shared palette_colors array.
      for (i = 0; i < n; ++i) {
        aom_write_literal(w, pmi->palette_colors[PALETTE_MAX_SIZE + i],
                          cm->bit_depth);
        aom_write_literal(w, pmi->palette_colors[2 * PALETTE_MAX_SIZE + i],
                          cm->bit_depth);
      }
      write_uniform(w, n, pmi->palette_first_color_idx[1]);
    }
  }
}
#endif  // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001053
Yaowu Xuf883b422016-08-30 14:01:10 -07001054static void pack_inter_mode_mvs(AV1_COMP *cpi, const MODE_INFO *mi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001055#if CONFIG_SUPERTX
1056 int supertx_enabled,
1057#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001058 aom_writer *w) {
1059 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001060#if !CONFIG_REF_MV
1061 const nmv_context *nmvc = &cm->fc->nmvc;
1062#endif
1063 const MACROBLOCK *x = &cpi->td.mb;
1064 const MACROBLOCKD *xd = &x->e_mbd;
1065 const struct segmentation *const seg = &cm->seg;
1066 const struct segmentation_probs *const segp = &cm->fc->seg;
1067 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1068 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
1069 const PREDICTION_MODE mode = mbmi->mode;
1070 const int segment_id = mbmi->segment_id;
1071 const BLOCK_SIZE bsize = mbmi->sb_type;
1072 const int allow_hp = cm->allow_high_precision_mv;
1073 const int is_inter = is_inter_block(mbmi);
1074 const int is_compound = has_second_ref(mbmi);
1075 int skip, ref;
1076
1077 if (seg->update_map) {
1078 if (seg->temporal_update) {
1079 const int pred_flag = mbmi->seg_id_predicted;
Yaowu Xuf883b422016-08-30 14:01:10 -07001080 aom_prob pred_prob = av1_get_pred_prob_seg_id(segp, xd);
1081 aom_write(w, pred_flag, pred_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001082 if (!pred_flag) write_segment_id(w, seg, segp, segment_id);
1083 } else {
1084 write_segment_id(w, seg, segp, segment_id);
1085 }
1086 }
1087
1088#if CONFIG_SUPERTX
1089 if (supertx_enabled)
1090 skip = mbmi->skip;
1091 else
1092 skip = write_skip(cm, xd, segment_id, mi, w);
1093#else
1094 skip = write_skip(cm, xd, segment_id, mi, w);
1095#endif // CONFIG_SUPERTX
1096
1097#if CONFIG_SUPERTX
1098 if (!supertx_enabled)
1099#endif // CONFIG_SUPERTX
1100 if (!segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
Yaowu Xuf883b422016-08-30 14:01:10 -07001101 aom_write(w, is_inter, av1_get_intra_inter_prob(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001102
1103 if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
1104#if CONFIG_SUPERTX
1105 !supertx_enabled &&
1106#endif // CONFIG_SUPERTX
1107 !(is_inter && skip) && !xd->lossless[segment_id]) {
1108#if CONFIG_VAR_TX
1109 if (is_inter) { // This implies skip flag is 0.
1110 const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
1111 const int txb_size = txsize_to_bsize[max_tx_size];
1112 const int bs = num_4x4_blocks_wide_lookup[txb_size];
1113 const int width = num_4x4_blocks_wide_lookup[bsize];
1114 const int height = num_4x4_blocks_high_lookup[bsize];
1115 int idx, idy;
Yue Chena1e48dc2016-08-29 17:29:33 -07001116
1117#if CONFIG_EXT_TX && CONFIG_RECT_TX
Yue Chen49587a72016-09-28 17:09:47 -07001118 if (is_rect_tx_allowed(xd, mbmi)) {
Yue Chena1e48dc2016-08-29 17:29:33 -07001119 int tx_size_cat = inter_tx_size_cat_lookup[bsize];
1120
1121 aom_write(w, is_rect_tx(mbmi->tx_size),
1122 cm->fc->rect_tx_prob[tx_size_cat]);
1123 }
1124
1125 if (is_rect_tx(mbmi->tx_size)) {
1126 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, xd);
1127 } else {
1128#endif // CONFIG_EXT_TX && CONFIG_RECT_TX
1129 for (idy = 0; idy < height; idy += bs)
1130 for (idx = 0; idx < width; idx += bs)
1131 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, idy, idx, w);
1132#if CONFIG_EXT_TX && CONFIG_RECT_TX
1133 }
1134#endif // CONFIG_EXT_TX && CONFIG_RECT_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001135 } else {
1136 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, xd);
1137 write_selected_tx_size(cm, xd, w);
1138 }
1139 } else {
1140 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, xd);
1141#else
1142 write_selected_tx_size(cm, xd, w);
1143#endif
1144 }
1145
1146 if (!is_inter) {
1147 if (bsize >= BLOCK_8X8) {
1148 write_intra_mode(w, mode, cm->fc->y_mode_prob[size_group_lookup[bsize]]);
1149 } else {
1150 int idx, idy;
1151 const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
1152 const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
1153 for (idy = 0; idy < 2; idy += num_4x4_h) {
1154 for (idx = 0; idx < 2; idx += num_4x4_w) {
1155 const PREDICTION_MODE b_mode = mi->bmi[idy * 2 + idx].as_mode;
1156 write_intra_mode(w, b_mode, cm->fc->y_mode_prob[0]);
1157 }
1158 }
1159 }
1160 write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mode]);
1161#if CONFIG_EXT_INTRA
1162 write_intra_angle_info(cm, xd, w);
1163#endif // CONFIG_EXT_INTRA
Urvang Joshib100db72016-10-12 16:28:56 -07001164#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001165 if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
1166 write_palette_mode_info(cm, xd, mi, w);
Urvang Joshib100db72016-10-12 16:28:56 -07001167#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001168#if CONFIG_EXT_INTRA
1169 if (bsize >= BLOCK_8X8) write_ext_intra_mode_info(cm, mbmi, w);
1170#endif // CONFIG_EXT_INTRA
1171 } else {
1172 int16_t mode_ctx = mbmi_ext->mode_context[mbmi->ref_frame[0]];
1173 write_ref_frames(cm, xd, w);
1174
1175#if CONFIG_REF_MV
1176#if CONFIG_EXT_INTER
1177 if (is_compound)
1178 mode_ctx = mbmi_ext->compound_mode_context[mbmi->ref_frame[0]];
1179 else
1180#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001181 mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
1182 mbmi->ref_frame, bsize, -1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001183#endif
1184
1185 // If segment skip is not enabled code the mode.
1186 if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
1187 if (bsize >= BLOCK_8X8) {
1188#if CONFIG_EXT_INTER
1189 if (is_inter_compound_mode(mode))
1190 write_inter_compound_mode(cm, w, mode, mode_ctx);
1191 else if (is_inter_singleref_mode(mode))
1192#endif // CONFIG_EXT_INTER
1193 write_inter_mode(cm, w, mode,
1194#if CONFIG_REF_MV && CONFIG_EXT_INTER
1195 is_compound,
1196#endif // CONFIG_REF_MV && CONFIG_EXT_INTER
1197 mode_ctx);
1198
1199#if CONFIG_REF_MV
1200 if (mode == NEARMV || mode == NEWMV)
1201 write_drl_idx(cm, mbmi, mbmi_ext, w);
1202#endif
1203 }
1204 }
1205
1206#if !CONFIG_EXT_INTERP && !CONFIG_DUAL_FILTER
1207 write_switchable_interp_filter(cpi, xd, w);
1208#endif // !CONFIG_EXT_INTERP
1209
1210 if (bsize < BLOCK_8X8) {
1211 const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
1212 const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
1213 int idx, idy;
1214 for (idy = 0; idy < 2; idy += num_4x4_h) {
1215 for (idx = 0; idx < 2; idx += num_4x4_w) {
1216 const int j = idy * 2 + idx;
1217 const PREDICTION_MODE b_mode = mi->bmi[j].as_mode;
1218#if CONFIG_REF_MV
1219#if CONFIG_EXT_INTER
1220 if (!is_compound)
1221#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001222 mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
1223 mbmi->ref_frame, bsize, j);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001224#endif
1225#if CONFIG_EXT_INTER
1226 if (is_inter_compound_mode(b_mode))
1227 write_inter_compound_mode(cm, w, b_mode, mode_ctx);
1228 else if (is_inter_singleref_mode(b_mode))
1229#endif // CONFIG_EXT_INTER
1230 write_inter_mode(cm, w, b_mode,
1231#if CONFIG_REF_MV && CONFIG_EXT_INTER
1232 has_second_ref(mbmi),
1233#endif // CONFIG_REF_MV && CONFIG_EXT_INTER
1234 mode_ctx);
1235
1236#if CONFIG_EXT_INTER
1237 if (b_mode == NEWMV || b_mode == NEWFROMNEARMV ||
1238 b_mode == NEW_NEWMV) {
1239#else
1240 if (b_mode == NEWMV) {
1241#endif // CONFIG_EXT_INTER
1242 for (ref = 0; ref < 1 + is_compound; ++ref) {
1243#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001244 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1245 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1246 mbmi_ext->ref_mv_stack[rf_type], ref,
1247 mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001248 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1249#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001250 av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[ref].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001251#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001252 &mi->bmi[j].ref_mv[ref].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001253#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001254 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001255#endif
1256#else
1257#if CONFIG_REF_MV
Yaowu Xuf5bbbfa2016-09-26 09:13:38 -07001258 &mi->bmi[j].pred_mv[ref].as_mv, is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001259#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001260 &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001261#endif // CONFIG_REF_MV
1262#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001263 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001264 }
1265 }
1266#if CONFIG_EXT_INTER
1267 else if (b_mode == NEAREST_NEWMV || b_mode == NEAR_NEWMV) {
1268#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001269 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1270 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1271 mbmi_ext->ref_mv_stack[rf_type], 1,
1272 mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001273 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1274#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001275 av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[1].as_mv,
1276 &mi->bmi[j].ref_mv[1].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001277#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001278 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001279#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001280 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001281 } else if (b_mode == NEW_NEARESTMV || b_mode == NEW_NEARMV) {
1282#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001283 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1284 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1285 mbmi_ext->ref_mv_stack[rf_type], 0,
1286 mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001287 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1288#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001289 av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[0].as_mv,
1290 &mi->bmi[j].ref_mv[0].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001291#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001292 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001293#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001294 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001295 }
1296#endif // CONFIG_EXT_INTER
1297 }
1298 }
1299 } else {
1300#if CONFIG_EXT_INTER
1301 if (mode == NEWMV || mode == NEWFROMNEARMV || mode == NEW_NEWMV) {
1302#else
1303 if (mode == NEWMV) {
1304#endif // CONFIG_EXT_INTER
1305 int_mv ref_mv;
1306 for (ref = 0; ref < 1 + is_compound; ++ref) {
1307#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001308 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1309 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1310 mbmi_ext->ref_mv_stack[rf_type], ref,
1311 mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001312 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1313#endif
1314 ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0];
1315#if CONFIG_EXT_INTER
1316 if (mode == NEWFROMNEARMV)
Yaowu Xuf883b422016-08-30 14:01:10 -07001317 av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv,
1318 &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][1].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001319#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001320 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001321#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001322 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001323 else
1324#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001325 av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001326#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001327 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001328#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001329 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001330 }
1331#if CONFIG_EXT_INTER
1332 } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
1333#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001334 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1335 int nmv_ctx =
1336 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1337 mbmi_ext->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001338 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1339#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001340 av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv,
1341 &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001342#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001343 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001344#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001345 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001346 } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
1347#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001348 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1349 int nmv_ctx =
1350 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1351 mbmi_ext->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001352 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1353#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001354 av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv,
1355 &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001356#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001357 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001358#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001359 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001360#endif // CONFIG_EXT_INTER
1361 }
1362 }
1363
1364#if CONFIG_EXT_INTER
1365 if (cpi->common.reference_mode != COMPOUND_REFERENCE &&
1366#if CONFIG_SUPERTX
1367 !supertx_enabled &&
1368#endif // CONFIG_SUPERTX
1369 is_interintra_allowed(mbmi)) {
1370 const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
1371 const int bsize_group = size_group_lookup[bsize];
Yaowu Xuf883b422016-08-30 14:01:10 -07001372 aom_write(w, interintra, cm->fc->interintra_prob[bsize_group]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001373 if (interintra) {
1374 write_interintra_mode(w, mbmi->interintra_mode,
1375 cm->fc->interintra_mode_prob[bsize_group]);
1376 if (is_interintra_wedge_used(bsize)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001377 aom_write(w, mbmi->use_wedge_interintra,
1378 cm->fc->wedge_interintra_prob[bsize]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001379 if (mbmi->use_wedge_interintra) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001380 aom_write_literal(w, mbmi->interintra_wedge_index,
1381 get_wedge_bits_lookup(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001382 assert(mbmi->interintra_wedge_sign == 0);
1383 }
1384 }
1385 }
1386 }
1387#endif // CONFIG_EXT_INTER
1388
Yue Chencb60b182016-10-13 15:18:22 -07001389#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07001390#if CONFIG_SUPERTX
1391 if (!supertx_enabled)
1392#endif // CONFIG_SUPERTX
1393#if CONFIG_EXT_INTER
1394 if (mbmi->ref_frame[1] != INTRA_FRAME)
1395#endif // CONFIG_EXT_INTER
Yue Chencb60b182016-10-13 15:18:22 -07001396 if (is_motion_variation_allowed(mbmi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001397 // TODO(debargha): Might want to only emit this if SEG_LVL_SKIP
1398 // is not active, and assume SIMPLE_TRANSLATION in the decoder if
1399 // it is active.
Yue Chencb60b182016-10-13 15:18:22 -07001400 assert(mbmi->motion_mode < MOTION_MODES);
1401 av1_write_token(w, av1_motion_mode_tree,
1402 cm->fc->motion_mode_prob[bsize],
1403 &motion_mode_encodings[mbmi->motion_mode]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001404 }
Yue Chencb60b182016-10-13 15:18:22 -07001405#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07001406
1407#if CONFIG_EXT_INTER
1408 if (cpi->common.reference_mode != SINGLE_REFERENCE &&
1409 is_inter_compound_mode(mbmi->mode) &&
Yue Chencb60b182016-10-13 15:18:22 -07001410#if CONFIG_MOTION_VAR
1411 !(is_motion_variation_allowed(mbmi) &&
1412 mbmi->motion_mode != SIMPLE_TRANSLATION) &&
1413#endif // CONFIG_MOTION_VAR
Yaowu Xuc27fc142016-08-22 16:08:15 -07001414 is_interinter_wedge_used(bsize)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001415 aom_write(w, mbmi->use_wedge_interinter,
1416 cm->fc->wedge_interinter_prob[bsize]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001417 if (mbmi->use_wedge_interinter) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001418 aom_write_literal(w, mbmi->interinter_wedge_index,
1419 get_wedge_bits_lookup(bsize));
1420 aom_write_bit(w, mbmi->interinter_wedge_sign);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001421 }
1422 }
1423#endif // CONFIG_EXT_INTER
1424
1425#if CONFIG_EXT_INTERP || CONFIG_DUAL_FILTER
1426 write_switchable_interp_filter(cpi, xd, w);
1427#endif // CONFIG_EXT_INTERP
1428 }
1429
1430 if (!FIXED_TX_TYPE) {
1431#if CONFIG_EXT_TX
1432 if (get_ext_tx_types(mbmi->tx_size, bsize, is_inter) > 1 &&
1433 cm->base_qindex > 0 && !mbmi->skip &&
1434#if CONFIG_SUPERTX
1435 !supertx_enabled &&
1436#endif // CONFIG_SUPERTX
1437 !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
1438 int eset = get_ext_tx_set(mbmi->tx_size, bsize, is_inter);
1439 if (is_inter) {
1440 assert(ext_tx_used_inter[eset][mbmi->tx_type]);
1441 if (eset > 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07001442 av1_write_token(
1443 w, av1_ext_tx_inter_tree[eset],
Yaowu Xuc27fc142016-08-22 16:08:15 -07001444 cm->fc->inter_ext_tx_prob[eset][txsize_sqr_map[mbmi->tx_size]],
1445 &ext_tx_inter_encodings[eset][mbmi->tx_type]);
1446 } else if (ALLOW_INTRA_EXT_TX) {
1447 if (eset > 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07001448 av1_write_token(
1449 w, av1_ext_tx_intra_tree[eset],
Yaowu Xuc27fc142016-08-22 16:08:15 -07001450 cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode],
1451 &ext_tx_intra_encodings[eset][mbmi->tx_type]);
1452 }
1453 }
1454#else
1455 if (mbmi->tx_size < TX_32X32 && cm->base_qindex > 0 && !mbmi->skip &&
1456#if CONFIG_SUPERTX
1457 !supertx_enabled &&
1458#endif // CONFIG_SUPERTX
1459 !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
1460 if (is_inter) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001461 av1_write_token(w, av1_ext_tx_tree,
1462 cm->fc->inter_ext_tx_prob[mbmi->tx_size],
1463 &ext_tx_encodings[mbmi->tx_type]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001464 } else {
Nathan E. Egge7c5b4c12016-04-26 12:31:14 -04001465#if CONFIG_DAALA_EC
1466 aom_write_tree_cdf(
1467 w, av1_ext_tx_ind[mbmi->tx_type],
1468 cm->fc->intra_ext_tx_cdf[mbmi->tx_size]
1469 [intra_mode_to_tx_type_context[mbmi->mode]],
1470 TX_TYPES);
1471#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001472 av1_write_token(
1473 w, av1_ext_tx_tree,
clang-format67948d32016-09-07 22:40:40 -07001474 cm->fc
1475 ->intra_ext_tx_prob[mbmi->tx_size]
1476 [intra_mode_to_tx_type_context[mbmi->mode]],
Yaowu Xuc27fc142016-08-22 16:08:15 -07001477 &ext_tx_encodings[mbmi->tx_type]);
Nathan E. Egge7c5b4c12016-04-26 12:31:14 -04001478#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001479 }
1480 } else {
1481 if (!mbmi->skip) {
1482#if CONFIG_SUPERTX
1483 if (!supertx_enabled)
1484#endif // CONFIG_SUPERTX
1485 assert(mbmi->tx_type == DCT_DCT);
1486 }
1487 }
1488#endif // CONFIG_EXT_TX
1489 }
1490}
1491
// Writes the mode info for one block of an intra-only (key) frame:
// segment id, skip flag, transform size, luma/chroma intra modes and,
// depending on build configuration, extended-intra / palette / tx-type
// syntax elements.
static void write_mb_modes_kf(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                              MODE_INFO **mi_8x8, aom_writer *w) {
  const struct segmentation *const seg = &cm->seg;
  const struct segmentation_probs *const segp = &cm->fc->seg;
  const MODE_INFO *const mi = mi_8x8[0];
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;

  if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id);

  write_skip(cm, xd, mbmi->segment_id, mi, w);

  // Transform size is only signalled when it is selectable and the block is
  // not lossless.
  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !xd->lossless[mbmi->segment_id])
    write_selected_tx_size(cm, xd, w);

  if (bsize >= BLOCK_8X8) {
    write_intra_mode(w, mbmi->mode,
                     get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
  } else {
    // Sub-8x8 blocks carry one intra mode per 4x4 sub-block.
    const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
    const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
    int idx, idy;

    for (idy = 0; idy < 2; idy += num_4x4_h) {
      for (idx = 0; idx < 2; idx += num_4x4_w) {
        const int block = idy * 2 + idx;
        write_intra_mode(w, mi->bmi[block].as_mode,
                         get_y_mode_probs(cm, mi, above_mi, left_mi, block));
      }
    }
  }

  // Chroma mode probabilities are conditioned on the luma mode.
  write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mbmi->mode]);
#if CONFIG_EXT_INTRA
  write_intra_angle_info(cm, xd, w);
#endif  // CONFIG_EXT_INTRA
#if CONFIG_PALETTE
  if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
    write_palette_mode_info(cm, xd, mi, w);
#endif  // CONFIG_PALETTE
#if CONFIG_EXT_INTRA
  if (bsize >= BLOCK_8X8) write_ext_intra_mode_info(cm, mbmi, w);
#endif  // CONFIG_EXT_INTRA

  if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
    // An explicit tx_type is only coded when more than one type is allowed,
    // the block is actually coded (not skipped / not SEG_LVL_SKIP) and
    // intra ext-tx signalling is enabled.
    if (get_ext_tx_types(mbmi->tx_size, bsize, 0) > 1 && cm->base_qindex > 0 &&
        !mbmi->skip &&
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP) &&
        ALLOW_INTRA_EXT_TX) {
      int eset = get_ext_tx_set(mbmi->tx_size, bsize, 0);
      if (eset > 0)
        av1_write_token(
            w, av1_ext_tx_intra_tree[eset],
            cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode],
            &ext_tx_intra_encodings[eset][mbmi->tx_type]);
    }
#else
    if (mbmi->tx_size < TX_32X32 && cm->base_qindex > 0 && !mbmi->skip &&
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      av1_write_token(
          w, av1_ext_tx_tree,
          cm->fc->intra_ext_tx_prob[mbmi->tx_size]
                                   [intra_mode_to_tx_type_context[mbmi->mode]],
          &ext_tx_encodings[mbmi->tx_type]);
    }
#endif  // CONFIG_EXT_TX
  }
}
1564
// Wrapper that hides the SUPERTX-only supertx_enabled argument: callers
// always pass it, and it is dropped when CONFIG_SUPERTX is off.
#if CONFIG_SUPERTX
#define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                              mi_row, mi_col)                              \
  write_modes_b(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col)
#else
#define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                              mi_row, mi_col)                              \
  write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col)
#endif  // CONFIG_SUPERTX
1574
// Writes all syntax for one coding block at (mi_row, mi_col): the mode info
// (key-frame or inter path), any palette tokens, and — unless inside a
// supertx partition — the per-plane coefficient tokens. *tok is advanced
// past every consumed token, including the EOSB sentinel per plane.
static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
                          aom_writer *w, const TOKENEXTRA **tok,
                          const TOKENEXTRA *const tok_end,
#if CONFIG_SUPERTX
                          int supertx_enabled,
#endif
                          int mi_row, int mi_col) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  MODE_INFO *m;
  int plane;
  int bh, bw;
#if CONFIG_ANS
  (void)tok;
  (void)tok_end;
  (void)plane;
#endif  // !CONFIG_ANS

  // Point xd at the mode info for this block position.
  xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
  m = xd->mi[0];

  assert(m->mbmi.sb_type <= cm->sb_size);

  bh = num_8x8_blocks_high_lookup[m->mbmi.sb_type];
  bw = num_8x8_blocks_wide_lookup[m->mbmi.sb_type];

  cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cm, xd, xd->mi, w);
  } else {
#if CONFIG_VAR_TX
    xd->above_txfm_context = cm->above_txfm_context + mi_col;
    xd->left_txfm_context =
        xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
#endif
#if CONFIG_EXT_INTERP
    // av1_is_interp_needed needs the ref frame buffers set up to look
    // up if they are scaled. av1_is_interp_needed is in turn needed by
    // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
    set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
#endif  // CONFIG_EXT_INTERP
#if 0
    // NOTE(zoeliu): For debug
    if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
      const PREDICTION_MODE mode = m->mbmi.mode;
      const int segment_id = m->mbmi.segment_id;
      const BLOCK_SIZE bsize = m->mbmi.sb_type;

      // For sub8x8, simply dump out the first sub8x8 block info
      const PREDICTION_MODE b_mode =
          (bsize < BLOCK_8X8) ? m->bmi[0].as_mode : -1;
      const int mv_x = (bsize < BLOCK_8X8) ?
          m->bmi[0].as_mv[0].as_mv.row : m->mbmi.mv[0].as_mv.row;
      const int mv_y = (bsize < BLOCK_8X8) ?
          m->bmi[0].as_mv[0].as_mv.col : m->mbmi.mv[0].as_mv.col;

      printf("Before pack_inter_mode_mvs(): "
             "Frame=%d, (mi_row,mi_col)=(%d,%d), "
             "mode=%d, segment_id=%d, bsize=%d, b_mode=%d, "
             "mv[0]=(%d, %d), ref[0]=%d, ref[1]=%d\n",
             cm->current_video_frame, mi_row, mi_col,
             mode, segment_id, bsize, b_mode, mv_x, mv_y,
             m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
    }
#endif  // 0
    pack_inter_mode_mvs(cpi, m,
#if CONFIG_SUPERTX
                        supertx_enabled,
#endif
                        w);
  }

#if CONFIG_PALETTE
  // Palette indices are sent for luma (plane 0) and chroma (plane 1).
  for (plane = 0; plane <= 1; ++plane) {
    if (m->mbmi.palette_mode_info.palette_size[plane] > 0) {
      const int rows = (4 * num_4x4_blocks_high_lookup[m->mbmi.sb_type]) >>
                       (xd->plane[plane].subsampling_y);
      const int cols = (4 * num_4x4_blocks_wide_lookup[m->mbmi.sb_type]) >>
                       (xd->plane[plane].subsampling_x);
      assert(*tok < tok_end);
      pack_palette_tokens(w, tok, m->mbmi.palette_mode_info.palette_size[plane],
                          rows * cols - 1);
      assert(*tok < tok_end + m->mbmi.skip);
    }
  }
#endif  // CONFIG_PALETTE

#if CONFIG_SUPERTX
  // Inside a supertx partition the coefficient tokens are packed once at
  // the partition level (write_modes_sb), not per block.
  if (supertx_enabled) return;
#endif  // CONFIG_SUPERTX

  if (!m->mbmi.skip) {
    assert(*tok < tok_end);
    for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
#if CONFIG_VAR_TX
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      MB_MODE_INFO *mbmi = &m->mbmi;
      BLOCK_SIZE bsize = mbmi->sb_type;
      const BLOCK_SIZE plane_bsize =
          get_plane_block_size(AOMMAX(bsize, BLOCK_8X8), pd);

      const int num_4x4_w = num_4x4_blocks_wide_lookup[plane_bsize];
      const int num_4x4_h = num_4x4_blocks_high_lookup[plane_bsize];
      int row, col;
#if CONFIG_EXT_TX && CONFIG_RECT_TX
      TX_SIZE tx_size =
          plane ? get_uv_tx_size(mbmi, &xd->plane[plane]) : mbmi->tx_size;

      if (is_inter_block(mbmi) && !is_rect_tx(tx_size)) {
#else
      if (is_inter_block(mbmi)) {
#endif
        // Inter blocks with variable tx: recurse over the tx-size tree
        // per max-size transform unit.
        const TX_SIZE max_tx_size = max_txsize_lookup[plane_bsize];
        const BLOCK_SIZE txb_size = txsize_to_bsize[max_tx_size];
        int bw = num_4x4_blocks_wide_lookup[txb_size];
        int block = 0;
        const int step = num_4x4_blocks_txsize_lookup[max_tx_size];
        for (row = 0; row < num_4x4_h; row += bw) {
          for (col = 0; col < num_4x4_w; col += bw) {
            pack_txb_tokens(w, tok, tok_end, xd, mbmi, plane, plane_bsize,
                            cm->bit_depth, block, row, col, max_tx_size);
            block += step;
          }
        }
      } else {
        // Intra (or rect-tx) blocks: one uniform transform size.
        TX_SIZE tx = plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane])
                           : m->mbmi.tx_size;
        BLOCK_SIZE txb_size = txsize_to_bsize[tx];
        int bw = num_4x4_blocks_wide_lookup[txb_size];
        int bh = num_4x4_blocks_high_lookup[txb_size];

        for (row = 0; row < num_4x4_h; row += bh)
          for (col = 0; col < num_4x4_w; col += bw)
            pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx);
      }
#else
      TX_SIZE tx =
          plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane]) : m->mbmi.tx_size;
      pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx);
#endif  // CONFIG_VAR_TX
      // Each plane's token run must end with the EOSB sentinel.
      assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
      (*tok)++;
    }
  }
}
1722
Yaowu Xuf883b422016-08-30 14:01:10 -07001723static void write_partition(const AV1_COMMON *const cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001724 const MACROBLOCKD *const xd, int hbs, int mi_row,
1725 int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize,
Yaowu Xuf883b422016-08-30 14:01:10 -07001726 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001727 const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
Yaowu Xuf883b422016-08-30 14:01:10 -07001728 const aom_prob *const probs = cm->fc->partition_prob[ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001729 const int has_rows = (mi_row + hbs) < cm->mi_rows;
1730 const int has_cols = (mi_col + hbs) < cm->mi_cols;
1731
1732 if (has_rows && has_cols) {
1733#if CONFIG_EXT_PARTITION_TYPES
1734 if (bsize <= BLOCK_8X8)
Yaowu Xuf883b422016-08-30 14:01:10 -07001735 av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001736 else
Yaowu Xuf883b422016-08-30 14:01:10 -07001737 av1_write_token(w, av1_ext_partition_tree, probs,
1738 &ext_partition_encodings[p]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001739#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001740 av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001741#endif // CONFIG_EXT_PARTITION_TYPES
1742 } else if (!has_rows && has_cols) {
1743 assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
Yaowu Xuf883b422016-08-30 14:01:10 -07001744 aom_write(w, p == PARTITION_SPLIT, probs[1]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001745 } else if (has_rows && !has_cols) {
1746 assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
Yaowu Xuf883b422016-08-30 14:01:10 -07001747 aom_write(w, p == PARTITION_SPLIT, probs[2]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001748 } else {
1749 assert(p == PARTITION_SPLIT);
1750 }
1751}
1752
// Wrapper that hides the SUPERTX-only supertx_enabled argument: callers
// always pass it, and it is dropped when CONFIG_SUPERTX is off.
#if CONFIG_SUPERTX
#define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                               mi_row, mi_col, bsize)                       \
  write_modes_sb(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col, \
                 bsize)
#else
#define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                               mi_row, mi_col, bsize)                       \
  write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, bsize)
#endif  // CONFIG_SUPERTX
1763
// Recursively writes one partition of a superblock: the partition symbol
// for the current block, then either the single block's syntax or the
// sub-partitions, followed by (configuration dependent) supertx token
// packing, partition-context update and per-64x64 CLPF / dering signalling.
static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile,
                           aom_writer *const w, const TOKENEXTRA **tok,
                           const TOKENEXTRA *const tok_end,
#if CONFIG_SUPERTX
                           int supertx_enabled,
#endif
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  const AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;  // half-block step
  const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
  const BLOCK_SIZE subsize = get_subsize(bsize, partition);
#if CONFIG_SUPERTX
  const int mi_offset = mi_row * cm->mi_stride + mi_col;
  MB_MODE_INFO *mbmi;
  // Tokens are packed only at the level where supertx was first signalled.
  const int pack_token = !supertx_enabled;
  TX_SIZE supertx_size;
  int plane;
#endif

  // Blocks entirely outside the frame are not coded.
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
#if CONFIG_SUPERTX
  mbmi = &cm->mi_grid_visible[mi_offset]->mbmi;
  xd->mi = cm->mi_grid_visible + mi_offset;
  set_mi_row_col(xd, tile, mi_row, num_8x8_blocks_high_lookup[bsize], mi_col,
                 num_8x8_blocks_wide_lookup[bsize], cm->mi_rows, cm->mi_cols);
  // Signal whether this partitioned block uses one large transform
  // (supertx); only possible for inter frames, non-NONE partitions,
  // block sizes up to the supertx limit, and non-lossless coding.
  if (!supertx_enabled && !frame_is_intra_only(cm) &&
      partition != PARTITION_NONE && bsize <= MAX_SUPERTX_BLOCK_SIZE &&
      !xd->lossless[0]) {
    aom_prob prob;
    supertx_size = max_txsize_lookup[bsize];
    prob = cm->fc->supertx_prob[partition_supertx_context_lookup[partition]]
                               [supertx_size];
    supertx_enabled = (xd->mi[0]->mbmi.tx_size == supertx_size);
    aom_write(w, supertx_enabled, prob);
  }
#endif  // CONFIG_SUPERTX
  if (subsize < BLOCK_8X8) {
    write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row,
                          mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        // The second half is only coded when it lies inside the frame.
        if (mi_row + hbs < cm->mi_rows)
          write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                                mi_row + hbs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        // The second half is only coded when it lies inside the frame.
        if (mi_col + hbs < cm->mi_cols)
          write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                                mi_row, mi_col + hbs);
        break;
      case PARTITION_SPLIT:
        // Recurse into the four quadrants.
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row, mi_col, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row, mi_col + hbs, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row + hbs, mi_col, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row + hbs, mi_col + hbs, subsize);
        break;
#if CONFIG_EXT_PARTITION_TYPES
      case PARTITION_HORZ_A:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        break;
      case PARTITION_HORZ_B:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col + hbs);
        break;
      case PARTITION_VERT_A:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        break;
      case PARTITION_VERT_B:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col + hbs);
        break;
#endif  // CONFIG_EXT_PARTITION_TYPES
      default: assert(0);
    }
  }
#if CONFIG_SUPERTX
  // At the level where supertx was signalled, code the shared skip flag,
  // tx_type and the single set of coefficient tokens for the whole
  // partition.
  if (partition != PARTITION_NONE && supertx_enabled && pack_token) {
    int skip;
    xd->mi = cm->mi_grid_visible + mi_offset;
    supertx_size = mbmi->tx_size;
    set_mi_row_col(xd, tile, mi_row, num_8x8_blocks_high_lookup[bsize], mi_col,
                   num_8x8_blocks_wide_lookup[bsize], cm->mi_rows, cm->mi_cols);

    assert(IMPLIES(!cm->seg.enabled, mbmi->segment_id_supertx == 0));
    assert(mbmi->segment_id_supertx < MAX_SEGMENTS);

    skip = write_skip(cm, xd, mbmi->segment_id_supertx, xd->mi[0], w);
#if CONFIG_EXT_TX
    if (get_ext_tx_types(supertx_size, bsize, 1) > 1 && !skip) {
      int eset = get_ext_tx_set(supertx_size, bsize, 1);
      if (eset > 0) {
        av1_write_token(w, av1_ext_tx_inter_tree[eset],
                        cm->fc->inter_ext_tx_prob[eset][supertx_size],
                        &ext_tx_inter_encodings[eset][mbmi->tx_type]);
      }
    }
#else
    if (supertx_size < TX_32X32 && !skip) {
      av1_write_token(w, av1_ext_tx_tree,
                      cm->fc->inter_ext_tx_prob[supertx_size],
                      &ext_tx_encodings[mbmi->tx_type]);
    }
#endif  // CONFIG_EXT_TX

    if (!skip) {
      assert(*tok < tok_end);
      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        const int mbmi_txb_size = txsize_to_bsize[mbmi->tx_size];
        const int num_4x4_w = num_4x4_blocks_wide_lookup[mbmi_txb_size];
        const int num_4x4_h = num_4x4_blocks_high_lookup[mbmi_txb_size];
        int row, col;
        TX_SIZE tx =
            plane ? get_uv_tx_size(mbmi, &xd->plane[plane]) : mbmi->tx_size;
        BLOCK_SIZE txb_size = txsize_to_bsize[tx];
        int bw = num_4x4_blocks_wide_lookup[txb_size];

        for (row = 0; row < num_4x4_h; row += bw)
          for (col = 0; col < num_4x4_w; col += bw)
            pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx);
        // Each plane's token run must end with the EOSB sentinel.
        assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
        (*tok)++;
      }
    }
  }
#endif  // CONFIG_SUPERTX

// update partition context
#if CONFIG_EXT_PARTITION_TYPES
  update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
#else
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);

#if CONFIG_CLPF
  // NOTE(review): this CLPF/dering signalling sits inside the #else branch
  // of CONFIG_EXT_PARTITION_TYPES, so it is skipped entirely when extended
  // partition types are enabled -- confirm that is intended.
  if (bsize == BLOCK_64X64 && cm->clpf_blocks && cm->clpf_strength_y &&
      cm->clpf_size != CLPF_NOSIZE) {
    const int tl = mi_row * MI_SIZE / MIN_FB_SIZE * cm->clpf_stride +
                   mi_col * MI_SIZE / MIN_FB_SIZE;
    const int tr = tl + 1;
    const int bl = tl + cm->clpf_stride;
    const int br = tr + cm->clpf_stride;

    // Up to four bits per SB.
    // When clpf_size indicates a size larger than the SB size
    // (CLPF_128X128), one bit for every fourth SB will be transmitted
    // regardless of skip blocks.
    if (cm->clpf_blocks[tl] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[tl], 1);

    if (mi_col + MI_SIZE / 2 < cm->mi_cols &&
        cm->clpf_blocks[tr] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[tr], 1);

    if (mi_row + MI_SIZE / 2 < cm->mi_rows &&
        cm->clpf_blocks[bl] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[bl], 1);

    if (mi_row + MI_SIZE / 2 < cm->mi_rows &&
        mi_col + MI_SIZE / 2 < cm->mi_cols &&
        cm->clpf_blocks[br] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[br], 1);
  }
#endif

#if CONFIG_DERING
  if (bsize == BLOCK_64X64 && cm->dering_level != 0 &&
      !sb_all_skip(cm, mi_row, mi_col)) {
    aom_write_literal(
        w,
        cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain,
        DERING_REFINEMENT_BITS);
  }
#endif
#endif  // CONFIG_EXT_PARTITION_TYPES
}
1974
Yaowu Xuf883b422016-08-30 14:01:10 -07001975static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile,
1976 aom_writer *const w, const TOKENEXTRA **tok,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001977 const TOKENEXTRA *const tok_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001978 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001979 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
1980 const int mi_row_start = tile->mi_row_start;
1981 const int mi_row_end = tile->mi_row_end;
1982 const int mi_col_start = tile->mi_col_start;
1983 const int mi_col_end = tile->mi_col_end;
1984 int mi_row, mi_col;
1985
Yaowu Xuf883b422016-08-30 14:01:10 -07001986 av1_zero_above_context(cm, mi_col_start, mi_col_end);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001987
1988 for (mi_row = mi_row_start; mi_row < mi_row_end; mi_row += cm->mib_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001989 av1_zero_left_context(xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001990
1991 for (mi_col = mi_col_start; mi_col < mi_col_end; mi_col += cm->mib_size) {
1992 write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, 0, mi_row, mi_col,
1993 cm->sb_size);
1994 }
1995 }
1996}
1997
Yaowu Xuf883b422016-08-30 14:01:10 -07001998static void build_tree_distribution(AV1_COMP *cpi, TX_SIZE tx_size,
1999 av1_coeff_stats *coef_branch_ct,
2000 av1_coeff_probs_model *coef_probs) {
2001 av1_coeff_count *coef_counts = cpi->td.rd_counts.coef_counts[tx_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002002 unsigned int(*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
2003 cpi->common.counts.eob_branch[tx_size];
2004 int i, j, k, l, m;
2005
2006 for (i = 0; i < PLANE_TYPES; ++i) {
2007 for (j = 0; j < REF_TYPES; ++j) {
2008 for (k = 0; k < COEF_BANDS; ++k) {
2009 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002010 av1_tree_probs_from_distribution(av1_coef_tree,
2011 coef_branch_ct[i][j][k][l],
2012 coef_counts[i][j][k][l]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002013 coef_branch_ct[i][j][k][l][0][1] =
2014 eob_branch_ct[i][j][k][l] - coef_branch_ct[i][j][k][l][0][0];
2015 for (m = 0; m < UNCONSTRAINED_NODES; ++m)
2016 coef_probs[i][j][k][l][m] =
2017 get_binary_prob(coef_branch_ct[i][j][k][l][m][0],
2018 coef_branch_ct[i][j][k][l][m][1]);
2019 }
2020 }
2021 }
2022 }
2023}
2024
Yaowu Xuf883b422016-08-30 14:01:10 -07002025static void update_coef_probs_common(aom_writer *const bc, AV1_COMP *cpi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002026 TX_SIZE tx_size,
Yaowu Xuf883b422016-08-30 14:01:10 -07002027 av1_coeff_stats *frame_branch_ct,
2028 av1_coeff_probs_model *new_coef_probs) {
2029 av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
2030 const aom_prob upd = DIFF_UPDATE_PROB;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002031 const int entropy_nodes_update = UNCONSTRAINED_NODES;
2032 int i, j, k, l, t;
2033 int stepsize = cpi->sf.coeff_prob_appx_step;
2034
2035 switch (cpi->sf.use_fast_coef_updates) {
2036 case TWO_LOOP: {
2037 /* dry run to see if there is any update at all needed */
2038 int savings = 0;
2039 int update[2] = { 0, 0 };
2040 for (i = 0; i < PLANE_TYPES; ++i) {
2041 for (j = 0; j < REF_TYPES; ++j) {
2042 for (k = 0; k < COEF_BANDS; ++k) {
2043 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
2044 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002045 aom_prob newp = new_coef_probs[i][j][k][l][t];
2046 const aom_prob oldp = old_coef_probs[i][j][k][l][t];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002047 int s;
2048 int u = 0;
2049 if (t == PIVOT_NODE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002050 s = av1_prob_diff_update_savings_search_model(
Yaowu Xuc27fc142016-08-22 16:08:15 -07002051 frame_branch_ct[i][j][k][l][0],
2052 old_coef_probs[i][j][k][l], &newp, upd, stepsize);
2053 else
Yaowu Xuf883b422016-08-30 14:01:10 -07002054 s = av1_prob_diff_update_savings_search(
Yaowu Xuc27fc142016-08-22 16:08:15 -07002055 frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
2056 if (s > 0 && newp != oldp) u = 1;
2057 if (u)
Yaowu Xuf883b422016-08-30 14:01:10 -07002058 savings += s - (int)(av1_cost_zero(upd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002059 else
Yaowu Xuf883b422016-08-30 14:01:10 -07002060 savings -= (int)(av1_cost_zero(upd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002061 update[u]++;
2062 }
2063 }
2064 }
2065 }
2066 }
2067
2068 /* Is coef updated at all */
2069 if (update[1] == 0 || savings < 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002070 aom_write_bit(bc, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002071 return;
2072 }
Yaowu Xuf883b422016-08-30 14:01:10 -07002073 aom_write_bit(bc, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002074 for (i = 0; i < PLANE_TYPES; ++i) {
2075 for (j = 0; j < REF_TYPES; ++j) {
2076 for (k = 0; k < COEF_BANDS; ++k) {
2077 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
2078 // calc probs and branch cts for this frame only
2079 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002080 aom_prob newp = new_coef_probs[i][j][k][l][t];
2081 aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
2082 const aom_prob upd = DIFF_UPDATE_PROB;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002083 int s;
2084 int u = 0;
2085 if (t == PIVOT_NODE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002086 s = av1_prob_diff_update_savings_search_model(
Yaowu Xuc27fc142016-08-22 16:08:15 -07002087 frame_branch_ct[i][j][k][l][0],
2088 old_coef_probs[i][j][k][l], &newp, upd, stepsize);
2089 else
Yaowu Xuf883b422016-08-30 14:01:10 -07002090 s = av1_prob_diff_update_savings_search(
Yaowu Xuc27fc142016-08-22 16:08:15 -07002091 frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd);
2092 if (s > 0 && newp != *oldp) u = 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07002093 aom_write(bc, u, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002094 if (u) {
2095 /* send/use new probability */
Yaowu Xuf883b422016-08-30 14:01:10 -07002096 av1_write_prob_diff_update(bc, newp, *oldp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002097 *oldp = newp;
2098 }
2099 }
2100 }
2101 }
2102 }
2103 }
2104 return;
2105 }
2106
2107 case ONE_LOOP_REDUCED: {
2108 int updates = 0;
2109 int noupdates_before_first = 0;
2110 for (i = 0; i < PLANE_TYPES; ++i) {
2111 for (j = 0; j < REF_TYPES; ++j) {
2112 for (k = 0; k < COEF_BANDS; ++k) {
2113 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
2114 // calc probs and branch cts for this frame only
2115 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002116 aom_prob newp = new_coef_probs[i][j][k][l][t];
2117 aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002118 int s;
2119 int u = 0;
2120
2121 if (t == PIVOT_NODE) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002122 s = av1_prob_diff_update_savings_search_model(
Yaowu Xuc27fc142016-08-22 16:08:15 -07002123 frame_branch_ct[i][j][k][l][0],
2124 old_coef_probs[i][j][k][l], &newp, upd, stepsize);
2125 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002126 s = av1_prob_diff_update_savings_search(
Yaowu Xuc27fc142016-08-22 16:08:15 -07002127 frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd);
2128 }
2129
2130 if (s > 0 && newp != *oldp) u = 1;
2131 updates += u;
2132 if (u == 0 && updates == 0) {
2133 noupdates_before_first++;
2134 continue;
2135 }
2136 if (u == 1 && updates == 1) {
2137 int v;
2138 // first update
Yaowu Xuf883b422016-08-30 14:01:10 -07002139 aom_write_bit(bc, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002140 for (v = 0; v < noupdates_before_first; ++v)
Yaowu Xuf883b422016-08-30 14:01:10 -07002141 aom_write(bc, 0, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002142 }
Yaowu Xuf883b422016-08-30 14:01:10 -07002143 aom_write(bc, u, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002144 if (u) {
2145 /* send/use new probability */
Yaowu Xuf883b422016-08-30 14:01:10 -07002146 av1_write_prob_diff_update(bc, newp, *oldp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002147 *oldp = newp;
2148 }
2149 }
2150 }
2151 }
2152 }
2153 }
2154 if (updates == 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002155 aom_write_bit(bc, 0); // no updates
Yaowu Xuc27fc142016-08-22 16:08:15 -07002156 }
2157 return;
2158 }
2159 default: assert(0);
2160 }
2161}
2162
2163#if CONFIG_ENTROPY
2164// Calculate the token counts between subsequent subframe updates.
// Compute the per-subframe deltas of coefficient token counts and EOB counts
// for update slot |index|.  Snapshots stored in cpi->subframe_stats are
// cumulative, so the delta for slot i is snapshot[i+1] - snapshot[i]; for the
// final slot (index == max_idx) the delta is taken against the live
// whole-frame counters instead.  Results are written into |coef_counts| and
// |eob_counts|.
static void get_coef_counts_diff(AV1_COMP *cpi, int index,
                                 av1_coeff_count coef_counts[TX_SIZES]
                                                            [PLANE_TYPES],
                                 unsigned int eob_counts[TX_SIZES][PLANE_TYPES]
                                                        [REF_TYPES][COEF_BANDS]
                                                        [COEFF_CONTEXTS]) {
  int i, j, k, l, m, tx_size, val;
  const int max_idx = cpi->common.coef_probs_update_idx;
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const int max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  const SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats;

  assert(max_idx < COEF_PROBS_BUFS);

  // Iterate every (tx size, plane type, ref type, band, context) cell.
  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    for (i = 0; i < PLANE_TYPES; ++i)
      for (j = 0; j < REF_TYPES; ++j)
        for (k = 0; k < COEF_BANDS; ++k)
          for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
            // EOB-branch count delta for this cell.
            if (index == max_idx) {
              val =
                  cpi->common.counts.eob_branch[tx_size][i][j][k][l] -
                  subframe_stats->eob_counts_buf[max_idx][tx_size][i][j][k][l];
            } else {
              val = subframe_stats->eob_counts_buf[index + 1][tx_size][i][j][k]
                                                 [l] -
                    subframe_stats->eob_counts_buf[index][tx_size][i][j][k][l];
            }
            // Snapshots are cumulative, so each delta must be non-negative.
            assert(val >= 0);
            eob_counts[tx_size][i][j][k][l] = val;

            // Token count deltas, one per entropy token.
            for (m = 0; m < ENTROPY_TOKENS; ++m) {
              if (index == max_idx) {
                val = cpi->td.rd_counts.coef_counts[tx_size][i][j][k][l][m] -
                      subframe_stats->coef_counts_buf[max_idx][tx_size][i][j][k]
                                                    [l][m];
              } else {
                val = subframe_stats->coef_counts_buf[index + 1][tx_size][i][j]
                                                    [k][l][m] -
                      subframe_stats->coef_counts_buf[index][tx_size][i][j][k]
                                                    [l][m];
              }
              assert(val >= 0);
              coef_counts[tx_size][i][j][k][l][m] = val;
            }
          }
}
2212
2213static void update_coef_probs_subframe(
Yaowu Xuf883b422016-08-30 14:01:10 -07002214 aom_writer *const bc, AV1_COMP *cpi, TX_SIZE tx_size,
2215 av1_coeff_stats branch_ct[COEF_PROBS_BUFS][TX_SIZES][PLANE_TYPES],
2216 av1_coeff_probs_model *new_coef_probs) {
2217 av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
2218 const aom_prob upd = DIFF_UPDATE_PROB;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002219 const int entropy_nodes_update = UNCONSTRAINED_NODES;
2220 int i, j, k, l, t;
2221 int stepsize = cpi->sf.coeff_prob_appx_step;
2222 const int max_idx = cpi->common.coef_probs_update_idx;
2223 int idx;
2224 unsigned int this_branch_ct[ENTROPY_NODES][COEF_PROBS_BUFS][2];
2225
2226 switch (cpi->sf.use_fast_coef_updates) {
2227 case TWO_LOOP: {
2228 /* dry run to see if there is any update at all needed */
2229 int savings = 0;
2230 int update[2] = { 0, 0 };
2231 for (i = 0; i < PLANE_TYPES; ++i) {
2232 for (j = 0; j < REF_TYPES; ++j) {
2233 for (k = 0; k < COEF_BANDS; ++k) {
2234 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
2235 for (t = 0; t < ENTROPY_NODES; ++t) {
2236 for (idx = 0; idx <= max_idx; ++idx) {
2237 memcpy(this_branch_ct[t][idx],
2238 branch_ct[idx][tx_size][i][j][k][l][t],
2239 2 * sizeof(this_branch_ct[t][idx][0]));
2240 }
2241 }
2242 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002243 aom_prob newp = new_coef_probs[i][j][k][l][t];
2244 const aom_prob oldp = old_coef_probs[i][j][k][l][t];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002245 int s, u = 0;
2246
2247 if (t == PIVOT_NODE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002248 s = av1_prob_update_search_model_subframe(
Yaowu Xuc27fc142016-08-22 16:08:15 -07002249 this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
2250 stepsize, max_idx);
2251 else
Yaowu Xuf883b422016-08-30 14:01:10 -07002252 s = av1_prob_update_search_subframe(this_branch_ct[t], oldp,
2253 &newp, upd, max_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002254 if (s > 0 && newp != oldp) u = 1;
2255 if (u)
Yaowu Xuf883b422016-08-30 14:01:10 -07002256 savings += s - (int)(av1_cost_zero(upd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002257 else
Yaowu Xuf883b422016-08-30 14:01:10 -07002258 savings -= (int)(av1_cost_zero(upd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002259 update[u]++;
2260 }
2261 }
2262 }
2263 }
2264 }
2265
2266 /* Is coef updated at all */
2267 if (update[1] == 0 || savings < 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002268 aom_write_bit(bc, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002269 return;
2270 }
Yaowu Xuf883b422016-08-30 14:01:10 -07002271 aom_write_bit(bc, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002272 for (i = 0; i < PLANE_TYPES; ++i) {
2273 for (j = 0; j < REF_TYPES; ++j) {
2274 for (k = 0; k < COEF_BANDS; ++k) {
2275 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
2276 for (t = 0; t < ENTROPY_NODES; ++t) {
2277 for (idx = 0; idx <= max_idx; ++idx) {
2278 memcpy(this_branch_ct[t][idx],
2279 branch_ct[idx][tx_size][i][j][k][l][t],
2280 2 * sizeof(this_branch_ct[t][idx][0]));
2281 }
2282 }
2283 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002284 aom_prob newp = new_coef_probs[i][j][k][l][t];
2285 aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
2286 const aom_prob upd = DIFF_UPDATE_PROB;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002287 int s;
2288 int u = 0;
2289
2290 if (t == PIVOT_NODE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002291 s = av1_prob_update_search_model_subframe(
Yaowu Xuc27fc142016-08-22 16:08:15 -07002292 this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
2293 stepsize, max_idx);
2294 else
Yaowu Xuf883b422016-08-30 14:01:10 -07002295 s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp,
2296 &newp, upd, max_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002297 if (s > 0 && newp != *oldp) u = 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07002298 aom_write(bc, u, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002299 if (u) {
2300 /* send/use new probability */
Yaowu Xuf883b422016-08-30 14:01:10 -07002301 av1_write_prob_diff_update(bc, newp, *oldp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002302 *oldp = newp;
2303 }
2304 }
2305 }
2306 }
2307 }
2308 }
2309 return;
2310 }
2311
2312 case ONE_LOOP_REDUCED: {
2313 int updates = 0;
2314 int noupdates_before_first = 0;
2315 for (i = 0; i < PLANE_TYPES; ++i) {
2316 for (j = 0; j < REF_TYPES; ++j) {
2317 for (k = 0; k < COEF_BANDS; ++k) {
2318 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
2319 for (t = 0; t < ENTROPY_NODES; ++t) {
2320 for (idx = 0; idx <= max_idx; ++idx) {
2321 memcpy(this_branch_ct[t][idx],
2322 branch_ct[idx][tx_size][i][j][k][l][t],
2323 2 * sizeof(this_branch_ct[t][idx][0]));
2324 }
2325 }
2326 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002327 aom_prob newp = new_coef_probs[i][j][k][l][t];
2328 aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002329 int s;
2330 int u = 0;
2331
2332 if (t == PIVOT_NODE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002333 s = av1_prob_update_search_model_subframe(
Yaowu Xuc27fc142016-08-22 16:08:15 -07002334 this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
2335 stepsize, max_idx);
2336 else
Yaowu Xuf883b422016-08-30 14:01:10 -07002337 s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp,
2338 &newp, upd, max_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002339 if (s > 0 && newp != *oldp) u = 1;
2340 updates += u;
2341 if (u == 0 && updates == 0) {
2342 noupdates_before_first++;
2343 continue;
2344 }
2345 if (u == 1 && updates == 1) {
2346 int v;
2347 // first update
Yaowu Xuf883b422016-08-30 14:01:10 -07002348 aom_write_bit(bc, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002349 for (v = 0; v < noupdates_before_first; ++v)
Yaowu Xuf883b422016-08-30 14:01:10 -07002350 aom_write(bc, 0, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002351 }
Yaowu Xuf883b422016-08-30 14:01:10 -07002352 aom_write(bc, u, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002353 if (u) {
2354 /* send/use new probability */
Yaowu Xuf883b422016-08-30 14:01:10 -07002355 av1_write_prob_diff_update(bc, newp, *oldp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002356 *oldp = newp;
2357 }
2358 }
2359 }
2360 }
2361 }
2362 }
2363 if (updates == 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002364 aom_write_bit(bc, 0); // no updates
Yaowu Xuc27fc142016-08-22 16:08:15 -07002365 }
2366 return;
2367 }
2368 default: assert(0);
2369 }
2370}
2371#endif // CONFIG_ENTROPY
2372
// Top-level coefficient-probability update: for each in-use transform size,
// either signal "no update" (when the size saw too few blocks to be worth it)
// or build the token-count distributions and emit a full probability update.
// Under CONFIG_ENTROPY with backward refresh, per-subframe statistics are
// rebuilt and the subframe update path is used; the encoder then replays the
// subframe adaptation so its probability state matches the decoder's.
static void update_coef_probs(AV1_COMP *cpi, aom_writer *w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
#if CONFIG_ANS
  int update = 0;  // set when any tx size signalled an update; gates CDF rebuild
#endif  // CONFIG_ANS
#if CONFIG_ENTROPY
  AV1_COMMON *cm = &cpi->common;
  SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats;
  // Saved whole-frame EOB counters, restored after the subframe replay below.
  // NOTE(review): a same-named, smaller array inside the loop shadows this
  // one — presumably intentional (per-tx-size scratch) but worth renaming.
  unsigned int eob_counts_copy[TX_SIZES][PLANE_TYPES][REF_TYPES][COEF_BANDS]
                              [COEFF_CONTEXTS];
  int i;
  av1_coeff_probs_model dummy_frame_coef_probs[PLANE_TYPES];

  if (cm->do_subframe_update &&
      cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
    // Restart from the probabilities the encoder began the frame with, and
    // recompute the per-subframe count deltas.
    av1_copy(cpi->common.fc->coef_probs,
             subframe_stats->enc_starting_coef_probs);
    for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) {
      get_coef_counts_diff(cpi, i, cpi->wholeframe_stats.coef_counts_buf[i],
                           cpi->wholeframe_stats.eob_counts_buf[i]);
    }
  }
#endif  // CONFIG_ENTROPY

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size) {
    av1_coeff_stats frame_branch_ct[PLANE_TYPES];
    av1_coeff_probs_model frame_coef_probs[PLANE_TYPES];
    // Skip sizes with too few samples (or excluded by the tx-size search
    // mode) — a single 0 bit tells the decoder "no update" for this size.
    if (cpi->td.counts->tx_size_totals[tx_size] <= 20 ||
        (tx_size >= TX_16X16 && cpi->sf.tx_size_search_method == USE_TX_8X8)) {
      aom_write_bit(w, 0);
    } else {
#if CONFIG_ENTROPY
      if (cm->do_subframe_update &&
          cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
        // Temporarily swap in each subframe's counts to build its branch
        // stats; the real counters are saved first and restored after.
        unsigned int eob_counts_copy[PLANE_TYPES][REF_TYPES][COEF_BANDS]
                                    [COEFF_CONTEXTS];
        av1_coeff_count coef_counts_copy[PLANE_TYPES];
        av1_copy(eob_counts_copy, cpi->common.counts.eob_branch[tx_size]);
        av1_copy(coef_counts_copy, cpi->td.rd_counts.coef_counts[tx_size]);
        build_tree_distribution(cpi, tx_size, frame_branch_ct,
                                frame_coef_probs);
        for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) {
          av1_copy(cpi->common.counts.eob_branch[tx_size],
                   cpi->wholeframe_stats.eob_counts_buf[i][tx_size]);
          av1_copy(cpi->td.rd_counts.coef_counts[tx_size],
                   cpi->wholeframe_stats.coef_counts_buf[i][tx_size]);
          build_tree_distribution(cpi, tx_size, cpi->branch_ct_buf[i][tx_size],
                                  dummy_frame_coef_probs);
        }
        av1_copy(cpi->common.counts.eob_branch[tx_size], eob_counts_copy);
        av1_copy(cpi->td.rd_counts.coef_counts[tx_size], coef_counts_copy);

        update_coef_probs_subframe(w, cpi, tx_size, cpi->branch_ct_buf,
                                   frame_coef_probs);
#if CONFIG_ANS
        update = 1;
#endif  // CONFIG_ANS
      } else {
#endif  // CONFIG_ENTROPY
        build_tree_distribution(cpi, tx_size, frame_branch_ct,
                                frame_coef_probs);
        update_coef_probs_common(w, cpi, tx_size, frame_branch_ct,
                                 frame_coef_probs);
#if CONFIG_ANS
        update = 1;
#endif  // CONFIG_ANS
#if CONFIG_ENTROPY
      }
#endif  // CONFIG_ENTROPY
    }
  }

#if CONFIG_ENTROPY
  // Record the post-update probabilities as the frame's starting point, then
  // replay the per-subframe adaptation to regenerate each subframe snapshot,
  // leaving the counters and fc probabilities as they were on entry.
  av1_copy(cm->starting_coef_probs, cm->fc->coef_probs);
  av1_copy(subframe_stats->coef_probs_buf[0], cm->fc->coef_probs);
  if (cm->do_subframe_update &&
      cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
    av1_copy(eob_counts_copy, cm->counts.eob_branch);
    for (i = 1; i <= cpi->common.coef_probs_update_idx; ++i) {
      for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
        av1_full_to_model_counts(cm->counts.coef[tx_size],
                                 subframe_stats->coef_counts_buf[i][tx_size]);
      av1_copy(cm->counts.eob_branch, subframe_stats->eob_counts_buf[i]);
      av1_partial_adapt_probs(cm, 0, 0);
      av1_copy(subframe_stats->coef_probs_buf[i], cm->fc->coef_probs);
    }
    av1_copy(cm->fc->coef_probs, subframe_stats->coef_probs_buf[0]);
    av1_copy(cm->counts.eob_branch, eob_counts_copy);
  }
#endif  // CONFIG_ENTROPY
#if CONFIG_ANS
  if (update) av1_coef_pareto_cdfs(cpi->common.fc);
#endif  // CONFIG_ANS
}
2469
2470#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002471static void encode_restoration_mode(AV1_COMMON *cm,
2472 struct aom_write_bit_buffer *wb) {
2473 RestorationInfo *rst = &cm->rst_info;
2474 switch (rst->frame_restoration_type) {
2475 case RESTORE_NONE:
2476 aom_wb_write_bit(wb, 0);
2477 aom_wb_write_bit(wb, 0);
2478 break;
2479 case RESTORE_SWITCHABLE:
2480 aom_wb_write_bit(wb, 0);
2481 aom_wb_write_bit(wb, 1);
2482 break;
2483 case RESTORE_BILATERAL:
2484 aom_wb_write_bit(wb, 1);
2485 aom_wb_write_bit(wb, 0);
2486 break;
2487 case RESTORE_WIENER:
2488 aom_wb_write_bit(wb, 1);
2489 aom_wb_write_bit(wb, 1);
2490 break;
2491 default: assert(0);
2492 }
2493}
2494
// Write the per-tile loop-restoration parameters into the compressed
// bitstream.  Layout depends on the frame restoration type:
//  - SWITCHABLE: a per-tile type token, then that type's parameters;
//  - BILATERAL:  per-subtile presence flag + level;
//  - WIENER:     per-tile presence flag + six quantized filter taps
//                (3 vertical, 3 horizontal, each offset by its minimum).
static void encode_restoration(AV1_COMMON *cm, aom_writer *wb) {
  int i;
  RestorationInfo *rsi = &cm->rst_info;
  if (rsi->frame_restoration_type != RESTORE_NONE) {
    if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
      // RESTORE_SWITCHABLE: signal each tile's chosen restoration type.
      for (i = 0; i < cm->rst_internal.ntiles; ++i) {
        av1_write_token(
            wb, av1_switchable_restore_tree, cm->fc->switchable_restore_prob,
            &switchable_restore_encodings[rsi->restoration_type[i]]);
        if (rsi->restoration_type[i] == RESTORE_BILATERAL) {
          int s;
          for (s = 0; s < BILATERAL_SUBTILES; ++s) {
            // NOTE(review): when BILATERAL_SUBTILES == 0 this loop body never
            // runs, so the #if branch below looks dead — confirm intent.
#if BILATERAL_SUBTILES == 0
            aom_write_literal(wb, rsi->bilateral_info[i].level[s],
                              av1_bilateral_level_bits(cm));
#else
            // Presence flag (level >= 0), then the level itself.
            aom_write(wb, rsi->bilateral_info[i].level[s] >= 0,
                      RESTORE_NONE_BILATERAL_PROB);
            if (rsi->bilateral_info[i].level[s] >= 0) {
              aom_write_literal(wb, rsi->bilateral_info[i].level[s],
                                av1_bilateral_level_bits(cm));
            }
#endif
          }
        } else if (rsi->restoration_type[i] == RESTORE_WIENER) {
          // Six filter taps, biased to be non-negative for literal coding.
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[0] - WIENER_FILT_TAP0_MINV,
              WIENER_FILT_TAP0_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[1] - WIENER_FILT_TAP1_MINV,
              WIENER_FILT_TAP1_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[2] - WIENER_FILT_TAP2_MINV,
              WIENER_FILT_TAP2_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[0] - WIENER_FILT_TAP0_MINV,
              WIENER_FILT_TAP0_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[1] - WIENER_FILT_TAP1_MINV,
              WIENER_FILT_TAP1_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[2] - WIENER_FILT_TAP2_MINV,
              WIENER_FILT_TAP2_BITS);
        }
      }
    } else if (rsi->frame_restoration_type == RESTORE_BILATERAL) {
      // Whole frame uses bilateral filtering: per-subtile flag + level.
      for (i = 0; i < cm->rst_internal.ntiles; ++i) {
        int s;
        for (s = 0; s < BILATERAL_SUBTILES; ++s) {
          aom_write(wb, rsi->bilateral_info[i].level[s] >= 0,
                    RESTORE_NONE_BILATERAL_PROB);
          if (rsi->bilateral_info[i].level[s] >= 0) {
            aom_write_literal(wb, rsi->bilateral_info[i].level[s],
                              av1_bilateral_level_bits(cm));
          }
        }
      }
    } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
      // Whole frame uses Wiener filtering: per-tile flag + six taps.
      for (i = 0; i < cm->rst_internal.ntiles; ++i) {
        aom_write(wb, rsi->wiener_info[i].level != 0, RESTORE_NONE_WIENER_PROB);
        if (rsi->wiener_info[i].level) {
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[0] - WIENER_FILT_TAP0_MINV,
              WIENER_FILT_TAP0_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[1] - WIENER_FILT_TAP1_MINV,
              WIENER_FILT_TAP1_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[2] - WIENER_FILT_TAP2_MINV,
              WIENER_FILT_TAP2_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[0] - WIENER_FILT_TAP0_MINV,
              WIENER_FILT_TAP0_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[1] - WIENER_FILT_TAP1_MINV,
              WIENER_FILT_TAP1_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[2] - WIENER_FILT_TAP2_MINV,
              WIENER_FILT_TAP2_BITS);
        }
      }
    }
  }
}
2580#endif // CONFIG_LOOP_RESTORATION
2581
Yaowu Xuf883b422016-08-30 14:01:10 -07002582static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002583 int i;
2584 struct loopfilter *lf = &cm->lf;
2585
2586 // Encode the loop filter level and type
Yaowu Xuf883b422016-08-30 14:01:10 -07002587 aom_wb_write_literal(wb, lf->filter_level, 6);
2588 aom_wb_write_literal(wb, lf->sharpness_level, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002589
2590 // Write out loop filter deltas applied at the MB level based on mode or
2591 // ref frame (if they are enabled).
Yaowu Xuf883b422016-08-30 14:01:10 -07002592 aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002593
2594 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002595 aom_wb_write_bit(wb, lf->mode_ref_delta_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002596 if (lf->mode_ref_delta_update) {
2597 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
2598 const int delta = lf->ref_deltas[i];
2599 const int changed = delta != lf->last_ref_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07002600 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002601 if (changed) {
2602 lf->last_ref_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07002603 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002604 }
2605 }
2606
2607 for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
2608 const int delta = lf->mode_deltas[i];
2609 const int changed = delta != lf->last_mode_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07002610 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002611 if (changed) {
2612 lf->last_mode_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07002613 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002614 }
2615 }
2616 }
2617 }
2618}
2619
2620#if CONFIG_CLPF
Yaowu Xuf883b422016-08-30 14:01:10 -07002621static void encode_clpf(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Steinar Midtskogenecf9a0c2016-09-13 16:37:13 +02002622 aom_wb_write_literal(wb, cm->clpf_strength_y, 2);
2623 aom_wb_write_literal(wb, cm->clpf_strength_u, 2);
2624 aom_wb_write_literal(wb, cm->clpf_strength_v, 2);
2625 if (cm->clpf_strength_y) {
Steinar Midtskogend06588a2016-05-06 13:48:20 +02002626 aom_wb_write_literal(wb, cm->clpf_size, 2);
Steinar Midtskogend06588a2016-05-06 13:48:20 +02002627 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002628}
2629#endif
2630
2631#if CONFIG_DERING
// Write the frame-level de-ringing filter level to the uncompressed header.
static void encode_dering(int level, struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, level, DERING_LEVEL_BITS);
}
2635#endif // CONFIG_DERING
2636
// Write an optional delta-q value: a presence bit, followed by the signed
// 6-bit delta only when it is non-zero.
static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) {
  const int present = (delta_q != 0);
  aom_wb_write_bit(wb, present);
  if (present) aom_wb_write_inv_signed_literal(wb, delta_q, 6);
}
2645
// Write quantization parameters: the base q index, the three delta-q values
// (luma DC, chroma DC, chroma AC), and — when quantization matrices are
// compiled in — the QM enable flag plus its level range.
static void encode_quantization(const AV1_COMMON *const cm,
                                struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
#if CONFIG_AOM_QM
  aom_wb_write_bit(wb, cm->using_qmatrix);
  if (cm->using_qmatrix) {
    aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS);
    aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS);
  }
#endif
}
2660
Yaowu Xuf883b422016-08-30 14:01:10 -07002661static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
2662 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002663 int i, j;
2664 const struct segmentation *seg = &cm->seg;
2665
Yaowu Xuf883b422016-08-30 14:01:10 -07002666 aom_wb_write_bit(wb, seg->enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002667 if (!seg->enabled) return;
2668
2669 // Segmentation map
2670 if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002671 aom_wb_write_bit(wb, seg->update_map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002672 } else {
2673 assert(seg->update_map == 1);
2674 }
2675 if (seg->update_map) {
2676 // Select the coding strategy (temporal or spatial)
Yaowu Xuf883b422016-08-30 14:01:10 -07002677 av1_choose_segmap_coding_method(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002678
2679 // Write out the chosen coding method.
2680 if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002681 aom_wb_write_bit(wb, seg->temporal_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002682 } else {
2683 assert(seg->temporal_update == 0);
2684 }
2685 }
2686
2687 // Segmentation data
Yaowu Xuf883b422016-08-30 14:01:10 -07002688 aom_wb_write_bit(wb, seg->update_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002689 if (seg->update_data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002690 aom_wb_write_bit(wb, seg->abs_delta);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002691
2692 for (i = 0; i < MAX_SEGMENTS; i++) {
2693 for (j = 0; j < SEG_LVL_MAX; j++) {
2694 const int active = segfeature_active(seg, i, j);
Yaowu Xuf883b422016-08-30 14:01:10 -07002695 aom_wb_write_bit(wb, active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002696 if (active) {
2697 const int data = get_segdata(seg, i, j);
Yaowu Xuf883b422016-08-30 14:01:10 -07002698 const int data_max = av1_seg_feature_data_max(j);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002699
Yaowu Xuf883b422016-08-30 14:01:10 -07002700 if (av1_is_segfeature_signed(j)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002701 encode_unsigned_max(wb, abs(data), data_max);
Yaowu Xuf883b422016-08-30 14:01:10 -07002702 aom_wb_write_bit(wb, data < 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002703 } else {
2704 encode_unsigned_max(wb, data, data_max);
2705 }
2706 }
2707 }
2708 }
2709 }
2710}
2711
Yaowu Xuf883b422016-08-30 14:01:10 -07002712static void update_seg_probs(AV1_COMP *cpi, aom_writer *w) {
2713 AV1_COMMON *cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002714
2715 if (!cm->seg.enabled || !cm->seg.update_map) return;
2716
2717 if (cm->seg.temporal_update) {
2718 int i;
2719
2720 for (i = 0; i < PREDICTION_PROBS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07002721 av1_cond_prob_diff_update(w, &cm->fc->seg.pred_probs[i],
2722 cm->counts.seg.pred[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002723
Yaowu Xuf883b422016-08-30 14:01:10 -07002724 prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002725 cm->counts.seg.tree_mispred, MAX_SEGMENTS, w);
2726 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07002727 prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002728 cm->counts.seg.tree_total, MAX_SEGMENTS, w);
2729 }
2730}
2731
Yaowu Xuf883b422016-08-30 14:01:10 -07002732static void write_txfm_mode(TX_MODE mode, struct aom_write_bit_buffer *wb) {
2733 aom_wb_write_bit(wb, mode == TX_MODE_SELECT);
2734 if (mode != TX_MODE_SELECT) aom_wb_write_literal(wb, mode, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002735}
2736
Yaowu Xuf883b422016-08-30 14:01:10 -07002737static void update_txfm_probs(AV1_COMMON *cm, aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002738 FRAME_COUNTS *counts) {
2739 if (cm->tx_mode == TX_MODE_SELECT) {
2740 int i, j;
2741 for (i = 0; i < TX_SIZES - 1; ++i)
2742 for (j = 0; j < TX_SIZE_CONTEXTS; ++j)
Yaowu Xuf883b422016-08-30 14:01:10 -07002743 prob_diff_update(av1_tx_size_tree[i], cm->fc->tx_size_probs[i][j],
Yaowu Xuc27fc142016-08-22 16:08:15 -07002744 counts->tx_size[i][j], i + 2, w);
2745 }
2746}
2747
James Zern7b9407a2016-05-18 23:48:05 -07002748static void write_interp_filter(InterpFilter filter,
Yaowu Xuf883b422016-08-30 14:01:10 -07002749 struct aom_write_bit_buffer *wb) {
2750 aom_wb_write_bit(wb, filter == SWITCHABLE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002751 if (filter != SWITCHABLE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002752 aom_wb_write_literal(wb, filter, 2 + CONFIG_EXT_INTERP);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002753}
2754
Yaowu Xuf883b422016-08-30 14:01:10 -07002755static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002756 if (cm->interp_filter == SWITCHABLE) {
2757 // Check to see if only one of the filters is actually used
2758 int count[SWITCHABLE_FILTERS];
2759 int i, j, c = 0;
2760 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2761 count[i] = 0;
2762 for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
2763 count[i] += counts->switchable_interp[j][i];
2764 c += (count[i] > 0);
2765 }
2766 if (c == 1) {
2767 // Only one filter is used. So set the filter at frame level
2768 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2769 if (count[i]) {
2770 cm->interp_filter = i;
2771 break;
2772 }
2773 }
2774 }
2775 }
2776}
2777
// Write the tile configuration to the uncompressed header.  With
// CONFIG_EXT_TILE the tile width/height are coded directly in superblock
// units; otherwise the legacy log2 column/row scheme is used (unary-coded
// extra column bits, then up to two row bits).
static void write_tile_info(const AV1_COMMON *const cm,
                            struct aom_write_bit_buffer *wb) {
#if CONFIG_EXT_TILE
  // Tile dimensions rounded up to whole superblocks, in superblock units.
  const int tile_width =
      ALIGN_POWER_OF_TWO(cm->tile_width, cm->mib_size_log2) >>
      cm->mib_size_log2;
  const int tile_height =
      ALIGN_POWER_OF_TWO(cm->tile_height, cm->mib_size_log2) >>
      cm->mib_size_log2;

  assert(tile_width > 0);
  assert(tile_height > 0);

// Write the tile sizes
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128) {
    // 128x128 superblocks: 5 bits each (max 32 superblocks per dimension).
    assert(tile_width <= 32);
    assert(tile_height <= 32);
    aom_wb_write_literal(wb, tile_width - 1, 5);
    aom_wb_write_literal(wb, tile_height - 1, 5);
  } else
#endif  // CONFIG_EXT_PARTITION
  {
    // 64x64 superblocks: 6 bits each (max 64 superblocks per dimension).
    assert(tile_width <= 64);
    assert(tile_height <= 64);
    aom_wb_write_literal(wb, tile_width - 1, 6);
    aom_wb_write_literal(wb, tile_height - 1, 6);
  }
#else
  int min_log2_tile_cols, max_log2_tile_cols, ones;
  av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns: unary code of (log2_tile_cols - min), terminated by a 0 bit
  // unless already at the maximum.
  ones = cm->log2_tile_cols - min_log2_tile_cols;
  while (ones--) aom_wb_write_bit(wb, 1);

  if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0);

  // rows: 0, 10, or 11 for log2_tile_rows of 0, 1, or 2.
  aom_wb_write_bit(wb, cm->log2_tile_rows != 0);
  if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1);
#endif  // CONFIG_EXT_TILE
}
2821
// Computes the bitmask of reference buffer slots the current coded frame
// will refresh; bit i set means virtual reference slot i gets overwritten.
static int get_refresh_mask(AV1_COMP *cpi) {
  int refresh_mask = 0;

#if CONFIG_EXT_REFS
  // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
  // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
  // the 3 LAST reference frames will be updated accordingly, i.e.:
  // (1) The original virtual index for LAST3_FRAME will become the new virtual
  //     index for LAST_FRAME; and
  // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
  //     shifted and become the new virtual indexes for LAST2_FRAME and
  //     LAST3_FRAME.
  refresh_mask |=
      (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]);
  if (cpi->rc.is_bwd_ref_frame && cpi->num_extra_arfs) {
    // We have swapped the virtual indices
    refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->arf_map[0]);
  } else {
    refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
  }
#else
  refresh_mask |= (cpi->refresh_last_frame << cpi->lst_fb_idx);
#endif  // CONFIG_EXT_REFS

  if (av1_preserve_existing_gf(cpi)) {
    // We have decided to preserve the previously existing golden frame as our
    // new ARF frame. However, in the short term we leave it in the GF slot and,
    // if we're updating the GF with the current decoded frame, we save it
    // instead to the ARF slot.
    // Later, in the function av1_encoder.c:av1_update_reference_frames() we
    // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
    // there so that it can be done outside of the recode loop.
    // Note: This is highly specific to the use of ARF as a forward reference,
    // and this needs to be generalized as other uses are implemented
    // (like RTC/temporal scalability).
    return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx);
  } else {
    int arf_idx = cpi->alt_fb_idx;
#if CONFIG_EXT_REFS
    const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
    arf_idx = cpi->arf_map[gf_group->arf_update_idx[gf_group->index]];
#else
    // Two-pass encoding with multiple ARFs: the slot to refresh comes from
    // the GF group's per-frame update index.
    if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
      const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
      arf_idx = gf_group->arf_update_idx[gf_group->index];
    }
#endif  // CONFIG_EXT_REFS
    return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
           (cpi->refresh_alt_ref_frame << arf_idx);
  }
}
2873
2874#if CONFIG_EXT_TILE
2875static INLINE int find_identical_tile(
2876 const int tile_row, const int tile_col,
2877 TileBufferEnc (*const tile_buffers)[1024]) {
2878 const MV32 candidate_offset[1] = { { 1, 0 } };
2879 const uint8_t *const cur_tile_data =
2880 tile_buffers[tile_row][tile_col].data + 4;
2881 const unsigned int cur_tile_size = tile_buffers[tile_row][tile_col].size;
2882
2883 int i;
2884
2885 if (tile_row == 0) return 0;
2886
2887 // (TODO: yunqingwang) For now, only above tile is checked and used.
2888 // More candidates such as left tile can be added later.
2889 for (i = 0; i < 1; i++) {
2890 int row_offset = candidate_offset[0].row;
2891 int col_offset = candidate_offset[0].col;
2892 int row = tile_row - row_offset;
2893 int col = tile_col - col_offset;
2894 uint8_t tile_hdr;
2895 const uint8_t *tile_data;
2896 TileBufferEnc *candidate;
2897
2898 if (row < 0 || col < 0) continue;
2899
2900 tile_hdr = *(tile_buffers[row][col].data);
2901
2902 // Read out tcm bit
2903 if ((tile_hdr >> 7) == 1) {
2904 // The candidate is a copy tile itself
2905 row_offset += tile_hdr & 0x7f;
2906 row = tile_row - row_offset;
2907 }
2908
2909 candidate = &tile_buffers[row][col];
2910
2911 if (row_offset >= 128 || candidate->size != cur_tile_size) continue;
2912
2913 tile_data = candidate->data + 4;
2914
2915 if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;
2916
2917 // Identical tile found
2918 assert(row_offset > 0);
2919 return row_offset;
2920 }
2921
2922 // No identical tile found
2923 return 0;
2924}
2925#endif // CONFIG_EXT_TILE
2926
// Packs all coded tiles into dst and returns the total packed size in bytes.
// Also reports the largest tile size (and, for CONFIG_EXT_TILE, the largest
// tile-column size) so the caller can later shrink the 4-byte size fields
// via remux_tiles. Tile entropy data comes from the per-tile token buffers.
static uint32_t write_tiles(AV1_COMP *const cpi, uint8_t *const dst,
                            unsigned int *max_tile_size,
                            unsigned int *max_tile_col_size) {
  const AV1_COMMON *const cm = &cpi->common;
#if CONFIG_ANS
  struct AnsCoder token_ans;
#else
  aom_writer mode_bc;
#endif  // CONFIG_ANS
  int tile_row, tile_col;
  TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
  TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
  size_t total_size = 0;
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;
#if CONFIG_EXT_TILE
  const int have_tiles = tile_cols * tile_rows > 1;
#endif  // CONFIG_EXT_TILE
#if CONFIG_ANS
  struct BufAnsCoder *buf_ans = &cpi->buf_ans;
#endif  // CONFIG_ANS

  *max_tile_size = 0;
  *max_tile_col_size = 0;

// All tile size fields are output on 4 bytes. A call to remux_tiles will
// later compact the data if smaller headers are adequate.

#if CONFIG_EXT_TILE
  // EXT_TILE layout: column-major order, each non-last column prefixed with
  // a 4-byte column size, and every tile carrying its own 4-byte header.
  for (tile_col = 0; tile_col < tile_cols; tile_col++) {
    TileInfo tile_info;
    const int is_last_col = (tile_col == tile_cols - 1);
    const size_t col_offset = total_size;

    av1_tile_set_col(&tile_info, cm, tile_col);

    // The last column does not have a column header
    if (!is_last_col) total_size += 4;

    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
      TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
      unsigned int tile_size;
      const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
      const int data_offset = have_tiles ? 4 : 0;

      av1_tile_set_row(&tile_info, cm, tile_row);

      buf->data = dst + total_size;

      // If CONFIG_EXT_TILE = 1, every tile in the row has a header,
      // even for the last one, unless no tiling is used at all.
      total_size += data_offset;
#if !CONFIG_ANS
      aom_start_encode(&mode_bc, buf->data + data_offset);
      write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
      assert(tok == tok_end);
      aom_stop_encode(&mode_bc);
      tile_size = mode_bc.pos;
#else
      buf_ans_write_reset(buf_ans);
      write_modes(cpi, &tile_info, buf_ans, &tok, tok_end);
      assert(tok == tok_end);
      ans_write_init(&token_ans, buf->data + data_offset);
      buf_ans_flush(buf_ans, &token_ans);
      tile_size = ans_write_end(&token_ans);
#endif  // !CONFIG_ANS

      buf->size = tile_size;

      // Record the maximum tile size we see, so we can compact headers later.
      *max_tile_size = AOMMAX(*max_tile_size, tile_size);

      if (have_tiles) {
        // tile header: size of this tile, or copy offset
        uint32_t tile_header = tile_size;

        // Check if this tile is a copy tile.
        // Very low chances to have copy tiles on the key frames, so don't
        // search on key frames to reduce unnecessary search.
        if (cm->frame_type != KEY_FRAME) {
          const int idendical_tile_offset =
              find_identical_tile(tile_row, tile_col, tile_buffers);

          if (idendical_tile_offset > 0) {
            // Copy tile: payload is omitted, header stores the row offset
            // with the top bit set, shifted into the most significant byte.
            tile_size = 0;
            tile_header = idendical_tile_offset | 0x80;
            tile_header <<= 24;
          }
        }

        mem_put_le32(buf->data, tile_header);
      }

      total_size += tile_size;
    }

    if (!is_last_col) {
      size_t col_size = total_size - col_offset - 4;
      mem_put_le32(dst + col_offset, col_size);

      // If it is not final packing, record the maximum tile column size we see,
      // otherwise, check if the tile size is out of the range.
      *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
    }
  }
#else
  // Default layout: row-major order; each tile except the very last is
  // prefixed with its 4-byte size.
  for (tile_row = 0; tile_row < tile_rows; tile_row++) {
    TileInfo tile_info;
    const int is_last_row = (tile_row == tile_rows - 1);

    av1_tile_set_row(&tile_info, cm, tile_row);

    for (tile_col = 0; tile_col < tile_cols; tile_col++) {
      TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
      const int is_last_col = (tile_col == tile_cols - 1);
      const int is_last_tile = is_last_col && is_last_row;
      unsigned int tile_size;
      const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];

      av1_tile_set_col(&tile_info, cm, tile_col);

      buf->data = dst + total_size;

      // The last tile does not have a header.
      if (!is_last_tile) total_size += 4;

#if !CONFIG_ANS
      aom_start_encode(&mode_bc, dst + total_size);
      write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
      assert(tok == tok_end);
      aom_stop_encode(&mode_bc);
      tile_size = mode_bc.pos;
#else
      buf_ans_write_reset(buf_ans);
      write_modes(cpi, &tile_info, buf_ans, &tok, tok_end);
      assert(tok == tok_end);
      ans_write_init(&token_ans, dst + total_size);
      buf_ans_flush(buf_ans, &token_ans);
      tile_size = ans_write_end(&token_ans);
#endif  // !CONFIG_ANS

      assert(tile_size > 0);

      buf->size = tile_size;

      if (!is_last_tile) {
        *max_tile_size = AOMMAX(*max_tile_size, tile_size);
        // size of this tile
        mem_put_le32(buf->data, tile_size);
      }

      total_size += tile_size;
    }
  }
#endif  // CONFIG_EXT_TILE
  return (uint32_t)total_size;
}
3086
Yaowu Xuf883b422016-08-30 14:01:10 -07003087static void write_render_size(const AV1_COMMON *cm,
3088 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003089 const int scaling_active =
3090 cm->width != cm->render_width || cm->height != cm->render_height;
Yaowu Xuf883b422016-08-30 14:01:10 -07003091 aom_wb_write_bit(wb, scaling_active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003092 if (scaling_active) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003093 aom_wb_write_literal(wb, cm->render_width - 1, 16);
3094 aom_wb_write_literal(wb, cm->render_height - 1, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003095 }
3096}
3097
Yaowu Xuf883b422016-08-30 14:01:10 -07003098static void write_frame_size(const AV1_COMMON *cm,
3099 struct aom_write_bit_buffer *wb) {
3100 aom_wb_write_literal(wb, cm->width - 1, 16);
3101 aom_wb_write_literal(wb, cm->height - 1, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003102
3103 write_render_size(cm, wb);
3104}
3105
Yaowu Xuf883b422016-08-30 14:01:10 -07003106static void write_frame_size_with_refs(AV1_COMP *cpi,
3107 struct aom_write_bit_buffer *wb) {
3108 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003109 int found = 0;
3110
3111 MV_REFERENCE_FRAME ref_frame;
3112 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
3113 YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);
3114
3115 if (cfg != NULL) {
3116 found =
3117 cm->width == cfg->y_crop_width && cm->height == cfg->y_crop_height;
3118 found &= cm->render_width == cfg->render_width &&
3119 cm->render_height == cfg->render_height;
3120 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003121 aom_wb_write_bit(wb, found);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003122 if (found) {
3123 break;
3124 }
3125 }
3126
3127 if (!found) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003128 aom_wb_write_literal(wb, cm->width - 1, 16);
3129 aom_wb_write_literal(wb, cm->height - 1, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003130 write_render_size(cm, wb);
3131 }
3132}
3133
Yaowu Xuf883b422016-08-30 14:01:10 -07003134static void write_sync_code(struct aom_write_bit_buffer *wb) {
3135 aom_wb_write_literal(wb, AV1_SYNC_CODE_0, 8);
3136 aom_wb_write_literal(wb, AV1_SYNC_CODE_1, 8);
3137 aom_wb_write_literal(wb, AV1_SYNC_CODE_2, 8);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003138}
3139
3140static void write_profile(BITSTREAM_PROFILE profile,
Yaowu Xuf883b422016-08-30 14:01:10 -07003141 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003142 switch (profile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003143 case PROFILE_0: aom_wb_write_literal(wb, 0, 2); break;
3144 case PROFILE_1: aom_wb_write_literal(wb, 2, 2); break;
3145 case PROFILE_2: aom_wb_write_literal(wb, 1, 2); break;
3146 case PROFILE_3: aom_wb_write_literal(wb, 6, 3); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003147 default: assert(0);
3148 }
3149}
3150
// Codes bit depth (profiles >= 2 only), color space, color range and
// chroma subsampling into the frame header. Which fields appear depends on
// the profile and on whether the color space is SRGB.
static void write_bitdepth_colorspace_sampling(
    AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
  if (cm->profile >= PROFILE_2) {
    // High-bit-depth profiles: one bit selects 10-bit (0) vs 12-bit (1).
    assert(cm->bit_depth > AOM_BITS_8);
    aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1);
  }
  aom_wb_write_literal(wb, cm->color_space, 3);
  if (cm->color_space != AOM_CS_SRGB) {
    // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
    aom_wb_write_bit(wb, cm->color_range);
    if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
      // Profiles 1/3 carry explicit subsampling; 4:2:0 is not allowed here.
      assert(cm->subsampling_x != 1 || cm->subsampling_y != 1);
      aom_wb_write_bit(wb, cm->subsampling_x);
      aom_wb_write_bit(wb, cm->subsampling_y);
      aom_wb_write_bit(wb, 0);  // unused
    } else {
      // Profiles 0/2 are implicitly 4:2:0.
      assert(cm->subsampling_x == 1 && cm->subsampling_y == 1);
    }
  } else {
    // SRGB is only legal in profiles 1/3 and is implicitly 4:4:4.
    assert(cm->profile == PROFILE_1 || cm->profile == PROFILE_3);
    aom_wb_write_bit(wb, 0);  // unused
  }
}
3174
// Writes the uncompressed (raw-bit) portion of the frame header: frame
// marker, profile, frame type/show flags, sync code and color config for
// intra frames, reference signaling for inter frames, and the various tool
// headers (loop filter, quantization, segmentation, tile info, ...).
static void write_uncompressed_header(AV1_COMP *cpi,
                                      struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;

  aom_wb_write_literal(wb, AOM_FRAME_MARKER, 2);

  write_profile(cm->profile, wb);

#if CONFIG_EXT_REFS
  // NOTE: By default all coded frames to be used as a reference
  cm->is_reference_frame = 1;

  if (cm->show_existing_frame) {
    // Short header path: just signal which existing buffer to display.
    RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
    const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];

    if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
      aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                         "Buffer %d does not contain a reconstructed frame",
                         frame_to_show);
    }
    ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);

    aom_wb_write_bit(wb, 1);  // show_existing_frame
    aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);

    return;
  } else {
#endif  // CONFIG_EXT_REFS
    aom_wb_write_bit(wb, 0);  // show_existing_frame
#if CONFIG_EXT_REFS
  }
#endif  // CONFIG_EXT_REFS

  aom_wb_write_bit(wb, cm->frame_type);
  aom_wb_write_bit(wb, cm->show_frame);
  aom_wb_write_bit(wb, cm->error_resilient_mode);

  if (cm->frame_type == KEY_FRAME) {
    write_sync_code(wb);
    write_bitdepth_colorspace_sampling(cm, wb);
    write_frame_size(cm, wb);
#if CONFIG_PALETTE
    aom_wb_write_bit(wb, cm->allow_screen_content_tools);
#endif  // CONFIG_PALETTE
  } else {
    // Inter (or intra-only) frame.
    if (!cm->show_frame) aom_wb_write_bit(wb, cm->intra_only);
#if CONFIG_PALETTE
    if (cm->intra_only) aom_wb_write_bit(wb, cm->allow_screen_content_tools);
#endif  // CONFIG_PALETTE
    if (!cm->error_resilient_mode) {
      // Signal how the frame contexts should be reset at the decoder.
      if (cm->intra_only) {
        aom_wb_write_bit(wb,
                         cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
      } else {
        aom_wb_write_bit(wb,
                         cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
        if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
          aom_wb_write_bit(wb,
                           cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
      }
    }

#if CONFIG_EXT_REFS
    cpi->refresh_frame_mask = get_refresh_mask(cpi);
#endif  // CONFIG_EXT_REFS

    if (cm->intra_only) {
      write_sync_code(wb);
      write_bitdepth_colorspace_sampling(cm, wb);

#if CONFIG_EXT_REFS
      aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
#else
      aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
#endif  // CONFIG_EXT_REFS
      write_frame_size(cm, wb);
    } else {
      MV_REFERENCE_FRAME ref_frame;

#if CONFIG_EXT_REFS
      aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
#else
      aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
#endif  // CONFIG_EXT_REFS

#if CONFIG_EXT_REFS
      if (!cpi->refresh_frame_mask) {
        // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
        //       will not be used as a reference
        cm->is_reference_frame = 0;
      }
#endif  // CONFIG_EXT_REFS

      // Per-reference signaling: slot index plus sign bias.
      for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
        assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
        aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
                             REF_FRAMES_LOG2);
        aom_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
      }

      write_frame_size_with_refs(cpi, wb);

      aom_wb_write_bit(wb, cm->allow_high_precision_mv);

      fix_interp_filter(cm, cpi->td.counts);
      write_interp_filter(cm->interp_filter, wb);
    }
  }

  if (!cm->error_resilient_mode) {
    aom_wb_write_bit(
        wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
  }

  aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);

  assert(cm->mib_size == num_8x8_blocks_wide_lookup[cm->sb_size]);
  assert(cm->mib_size == 1 << cm->mib_size_log2);
#if CONFIG_EXT_PARTITION
  assert(cm->sb_size == BLOCK_128X128 || cm->sb_size == BLOCK_64X64);
  aom_wb_write_bit(wb, cm->sb_size == BLOCK_128X128 ? 1 : 0);
#else
  assert(cm->sb_size == BLOCK_64X64);
#endif  // CONFIG_EXT_PARTITION

  // In-loop filtering and coding tool headers.
  encode_loopfilter(cm, wb);
#if CONFIG_CLPF
  encode_clpf(cm, wb);
#endif  // CONFIG_CLPF
#if CONFIG_DERING
  encode_dering(cm->dering_level, wb);
#endif  // CONFIG_DERING
#if CONFIG_LOOP_RESTORATION
  encode_restoration_mode(cm, wb);
#endif  // CONFIG_LOOP_RESTORATION
  encode_quantization(cm, wb);
  encode_segmentation(cm, xd, wb);
  // Lossless without segmentation forces 4x4 transforms; otherwise the
  // transform mode is signaled explicitly.
  if (!cm->seg.enabled && xd->lossless[0])
    cm->tx_mode = ONLY_4X4;
  else
    write_txfm_mode(cm->tx_mode, wb);

  if (cpi->allow_comp_inter_inter) {
    const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
    const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE;

    aom_wb_write_bit(wb, use_hybrid_pred);
    if (!use_hybrid_pred) aom_wb_write_bit(wb, use_compound_pred);
  }

  write_tile_info(cm, wb);
}
3329
3330#if CONFIG_GLOBAL_MOTION
// Codes one global motion model: the model type, followed by its parameters.
// The switch cases intentionally fall through, since each richer model
// (AFFINE -> ROTZOOM -> TRANSLATION) also carries all simpler parameters.
static void write_global_motion_params(Global_Motion_Params *params,
                                       aom_prob *probs, aom_writer *w) {
  GLOBAL_MOTION_TYPE gmtype = get_gmtype(params);
  av1_write_token(w, av1_global_motion_types_tree, probs,
                  &global_motion_types_encodings[gmtype]);
  switch (gmtype) {
    case GLOBAL_ZERO: break;
    case GLOBAL_AFFINE:
      // Affine-only parameters (wmmat[2]); col is coded with the
      // (1 << GM_ALPHA_PREC_BITS) bias removed.
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[2].as_mv.row >> GM_ALPHA_PREC_DIFF),
          GM_ABS_ALPHA_BITS);
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[2].as_mv.col >> GM_ALPHA_PREC_DIFF) -
                 (1 << GM_ALPHA_PREC_BITS),
          GM_ABS_ALPHA_BITS);
    // fallthrough intended
    case GLOBAL_ROTZOOM:
      // Rotation/zoom parameters (wmmat[1]), same bias handling as above.
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[1].as_mv.row >> GM_ALPHA_PREC_DIFF),
          GM_ABS_ALPHA_BITS);
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[1].as_mv.col >> GM_ALPHA_PREC_DIFF) -
                 (1 << GM_ALPHA_PREC_BITS),
          GM_ABS_ALPHA_BITS);
    // fallthrough intended
    case GLOBAL_TRANSLATION:
      // Translation parameters (wmmat[0]).
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[0].as_mv.row >> GM_TRANS_PREC_DIFF),
          GM_ABS_TRANS_BITS);
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[0].as_mv.col >> GM_TRANS_PREC_DIFF),
          GM_ABS_TRANS_BITS);
      break;
    default: assert(0);
  }
}
3367
Yaowu Xuf883b422016-08-30 14:01:10 -07003368static void write_global_motion(AV1_COMP *cpi, aom_writer *w) {
3369 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003370 int frame;
3371 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
3372 if (!cpi->global_motion_used[frame]) {
3373 memset(&cm->global_motion[frame], 0, sizeof(*cm->global_motion));
3374 }
3375 write_global_motion_params(&cm->global_motion[frame],
3376 cm->fc->global_motion_types_prob, w);
Sarah Parkere5299862016-08-16 14:57:37 -07003377 /*
3378 printf("Enc Ref %d [%d] (used %d): %d %d %d %d\n",
3379 frame, cm->current_video_frame, cpi->global_motion_used[frame],
3380 cm->global_motion[frame].motion_params.wmmat[0].as_mv.row,
3381 cm->global_motion[frame].motion_params.wmmat[0].as_mv.col,
3382 cm->global_motion[frame].motion_params.wmmat[1].as_mv.row,
3383 cm->global_motion[frame].motion_params.wmmat[1].as_mv.col);
3384 */
Yaowu Xuc27fc142016-08-22 16:08:15 -07003385 }
3386}
3387#endif
3388
Yaowu Xuf883b422016-08-30 14:01:10 -07003389static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data) {
3390 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003391#if CONFIG_SUPERTX
3392 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
3393#endif // CONFIG_SUPERTX
3394 FRAME_CONTEXT *const fc = cm->fc;
3395 FRAME_COUNTS *counts = cpi->td.counts;
Yaowu Xuf883b422016-08-30 14:01:10 -07003396 aom_writer *header_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003397 int i, j;
3398
3399#if CONFIG_ANS
3400 struct AnsCoder header_ans;
3401 int header_size;
3402 header_bc = &cpi->buf_ans;
3403 buf_ans_write_reset(header_bc);
3404#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003405 aom_writer real_header_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003406 header_bc = &real_header_bc;
Yaowu Xuf883b422016-08-30 14:01:10 -07003407 aom_start_encode(header_bc, data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003408#endif
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003409
3410#if CONFIG_LOOP_RESTORATION
3411 encode_restoration(cm, header_bc);
3412#endif // CONFIG_LOOP_RESTORATION
3413
Yaowu Xuc27fc142016-08-22 16:08:15 -07003414 update_txfm_probs(cm, header_bc, counts);
3415 update_coef_probs(cpi, header_bc);
3416
3417#if CONFIG_VAR_TX
3418 update_txfm_partition_probs(cm, header_bc, counts);
Yue Chena1e48dc2016-08-29 17:29:33 -07003419#if CONFIG_EXT_TX && CONFIG_RECT_TX
3420 if (cm->tx_mode == TX_MODE_SELECT) {
3421 for (i = 1; i < TX_SIZES - 1; ++i)
3422 av1_cond_prob_diff_update(header_bc, &fc->rect_tx_prob[i],
3423 counts->rect_tx[i]);
3424 }
3425#endif // CONFIG_EXT_TX && CONFIG_RECT_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07003426#endif
3427
3428 update_skip_probs(cm, header_bc, counts);
3429 update_seg_probs(cpi, header_bc);
3430
3431 for (i = 0; i < INTRA_MODES; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07003432 prob_diff_update(av1_intra_mode_tree, fc->uv_mode_prob[i],
Yaowu Xuc27fc142016-08-22 16:08:15 -07003433 counts->uv_mode[i], INTRA_MODES, header_bc);
3434
3435#if CONFIG_EXT_PARTITION_TYPES
Yaowu Xuf883b422016-08-30 14:01:10 -07003436 prob_diff_update(av1_partition_tree, fc->partition_prob[0],
Yaowu Xuc27fc142016-08-22 16:08:15 -07003437 counts->partition[0], PARTITION_TYPES, header_bc);
3438 for (i = 1; i < PARTITION_CONTEXTS; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07003439 prob_diff_update(av1_ext_partition_tree, fc->partition_prob[i],
Yaowu Xuc27fc142016-08-22 16:08:15 -07003440 counts->partition[i], EXT_PARTITION_TYPES, header_bc);
3441#else
3442 for (i = 0; i < PARTITION_CONTEXTS; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07003443 prob_diff_update(av1_partition_tree, fc->partition_prob[i],
Yaowu Xuc27fc142016-08-22 16:08:15 -07003444 counts->partition[i], PARTITION_TYPES, header_bc);
3445#endif // CONFIG_EXT_PARTITION_TYPES
3446
3447#if CONFIG_EXT_INTRA
3448 for (i = 0; i < INTRA_FILTERS + 1; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07003449 prob_diff_update(av1_intra_filter_tree, fc->intra_filter_probs[i],
Yaowu Xuc27fc142016-08-22 16:08:15 -07003450 counts->intra_filter[i], INTRA_FILTERS, header_bc);
3451#endif // CONFIG_EXT_INTRA
3452
3453 if (frame_is_intra_only(cm)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003454 av1_copy(cm->kf_y_prob, av1_kf_y_mode_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003455 for (i = 0; i < INTRA_MODES; ++i)
3456 for (j = 0; j < INTRA_MODES; ++j)
Yaowu Xuf883b422016-08-30 14:01:10 -07003457 prob_diff_update(av1_intra_mode_tree, cm->kf_y_prob[i][j],
Yaowu Xuc27fc142016-08-22 16:08:15 -07003458 counts->kf_y_mode[i][j], INTRA_MODES, header_bc);
3459 } else {
3460#if CONFIG_REF_MV
3461 update_inter_mode_probs(cm, header_bc, counts);
3462#else
3463 for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07003464 prob_diff_update(av1_inter_mode_tree, cm->fc->inter_mode_probs[i],
Yaowu Xuc27fc142016-08-22 16:08:15 -07003465 counts->inter_mode[i], INTER_MODES, header_bc);
3466#endif
3467
3468#if CONFIG_EXT_INTER
3469 update_inter_compound_mode_probs(cm, header_bc);
3470
3471 if (cm->reference_mode != COMPOUND_REFERENCE) {
3472 for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
3473 if (is_interintra_allowed_bsize_group(i)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003474 av1_cond_prob_diff_update(header_bc, &fc->interintra_prob[i],
3475 cm->counts.interintra[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003476 }
3477 }
3478 for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
3479 prob_diff_update(
Yaowu Xuf883b422016-08-30 14:01:10 -07003480 av1_interintra_mode_tree, cm->fc->interintra_mode_prob[i],
Yaowu Xuc27fc142016-08-22 16:08:15 -07003481 counts->interintra_mode[i], INTERINTRA_MODES, header_bc);
3482 }
3483 for (i = 0; i < BLOCK_SIZES; i++) {
3484 if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i))
Yaowu Xuf883b422016-08-30 14:01:10 -07003485 av1_cond_prob_diff_update(header_bc, &fc->wedge_interintra_prob[i],
3486 cm->counts.wedge_interintra[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003487 }
3488 }
3489 if (cm->reference_mode != SINGLE_REFERENCE) {
3490 for (i = 0; i < BLOCK_SIZES; i++)
3491 if (is_interinter_wedge_used(i))
Yaowu Xuf883b422016-08-30 14:01:10 -07003492 av1_cond_prob_diff_update(header_bc, &fc->wedge_interinter_prob[i],
3493 cm->counts.wedge_interinter[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003494 }
3495#endif // CONFIG_EXT_INTER
3496
Yue Chencb60b182016-10-13 15:18:22 -07003497#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07003498 for (i = BLOCK_8X8; i < BLOCK_SIZES; ++i)
Yue Chencb60b182016-10-13 15:18:22 -07003499 prob_diff_update(av1_motion_mode_tree, fc->motion_mode_prob[i],
3500 counts->motion_mode[i], MOTION_MODES, header_bc);
3501#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07003502
3503 if (cm->interp_filter == SWITCHABLE)
3504 update_switchable_interp_probs(cm, header_bc, counts);
3505
3506 for (i = 0; i < INTRA_INTER_CONTEXTS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07003507 av1_cond_prob_diff_update(header_bc, &fc->intra_inter_prob[i],
3508 counts->intra_inter[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003509
3510 if (cpi->allow_comp_inter_inter) {
3511 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
3512 if (use_hybrid_pred)
3513 for (i = 0; i < COMP_INTER_CONTEXTS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07003514 av1_cond_prob_diff_update(header_bc, &fc->comp_inter_prob[i],
3515 counts->comp_inter[i]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003516 }
3517
3518 if (cm->reference_mode != COMPOUND_REFERENCE) {
3519 for (i = 0; i < REF_CONTEXTS; i++) {
3520 for (j = 0; j < (SINGLE_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003521 av1_cond_prob_diff_update(header_bc, &fc->single_ref_prob[i][j],
3522 counts->single_ref[i][j]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003523 }
3524 }
3525 }
3526
3527 if (cm->reference_mode != SINGLE_REFERENCE) {
3528 for (i = 0; i < REF_CONTEXTS; i++) {
3529#if CONFIG_EXT_REFS
3530 for (j = 0; j < (FWD_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003531 av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
3532 counts->comp_ref[i][j]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003533 }
3534 for (j = 0; j < (BWD_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003535 av1_cond_prob_diff_update(header_bc, &fc->comp_bwdref_prob[i][j],
3536 counts->comp_bwdref[i][j]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003537 }
3538#else
3539 for (j = 0; j < (COMP_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003540 av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
3541 counts->comp_ref[i][j]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003542 }
3543#endif // CONFIG_EXT_REFS
3544 }
3545 }
3546
3547 for (i = 0; i < BLOCK_SIZE_GROUPS; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07003548 prob_diff_update(av1_intra_mode_tree, cm->fc->y_mode_prob[i],
Yaowu Xuc27fc142016-08-22 16:08:15 -07003549 counts->y_mode[i], INTRA_MODES, header_bc);
3550
Jingning Hanfd0cf162016-09-30 10:33:50 -07003551 av1_write_nmv_probs(cm, cm->allow_high_precision_mv, header_bc,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003552#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07003553 counts->mv);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003554#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003555 &counts->mv);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003556#endif
3557 update_ext_tx_probs(cm, header_bc);
3558#if CONFIG_SUPERTX
3559 if (!xd->lossless[0]) update_supertx_probs(cm, header_bc);
3560#endif // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07003561#if CONFIG_GLOBAL_MOTION
Sarah Parker689b0ca2016-10-11 12:06:33 -07003562 write_global_motion(cpi, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003563#endif // CONFIG_GLOBAL_MOTION
Sarah Parker689b0ca2016-10-11 12:06:33 -07003564 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003565#if CONFIG_ANS
3566 ans_write_init(&header_ans, data);
3567 buf_ans_flush(header_bc, &header_ans);
3568 header_size = ans_write_end(&header_ans);
3569 assert(header_size <= 0xffff);
3570 return header_size;
3571#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003572 aom_stop_encode(header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003573 assert(header_bc->pos <= 0xffff);
3574 return header_bc->pos;
3575#endif // CONFIG_ANS
3576}
3577
// Returns the minimum number of bytes (1..4) needed to represent 'size',
// while keeping the top 'spare_msbs' bits of the widest (4-byte) encoding
// free for other use. Returns -1 if 'size' cannot fit in 4 bytes with those
// bits reserved.
static int choose_size_bytes(uint32_t size, int spare_msbs) {
  int nbytes;

  // Reject values that would spill into the reserved most significant bits.
  if (spare_msbs > 0 && (size >> (32 - spare_msbs)) != 0) return -1;

  // Shift the reserved headroom away so the byte tests below examine the
  // value as it would sit in a full 32-bit field.
  size <<= spare_msbs;

  // Find the highest non-zero byte.
  for (nbytes = 4; nbytes > 1; --nbytes) {
    if ((size >> (8 * (nbytes - 1))) != 0) break;
  }
  return nbytes;
}
3597
// Writes 'val' to 'dst' as a little-endian integer occupying exactly 'sz'
// bytes; 'sz' must be in [1, 4] (anything else asserts in debug builds).
static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) {
  if (sz == 1) {
    dst[0] = (uint8_t)(val & 0xff);
  } else if (sz == 2) {
    mem_put_le16(dst, val);
  } else if (sz == 3) {
    mem_put_le24(dst, val);
  } else if (sz == 4) {
    mem_put_le32(dst, val);
  } else {
    assert("Invalid size" && 0);
  }
}
3607
// Compacts the tile data in 'dst' in place: write_tiles() emits every tile
// (and, with CONFIG_EXT_TILE, tile column) size field as 4 bytes, and this
// pass shrinks those fields to the minimum widths able to hold
// 'max_tile_size' / 'max_tile_col_size'. The chosen widths are returned via
// 'tile_size_bytes' and 'tile_col_size_bytes' so the caller can back-patch
// the uncompressed header to match. Returns the new (<= data_size) total
// size of the tile data.
static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
                       const uint32_t data_size, const uint32_t max_tile_size,
                       const uint32_t max_tile_col_size,
                       int *const tile_size_bytes,
                       int *const tile_col_size_bytes) {
// Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
#if CONFIG_EXT_TILE
  // The top bit in the tile size field indicates tile copy mode, so we
  // have 1 less bit to code the tile size
  const int tsb = choose_size_bytes(max_tile_size, 1);
  const int tcsb = choose_size_bytes(max_tile_col_size, 0);
#else
  const int tsb = choose_size_bytes(max_tile_size, 0);
  const int tcsb = 4;  // This is ignored
  (void)max_tile_col_size;
#endif  // CONFIG_EXT_TILE

  // choose_size_bytes() only returns -1 on overflow of a 32-bit field, which
  // cannot happen for sizes that were just written into 4-byte fields.
  assert(tsb > 0);
  assert(tcsb > 0);

  *tile_size_bytes = tsb;
  *tile_col_size_bytes = tcsb;

  if (tsb == 4 && tcsb == 4) {
    // Fields are already at the widths write_tiles() used; nothing to move.
    return data_size;
  } else {
    // Compact in place. The read cursor (rpos) always stays at or ahead of
    // the write cursor (wpos), so the overlapping copies below are safe with
    // memmove.
    uint32_t wpos = 0;
    uint32_t rpos = 0;

#if CONFIG_EXT_TILE
    int tile_row;
    int tile_col;

    for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
      // All but the last column has a column header
      if (tile_col < cm->tile_cols - 1) {
        uint32_t tile_col_size = mem_get_le32(dst + rpos);
        rpos += 4;

        // Adjust the tile column size by the number of bytes removed
        // from the tile size fields.
        tile_col_size -= (4 - tsb) * cm->tile_rows;

        mem_put_varsize(dst + wpos, tcsb, tile_col_size);
        wpos += tcsb;
      }

      for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
        // All, including the last row has a header
        uint32_t tile_header = mem_get_le32(dst + rpos);
        rpos += 4;

        // If this is a copy tile, we need to shift the MSB to the
        // top bit of the new width, and there is no data to copy.
        if (tile_header >> 31 != 0) {
          if (tsb < 4) tile_header >>= 32 - 8 * tsb;
          mem_put_varsize(dst + wpos, tsb, tile_header);
          wpos += tsb;
        } else {
          mem_put_varsize(dst + wpos, tsb, tile_header);
          wpos += tsb;

          // For a non-copy tile the header value is the payload length;
          // slide the payload down over the gap left by the narrower field.
          memmove(dst + wpos, dst + rpos, tile_header);
          rpos += tile_header;
          wpos += tile_header;
        }
      }
    }
#else
    const int n_tiles = cm->tile_cols * cm->tile_rows;
    int n;

    for (n = 0; n < n_tiles; n++) {
      int tile_size;

      if (n == n_tiles - 1) {
        // The last tile carries no size field; it runs to the end of the
        // data.
        tile_size = data_size - rpos;
      } else {
        tile_size = mem_get_le32(dst + rpos);
        rpos += 4;
        mem_put_varsize(dst + wpos, tsb, tile_size);
        wpos += tsb;
      }

      memmove(dst + wpos, dst + rpos, tile_size);

      rpos += tile_size;
      wpos += tile_size;
    }
#endif  // CONFIG_EXT_TILE

    // NOTE(review): rpos > wpos presumes at least one 4-byte field was
    // narrowed, which holds because this function is only called when there
    // is more than one tile (see the have_tiles check in av1_pack_bitstream).
    assert(rpos > wpos);
    assert(rpos == data_size);

    return wpos;
  }
}
3705
// Writes the complete coded frame to 'dst' and returns its total byte count
// through 'size'. Layout: uncompressed header, compressed (entropy-coded)
// header, then tile data. Fields whose values are unknown until later (tile
// size-field widths, compressed header size) are written as zero
// placeholders and back-patched through 'saved_wb'.
void av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) {
  uint8_t *data = dst;
  uint32_t compressed_header_size;
  uint32_t uncompressed_header_size;
  uint32_t data_size;
  struct aom_write_bit_buffer wb = { data, 0 };
  // Snapshot of the bit writer taken at the placeholder position; used for
  // back-patching once the real values are known.
  struct aom_write_bit_buffer saved_wb;
  unsigned int max_tile_size;
  unsigned int max_tile_col_size;
  int tile_size_bytes;
  int tile_col_size_bytes;

  AV1_COMMON *const cm = &cpi->common;
  const int have_tiles = cm->tile_cols * cm->tile_rows > 1;

#if CONFIG_BITSTREAM_DEBUG
  bitstream_queue_reset_write();
#endif

  // Write the uncompressed header
  write_uncompressed_header(cpi, &wb);

#if CONFIG_EXT_REFS
  if (cm->show_existing_frame) {
    // A show-existing frame consists of the uncompressed header only.
    *size = aom_wb_bytes_written(&wb);
    return;
  }
#endif  // CONFIG_EXT_REFS

  // We do not know these in advance. Output placeholder bit.
  saved_wb = wb;
  // Write tile size magnitudes
  if (have_tiles) {
// Note that the last item in the uncompressed header is the data
// describing tile configuration.
#if CONFIG_EXT_TILE
    // Number of bytes in tile column size - 1
    aom_wb_write_literal(&wb, 0, 2);
#endif  // CONFIG_EXT_TILE
    // Number of bytes in tile size - 1
    aom_wb_write_literal(&wb, 0, 2);
  }
  // Size of compressed header
  aom_wb_write_literal(&wb, 0, 16);

  uncompressed_header_size = (uint32_t)aom_wb_bytes_written(&wb);
  data += uncompressed_header_size;

  aom_clear_system_state();

  // Write the compressed header
  compressed_header_size = write_compressed_header(cpi, data);
  data += compressed_header_size;

  // Write the encoded tile data
  data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);

  if (have_tiles) {
    // Shrink the 4-byte tile size fields emitted by write_tiles() down to
    // the minimum widths; also determines tile_(col_)size_bytes.
    data_size =
        remux_tiles(cm, data, data_size, max_tile_size, max_tile_col_size,
                    &tile_size_bytes, &tile_col_size_bytes);
  }

  data += data_size;

  // Now fill in the gaps in the uncompressed header.
  if (have_tiles) {
#if CONFIG_EXT_TILE
    assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
    aom_wb_write_literal(&saved_wb, tile_col_size_bytes - 1, 2);
#endif  // CONFIG_EXT_TILE
    assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
    aom_wb_write_literal(&saved_wb, tile_size_bytes - 1, 2);
  }
  // TODO(jbb): Figure out what to do if compressed_header_size > 16 bits.
  assert(compressed_header_size <= 0xffff);
  aom_wb_write_literal(&saved_wb, compressed_header_size, 16);

  *size = data - dst;
}