blob: a4bd8d96fd13cbfcaa87106d21c579732e906e56 [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
12#include <assert.h>
13#include <limits.h>
14#include <stdio.h>
15
Yaowu Xuf883b422016-08-30 14:01:10 -070016#include "aom/aom_encoder.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070017#include "aom_dsp/bitwriter_buffer.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070018#include "aom_dsp/aom_dsp_common.h"
19#include "aom_mem/aom_mem.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070020#include "aom_ports/mem_ops.h"
21#include "aom_ports/system_state.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070022#if CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070023#include "aom_util/debug_util.h"
Angie Chiang6062a8b2016-09-21 16:01:04 -070024#endif // CONFIG_BITSTREAM_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -070025
26#if CONFIG_CLPF
27#include "av1/common/clpf.h"
28#endif
29#if CONFIG_DERING
30#include "av1/common/dering.h"
31#endif // CONFIG_DERING
32#include "av1/common/entropy.h"
33#include "av1/common/entropymode.h"
34#include "av1/common/entropymv.h"
35#include "av1/common/mvref_common.h"
Thomas Daviesf6936102016-09-05 16:51:31 +010036#include "av1/common/odintrin.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070037#include "av1/common/pred_common.h"
38#include "av1/common/reconinter.h"
39#include "av1/common/seg_common.h"
40#include "av1/common/tile_common.h"
41
42#if CONFIG_ANS
Alex Converse1ac1ae72016-09-17 15:11:16 -070043#include "aom_dsp/buf_ans.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070044#endif // CONFIG_ANS
45#include "av1/encoder/bitstream.h"
46#include "av1/encoder/cost.h"
47#include "av1/encoder/encodemv.h"
48#include "av1/encoder/mcomp.h"
49#include "av1/encoder/segmentation.h"
50#include "av1/encoder/subexp.h"
51#include "av1/encoder/tokenize.h"
52
// Token tables used to signal symbols in the bitstream. Each av1_token is a
// (value, length) pair: `value` is the code written MSB-first in `length`
// bits. The tables below must match the corresponding av1_*_tree structures
// used by the decoder; do not change the numeric values.
static const struct av1_token intra_mode_encodings[INTRA_MODES] = {
  { 0, 1 }, { 6, 3 }, { 28, 5 }, { 30, 5 }, { 58, 6 },
  { 59, 6 }, { 126, 7 }, { 127, 7 }, { 62, 6 }, { 2, 2 }
};
#if CONFIG_EXT_INTERP
static const struct av1_token switchable_interp_encodings[SWITCHABLE_FILTERS] =
    { { 0, 1 }, { 4, 3 }, { 6, 3 }, { 5, 3 }, { 7, 3 } };
#else
static const struct av1_token switchable_interp_encodings[SWITCHABLE_FILTERS] =
    { { 0, 1 }, { 2, 2 }, { 3, 2 } };
#endif  // CONFIG_EXT_INTERP
#if CONFIG_EXT_PARTITION_TYPES
static const struct av1_token ext_partition_encodings[EXT_PARTITION_TYPES] = {
  { 0, 1 }, { 4, 3 }, { 12, 4 }, { 7, 3 },
  { 10, 4 }, { 11, 4 }, { 26, 5 }, { 27, 5 }
};
#endif
static const struct av1_token partition_encodings[PARTITION_TYPES] = {
  { 0, 1 }, { 2, 2 }, { 6, 3 }, { 7, 3 }
};
#if !CONFIG_REF_MV
static const struct av1_token inter_mode_encodings[INTER_MODES] =
#if CONFIG_EXT_INTER
    { { 2, 2 }, { 6, 3 }, { 0, 1 }, { 14, 4 }, { 15, 4 } };
#else
    { { 2, 2 }, { 6, 3 }, { 0, 1 }, { 7, 3 } };
#endif  // CONFIG_EXT_INTER
#endif
#if CONFIG_EXT_INTER
static const struct av1_token
    inter_compound_mode_encodings[INTER_COMPOUND_MODES] = {
      { 2, 2 }, { 50, 6 }, { 51, 6 }, { 24, 5 }, { 52, 6 },
      { 53, 6 }, { 54, 6 }, { 55, 6 }, { 0, 1 }, { 7, 3 }
    };
#endif  // CONFIG_EXT_INTER
#if CONFIG_PALETTE
static const struct av1_token palette_size_encodings[] = {
  { 0, 1 }, { 2, 2 }, { 6, 3 }, { 14, 4 }, { 30, 5 }, { 62, 6 }, { 63, 6 },
};
// palette_color_encodings[n - 2] is the token table for an n-color palette
// (n in [2, PALETTE_MAX_SIZE]); entry k encodes color index k.
static const struct av1_token
    palette_color_encodings[PALETTE_MAX_SIZE - 1][PALETTE_MAX_SIZE] = {
      { { 0, 1 }, { 1, 1 } },                              // 2 colors
      { { 0, 1 }, { 2, 2 }, { 3, 2 } },                    // 3 colors
      { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 7, 3 } },          // 4 colors
      { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 14, 4 }, { 15, 4 } },  // 5 colors
      { { 0, 1 },
        { 2, 2 },
        { 6, 3 },
        { 14, 4 },
        { 30, 5 },
        { 31, 5 } },  // 6 colors
      { { 0, 1 },
        { 2, 2 },
        { 6, 3 },
        { 14, 4 },
        { 30, 5 },
        { 62, 6 },
        { 63, 6 } },  // 7 colors
      { { 0, 1 },
        { 2, 2 },
        { 6, 3 },
        { 14, 4 },
        { 30, 5 },
        { 62, 6 },
        { 126, 7 },
        { 127, 7 } },  // 8 colors
    };
#endif  // CONFIG_PALETTE

// tx_size_encodings[cat] is indexed by the coded (square-upsampled) tx size;
// `cat` selects the table for the block's maximum transform size.
static const struct av1_token tx_size_encodings[TX_SIZES - 1][TX_SIZES] = {
  { { 0, 1 }, { 1, 1 } },                      // Max tx_size is 8X8
  { { 0, 1 }, { 2, 2 }, { 3, 2 } },            // Max tx_size is 16X16
  { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 7, 3 } },  // Max tx_size is 32X32
};
127
#if CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
// Writes v (0 <= v < n) using a near-uniform code: with l = get_unsigned_bits(n)
// and m = 2^l - n, the first m symbols take l - 1 bits and the remaining
// n - m symbols take l bits. Writes nothing when n <= 1 (l == 0).
static INLINE void write_uniform(aom_writer *w, int n, int v) {
  const int bits = get_unsigned_bits(n);
  const int n_short = (1 << bits) - n;  // count of (bits - 1)-bit codewords
  if (bits == 0) return;
  if (v >= n_short) {
    // Long codeword: (bits - 1)-bit prefix followed by one parity bit.
    const int rem = v - n_short;
    aom_write_literal(w, n_short + (rem >> 1), bits - 1);
    aom_write_literal(w, rem & 1, 1);
  } else {
    // Short codeword: the value itself in bits - 1 bits.
    aom_write_literal(w, v, bits - 1);
  }
}
#endif  // CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -0700141
// Token tables built at runtime by av1_encode_token_init() from the
// corresponding av1_*_tree structures (unlike the constant tables above,
// which are hand-specified).
#if CONFIG_EXT_TX
static struct av1_token ext_tx_inter_encodings[EXT_TX_SETS_INTER][TX_TYPES];
static struct av1_token ext_tx_intra_encodings[EXT_TX_SETS_INTRA][TX_TYPES];
#else
static struct av1_token ext_tx_encodings[TX_TYPES];
#endif  // CONFIG_EXT_TX
#if CONFIG_GLOBAL_MOTION
static struct av1_token global_motion_types_encodings[GLOBAL_MOTION_TYPES];
#endif  // CONFIG_GLOBAL_MOTION
#if CONFIG_EXT_INTRA
static struct av1_token intra_filter_encodings[INTRA_FILTERS];
#endif  // CONFIG_EXT_INTRA
#if CONFIG_EXT_INTER
static struct av1_token interintra_mode_encodings[INTERINTRA_MODES];
#endif  // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
static struct av1_token motion_mode_encodings[MOTION_MODES];
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_LOOP_RESTORATION
static struct av1_token switchable_restore_encodings[RESTORE_SWITCHABLE_TYPES];
#endif  // CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -0700163
// One-time initialization of the runtime token tables declared above,
// derived from the shared av1_*_tree structures. Must be called before any
// of those tables are used for writing.
void av1_encode_token_init(void) {
#if CONFIG_EXT_TX
  int s;
  // Set 0 of each ext-tx group is skipped (loop starts at 1).
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    av1_tokens_from_tree(ext_tx_inter_encodings[s], av1_ext_tx_inter_tree[s]);
  }
  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    av1_tokens_from_tree(ext_tx_intra_encodings[s], av1_ext_tx_intra_tree[s]);
  }
#else
  av1_tokens_from_tree(ext_tx_encodings, av1_ext_tx_tree);
#endif  // CONFIG_EXT_TX
#if CONFIG_EXT_INTRA
  av1_tokens_from_tree(intra_filter_encodings, av1_intra_filter_tree);
#endif  // CONFIG_EXT_INTRA
#if CONFIG_EXT_INTER
  av1_tokens_from_tree(interintra_mode_encodings, av1_interintra_mode_tree);
#endif  // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
  av1_tokens_from_tree(motion_mode_encodings, av1_motion_mode_tree);
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_GLOBAL_MOTION
  av1_tokens_from_tree(global_motion_types_encodings,
                       av1_global_motion_types_tree);
#endif  // CONFIG_GLOBAL_MOTION
#if CONFIG_LOOP_RESTORATION
  av1_tokens_from_tree(switchable_restore_encodings,
                       av1_switchable_restore_tree);
#endif  // CONFIG_LOOP_RESTORATION

#if CONFIG_DAALA_EC
  /* This hack is necessary when CONFIG_EXT_INTERP is enabled because the five
     SWITCHABLE_FILTERS are not consecutive, e.g., 0, 1, 2, 3, 4, when doing
     an in-order traversal of the av1_switchable_interp_tree structure. */
  av1_indices_from_tree(av1_switchable_interp_ind, av1_switchable_interp_inv,
                        SWITCHABLE_FILTERS, av1_switchable_interp_tree);
  /* This hack is necessary because the four TX_TYPES are not consecutive,
     e.g., 0, 1, 2, 3, when doing an in-order traversal of the av1_ext_tx_tree
     structure. */
  av1_indices_from_tree(av1_ext_tx_ind, av1_ext_tx_inv, TX_TYPES,
                        av1_ext_tx_tree);
#endif
}
207
// Writes an intra prediction mode symbol using the intra-mode tree and the
// given per-context probabilities.
static void write_intra_mode(aom_writer *w, PREDICTION_MODE mode,
                             const aom_prob *probs) {
  av1_write_token(w, av1_intra_mode_tree, probs, &intra_mode_encodings[mode]);
}
212
#if CONFIG_EXT_INTER
// Writes an inter-intra prediction mode symbol using the interintra tree.
static void write_interintra_mode(aom_writer *w, INTERINTRA_MODE mode,
                                  const aom_prob *probs) {
  av1_write_token(w, av1_interintra_mode_tree, probs,
                  &interintra_mode_encodings[mode]);
}
#endif  // CONFIG_EXT_INTER
220
// Writes an inter prediction mode. With CONFIG_REF_MV the mode is signalled
// as a cascade of binary decisions (NEWMV? -> ZEROMV? -> NEARESTMV/NEARMV),
// each conditioned on bits packed into mode_ctx; otherwise a single tree
// token is written. The bit order here must mirror the decoder exactly.
static void write_inter_mode(AV1_COMMON *cm, aom_writer *w,
                             PREDICTION_MODE mode,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                             int is_compound,
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
                             const int16_t mode_ctx) {
#if CONFIG_REF_MV
  const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
  const aom_prob newmv_prob = cm->fc->newmv_prob[newmv_ctx];
#if CONFIG_EXT_INTER
  // First bit: is the mode something other than NEWMV/NEWFROMNEARMV?
  aom_write(w, mode != NEWMV && mode != NEWFROMNEARMV, newmv_prob);

  // Single-reference new-MV modes additionally distinguish NEWFROMNEARMV.
  if (!is_compound && (mode == NEWMV || mode == NEWFROMNEARMV))
    aom_write(w, mode == NEWFROMNEARMV, cm->fc->new2mv_prob);

  if (mode != NEWMV && mode != NEWFROMNEARMV) {
#else
  aom_write(w, mode != NEWMV, newmv_prob);

  if (mode != NEWMV) {
#endif  // CONFIG_EXT_INTER
    const int16_t zeromv_ctx = (mode_ctx >> ZEROMV_OFFSET) & ZEROMV_CTX_MASK;
    const aom_prob zeromv_prob = cm->fc->zeromv_prob[zeromv_ctx];

    // When the all-zero flag is set in the context, ZEROMV is implied and no
    // further bits are written.
    if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) {
      assert(mode == ZEROMV);
      return;
    }

    aom_write(w, mode != ZEROMV, zeromv_prob);

    if (mode != ZEROMV) {
      int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
      aom_prob refmv_prob;

      // Skip flags in the context override the derived refmv context with
      // fixed special-case contexts 6-8.
      if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
      if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
      if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;

      refmv_prob = cm->fc->refmv_prob[refmv_ctx];
      aom_write(w, mode != NEARESTMV, refmv_prob);
    }
  }
#else
  const aom_prob *const inter_probs = cm->fc->inter_mode_probs[mode_ctx];
  assert(is_inter_mode(mode));
  av1_write_token(w, av1_inter_mode_tree, inter_probs,
                  &inter_mode_encodings[INTER_OFFSET(mode)]);
#endif
}
271
#if CONFIG_REF_MV
// Writes the dynamic reference MV list (DRL) index for NEWMV / NEARMV modes
// as a unary sequence of "not this index" bits, each with its own context
// derived from the reference MV stack. Writes nothing for other modes.
static void write_drl_idx(const AV1_COMMON *cm, const MB_MODE_INFO *mbmi,
                          const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

  assert(mbmi->ref_mv_idx < 3);

  if (mbmi->mode == NEWMV) {
    int idx;
    // Up to two bits; a bit is only written while more candidates exist.
    for (idx = 0; idx < 2; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];

        aom_write(w, mbmi->ref_mv_idx != idx, drl_prob);
        if (mbmi->ref_mv_idx == idx) return;
      }
    }
    return;
  }

  if (mbmi->mode == NEARMV) {
    int idx;
    // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
    // Stack positions are shifted by one relative to ref_mv_idx here.
    for (idx = 1; idx < 3; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];

        aom_write(w, mbmi->ref_mv_idx != (idx - 1), drl_prob);
        if (mbmi->ref_mv_idx == (idx - 1)) return;
      }
    }
    return;
  }
}
#endif
311
#if CONFIG_EXT_INTER
// Writes a compound inter mode token using the per-context probability set
// selected by mode_ctx.
static void write_inter_compound_mode(AV1_COMMON *cm, aom_writer *w,
                                      PREDICTION_MODE mode,
                                      const int16_t mode_ctx) {
  const aom_prob *const inter_compound_probs =
      cm->fc->inter_compound_mode_probs[mode_ctx];

  assert(is_inter_compound_mode(mode));
  av1_write_token(w, av1_inter_compound_mode_tree, inter_compound_probs,
                  &inter_compound_mode_encodings[INTER_COMPOUND_OFFSET(mode)]);
}
#endif  // CONFIG_EXT_INTER
324
// Writes `data` as a fixed-length literal sized to represent values up to
// `max` (get_unsigned_bits(max) bits).
static void encode_unsigned_max(struct aom_write_bit_buffer *wb, int data,
                                int max) {
  aom_wb_write_literal(wb, data, get_unsigned_bits(max));
}
329
// Conditionally updates each of the n - 1 internal-node probabilities of a
// coding tree from observed symbol counts, writing any updates into the
// bitstream via av1_cond_prob_diff_update.
static void prob_diff_update(const aom_tree_index *tree,
                             aom_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/], int n,
                             aom_writer *w) {
  int i;
  unsigned int branch_ct[32][2];

  // Assuming max number of probabilities <= 32
  assert(n <= 32);

  av1_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    av1_cond_prob_diff_update(w, &probs[i], branch_ct[i]);
}
344
// Returns the total estimated rate savings (without writing anything) that
// prob_diff_update() would achieve for this tree — used to decide whether a
// grouped update is worth signalling at all.
static int prob_diff_update_savings(const aom_tree_index *tree,
                                    aom_prob probs[/*n - 1*/],
                                    const unsigned int counts[/*n - 1*/],
                                    int n) {
  int i;
  unsigned int branch_ct[32][2];
  int savings = 0;

  // Assuming max number of probabilities <= 32
  assert(n <= 32);
  av1_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i) {
    savings += av1_cond_prob_diff_update_savings(&probs[i], branch_ct[i]);
  }
  return savings;
}
361
#if CONFIG_VAR_TX
// Recursively signals the variable transform-size partitioning for an inter
// block: one "split" bit per node, recursing into four quadrants until the
// stored inter_tx_size is reached (TX_8X8 splits directly to TX_4X4 with no
// further recursion). Also updates the above/left txfm context buffers so
// later contexts are derived consistently with the decoder.
static void write_tx_size_vartx(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
                                int blk_row, int blk_col, aom_writer *w) {
  const int tx_row = blk_row >> 1;
  const int tx_col = blk_col >> 1;
  int max_blocks_high = num_4x4_blocks_high_lookup[mbmi->sb_type];
  int max_blocks_wide = num_4x4_blocks_wide_lookup[mbmi->sb_type];
  int ctx = txfm_partition_context(xd->above_txfm_context + tx_col,
                                   xd->left_txfm_context + tx_row, tx_size);

  // Shrink the block extent when it overhangs the frame edge.
  // NOTE(review): the >> 5 presumably converts the edge distance to 4x4
  // block units — confirm against the decoder's matching code.
  if (xd->mb_to_bottom_edge < 0) max_blocks_high += xd->mb_to_bottom_edge >> 5;
  if (xd->mb_to_right_edge < 0) max_blocks_wide += xd->mb_to_right_edge >> 5;

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (tx_size == mbmi->inter_tx_size[tx_row][tx_col]) {
    // Leaf: signal "no split" and record this tx size in the contexts.
    aom_write(w, 0, cm->fc->txfm_partition_prob[ctx]);
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size);
  } else {
    const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
    int bsl = b_width_log2_lookup[bsize];
    int i;
    aom_write(w, 1, cm->fc->txfm_partition_prob[ctx]);

    if (tx_size == TX_8X8) {
      // An 8x8 split goes straight to 4x4; no recursion needed.
      txfm_partition_update(xd->above_txfm_context + tx_col,
                            xd->left_txfm_context + tx_row, TX_4X4);
      return;
    }

    assert(bsl > 0);
    --bsl;
    // Recurse into the four quadrants of the current transform block.
    for (i = 0; i < 4; ++i) {
      int offsetr = blk_row + ((i >> 1) << bsl);
      int offsetc = blk_col + ((i & 0x01) << bsl);
      write_tx_size_vartx(cm, xd, mbmi, tx_size - 1, offsetr, offsetc, w);
    }
  }
}
403
// Conditionally updates the per-context transform-partition split
// probabilities from the frame's counts.
static void update_txfm_partition_probs(AV1_COMMON *cm, aom_writer *w,
                                        FRAME_COUNTS *counts) {
  int k;
  for (k = 0; k < TXFM_PARTITION_CONTEXTS; ++k)
    av1_cond_prob_diff_update(w, &cm->fc->txfm_partition_prob[k],
                              counts->txfm_partition[k]);
}
#endif
412
// Writes the transform size chosen for the current block as a tree token.
// The token table category depends on intra/inter and the block size; the
// coded symbol is the square-upsampled tx size. Sub-8x8 blocks carry no
// tx_size symbol at all.
static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  // For sub8x8 blocks the tx_size symbol does not need to be sent
  if (bsize >= BLOCK_8X8) {
    const TX_SIZE tx_size = mbmi->tx_size;
    const int is_inter = is_inter_block(mbmi);
    const int tx_size_ctx = get_tx_size_context(xd);
    const int tx_size_cat = is_inter ? inter_tx_size_cat_lookup[bsize]
                                     : intra_tx_size_cat_lookup[bsize];
    const TX_SIZE coded_tx_size = txsize_sqr_up_map[tx_size];

#if CONFIG_EXT_TX && CONFIG_RECT_TX
    // Rectangular transforms are only legal where allowed, and must match
    // the maximum rectangular size for the block.
    assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));
    assert(
        IMPLIES(is_rect_tx(tx_size), tx_size == max_txsize_rect_lookup[bsize]));
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX

    av1_write_token(w, av1_tx_size_tree[tx_size_cat],
                    cm->fc->tx_size_probs[tx_size_cat][tx_size_ctx],
                    &tx_size_encodings[tx_size_cat][coded_tx_size]);
  }
}
437
#if CONFIG_REF_MV
// Conditionally updates the binary inter-mode signalling probabilities
// (newmv / zeromv / refmv / drl, plus new2mv under EXT_INTER) from counts.
static void update_inter_mode_probs(AV1_COMMON *cm, aom_writer *w,
                                    FRAME_COUNTS *counts) {
  int i;
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->newmv_prob[i], counts->newmv_mode[i]);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->zeromv_prob[i],
                              counts->zeromv_mode[i]);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->refmv_prob[i], counts->refmv_mode[i]);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->drl_prob[i], counts->drl_mode[i]);
#if CONFIG_EXT_INTER
  av1_cond_prob_diff_update(w, &cm->fc->new2mv_prob, counts->new2mv_mode);
#endif  // CONFIG_EXT_INTER
}
#endif
456
#if CONFIG_EXT_INTER
// Grouped update of the compound inter-mode tree probabilities: first sums
// potential rate savings across all contexts, then writes a single on/off
// flag, and only performs the per-context updates when the savings exceed
// the cost of signalling the group flag.
static void update_inter_compound_mode_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i;
  int savings = 0;
  int do_update = 0;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
    savings += prob_diff_update_savings(
        av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
        cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
      prob_diff_update(
          av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
          cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES, w);
    }
  }
}
#endif  // CONFIG_EXT_INTER
480
// Writes the block's skip flag unless the segment forces skip (in which
// case nothing is written and skip is implied). Returns the effective skip
// value so the caller knows whether coefficient data follows.
static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                      int segment_id, const MODE_INFO *mi, aom_writer *w) {
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip = mi->mbmi.skip;
    aom_write(w, skip, av1_get_skip_prob(cm, xd));
    return skip;
  }
}
491
#if CONFIG_DELTA_Q
// Writes a per-block quantizer delta. |delta_qindex| values below
// DELTA_Q_SMALL are coded as a unary sequence of context-coded bits; larger
// magnitudes escape to an explicit (3-bit length, remainder) literal pair.
// A trailing sign bit is written for any nonzero delta.
static void write_delta_qindex(const AV1_COMMON *cm, int delta_qindex,
                               aom_writer *w) {
  int sign = delta_qindex < 0;
  int abs = sign ? -delta_qindex : delta_qindex;
  int rem_bits, thr, i = 0;
  int smallval = abs < DELTA_Q_SMALL ? 1 : 0;

  // Unary part: a run of 1-bits of length min(abs, DELTA_Q_SMALL), each
  // with its own context probability, terminated by a 0-bit if small.
  while (i < DELTA_Q_SMALL && i <= abs) {
    int bit = (i < abs);
    aom_write(w, bit, cm->fc->delta_q_prob[i]);
    i++;
  }

  if (!smallval) {
    // Escape coding for large magnitudes: bit-length then remainder.
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}
// Conditionally updates the delta-q unary-bit probabilities from counts.
static void update_delta_q_probs(AV1_COMMON *cm, aom_writer *w,
                                 FRAME_COUNTS *counts) {
  int k;

  for (k = 0; k < DELTA_Q_CONTEXTS; ++k) {
    av1_cond_prob_diff_update(w, &cm->fc->delta_q_prob[k], counts->delta_q[k]);
  }
}
#endif
526
// Conditionally updates the per-context skip-flag probabilities from counts.
static void update_skip_probs(AV1_COMMON *cm, aom_writer *w,
                              FRAME_COUNTS *counts) {
  int k;

  for (k = 0; k < SKIP_CONTEXTS; ++k)
    av1_cond_prob_diff_update(w, &cm->fc->skip_probs[k], counts->skip[k]);
}
534
// Conditionally updates the switchable interpolation-filter tree
// probabilities for every context; with the Daala entropy coder enabled,
// also regenerates the matching CDFs from the updated probabilities.
static void update_switchable_interp_probs(AV1_COMMON *cm, aom_writer *w,
                                           FRAME_COUNTS *counts) {
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
    prob_diff_update(av1_switchable_interp_tree,
                     cm->fc->switchable_interp_prob[j],
                     counts->switchable_interp[j], SWITCHABLE_FILTERS, w);
#if CONFIG_DAALA_EC
    av1_tree_to_cdf(av1_switchable_interp_tree,
                    cm->fc->switchable_interp_prob[j],
                    cm->fc->switchable_interp_cdf[j]);
#endif
  }
}
549
#if CONFIG_EXT_TX
// Grouped update of the extended transform-type probabilities. Inter and
// intra sets are handled independently: for each set (set 0 is fixed and
// skipped), savings are summed over the usable tx sizes (and intra modes),
// a single group flag is written, and the actual updates follow only when
// the savings beat the flag's signalling cost.
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;
  int s;
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    int savings = 0;
    int do_update = 0;
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_inter_ext_tx_for_txsize[s][i]) continue;
      savings += prob_diff_update_savings(
          av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
          cm->counts.inter_ext_tx[s][i], num_ext_tx_set_inter[s]);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_inter_ext_tx_for_txsize[s][i]) continue;
        prob_diff_update(
            av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
            cm->counts.inter_ext_tx[s][i], num_ext_tx_set_inter[s], w);
      }
    }
  }

  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    int savings = 0;
    int do_update = 0;
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_intra_ext_tx_for_txsize[s][i]) continue;
      for (j = 0; j < INTRA_MODES; ++j)
        savings += prob_diff_update_savings(
            av1_ext_tx_intra_tree[s], cm->fc->intra_ext_tx_prob[s][i][j],
            cm->counts.intra_ext_tx[s][i][j], num_ext_tx_set_intra[s]);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_intra_ext_tx_for_txsize[s][i]) continue;
        for (j = 0; j < INTRA_MODES; ++j)
          prob_diff_update(
              av1_ext_tx_intra_tree[s], cm->fc->intra_ext_tx_prob[s][i][j],
              cm->counts.intra_ext_tx[s][i][j], num_ext_tx_set_intra[s], w);
      }
    }
  }
}
600
#else

// Grouped update of the (non-extended) transform-type probabilities: intra
// probabilities first (per tx size and intra mode), then inter (per tx
// size), each gated by its own savings-vs-threshold group flag. With the
// Daala entropy coder, CDFs are regenerated after each update.
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;

  int savings = 0;
  int do_update = 0;
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    for (j = 0; j < TX_TYPES; ++j)
      savings += prob_diff_update_savings(
          av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
          cm->counts.intra_ext_tx[i][j], TX_TYPES);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      for (j = 0; j < TX_TYPES; ++j) {
        prob_diff_update(av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
                         cm->counts.intra_ext_tx[i][j], TX_TYPES, w);
#if CONFIG_DAALA_EC
        av1_tree_to_cdf(av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
                        cm->fc->intra_ext_tx_cdf[i][j]);
#endif
      }
    }
  }
  savings = 0;
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    savings +=
        prob_diff_update_savings(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                                 cm->counts.inter_ext_tx[i], TX_TYPES);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      prob_diff_update(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                       cm->counts.inter_ext_tx[i], TX_TYPES, w);
#if CONFIG_DAALA_EC
      av1_tree_to_cdf(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                      cm->fc->inter_ext_tx_cdf[i]);
#endif
    }
  }
}
#endif  // CONFIG_EXT_TX
650
Urvang Joshib100db72016-10-12 16:28:56 -0700651#if CONFIG_PALETTE
Yaowu Xuf883b422016-08-30 14:01:10 -0700652static void pack_palette_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700653 int num) {
654 int i;
655 const TOKENEXTRA *p = *tp;
656
657 for (i = 0; i < num; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700658 av1_write_token(w, av1_palette_color_tree[n - 2], p->context_tree,
659 &palette_color_encodings[n - 2][p->token]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700660 ++p;
661 }
662
663 *tp = p;
664}
Urvang Joshib100db72016-10-12 16:28:56 -0700665#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -0700666
667#if CONFIG_SUPERTX
Yaowu Xuf883b422016-08-30 14:01:10 -0700668static void update_supertx_probs(AV1_COMMON *cm, aom_writer *w) {
669 const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
670 av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700671 int i, j;
672 int savings = 0;
673 int do_update = 0;
674 for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
675 for (j = 1; j < TX_SIZES; ++j) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700676 savings += av1_cond_prob_diff_update_savings(&cm->fc->supertx_prob[i][j],
677 cm->counts.supertx[i][j]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700678 }
679 }
680 do_update = savings > savings_thresh;
Yaowu Xuf883b422016-08-30 14:01:10 -0700681 aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700682 if (do_update) {
683 for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
684 for (j = 1; j < TX_SIZES; ++j) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700685 av1_cond_prob_diff_update(w, &cm->fc->supertx_prob[i][j],
686 cm->counts.supertx[i][j]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700687 }
688 }
689 }
690}
691#endif // CONFIG_SUPERTX
692
// Writes the coefficient tokens in [*tp, stop) for one transform block to the
// bitstream, stopping at the first EOSB_TOKEN (and, under CONFIG_VAR_TX, also
// after an EOB token or after seg_eob tokens).  Advances *tp past everything
// consumed.  The bit layout written here must match the decoder's token
// reader exactly.
static void pack_mb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop,
                           aom_bit_depth_t bit_depth, const TX_SIZE tx) {
  const TOKENEXTRA *p = *tp;
#if CONFIG_VAR_TX
  int count = 0;
  // Maximum number of coefficients in this transform size.
  const int seg_eob = get_tx2d_size(tx);
#endif

  while (p < stop && p->token != EOSB_TOKEN) {
    const int token = p->token;
    aom_tree_index index = 0;
#if !CONFIG_RANS
    const struct av1_token *const coef_encoding = &av1_coef_encodings[token];
    int coef_value = coef_encoding->value;
    int coef_length = coef_encoding->len;
#endif  // !CONFIG_RANS
#if CONFIG_AOM_HIGHBITDEPTH
    // Extra-bit tables differ per bit depth (8/10/12) because the magnitude
    // categories widen with depth.
    const av1_extra_bit *const extra_bits_av1 =
        (bit_depth == AOM_BITS_12)
            ? &av1_extra_bits_high12[token]
            : (bit_depth == AOM_BITS_10) ? &av1_extra_bits_high10[token]
                                         : &av1_extra_bits[token];
#else
    const av1_extra_bit *const extra_bits_av1 = &av1_extra_bits[token];
    (void)bit_depth;
#endif  // CONFIG_AOM_HIGHBITDEPTH

#if CONFIG_RANS
    /* skip one or two nodes */
    // EOB flag is only coded when this token could have been an EOB
    // (skip_eob_node == 0); the remaining token value is coded as one
    // symbol from the token CDF.
    if (!p->skip_eob_node) aom_write(w, token != EOB_TOKEN, p->context_tree[0]);

    if (token != EOB_TOKEN) {
      aom_write(w, token != ZERO_TOKEN, p->context_tree[1]);

      if (token != ZERO_TOKEN) {
        aom_write_symbol(w, token - ONE_TOKEN, *p->token_cdf,
                         CATEGORY6_TOKEN - ONE_TOKEN + 1);
      }
    }
#else
    /* skip one or two nodes */
    // When the EOB decision is implied by context, the tree walk below
    // starts deeper, so the coded length shrinks by skip_eob_node.
    if (p->skip_eob_node)
      coef_length -= p->skip_eob_node;
    else
      aom_write(w, token != EOB_TOKEN, p->context_tree[0]);

    if (token != EOB_TOKEN) {
      aom_write(w, token != ZERO_TOKEN, p->context_tree[1]);

      if (token != ZERO_TOKEN) {
        aom_write(w, token != ONE_TOKEN, p->context_tree[2]);

        if (token != ONE_TOKEN) {
          // Remaining (constrained) tree nodes use Pareto-modeled
          // probabilities derived from the pivot-node context.
          const int unconstrained_len = UNCONSTRAINED_NODES - p->skip_eob_node;
          aom_write_tree(w, av1_coef_con_tree,
                         av1_pareto8_full[p->context_tree[PIVOT_NODE] - 1],
                         coef_value, coef_length - unconstrained_len, 0);
        }
      }
    }
#endif  // CONFIG_RANS

    // Tokens with a base value carry extra magnitude bits plus a raw
    // low-order bit of p->extra.
    if (extra_bits_av1->base_val) {
      const int extra_bits = p->extra;
      const int extra_bits_av1_length = extra_bits_av1->len;
      // For CAT6 tokens, the top bits that cannot be set at this transform
      // size are skipped (asserted zero below) rather than coded.
      int skip_bits = (extra_bits_av1->base_val == CAT6_MIN_VAL)
                          ? TX_SIZES - 1 - txsize_sqr_up_map[tx]
                          : 0;

      if (extra_bits_av1_length) {
        const unsigned char *pb = extra_bits_av1->prob;
        const int value = extra_bits >> 1;
        int num_bits = extra_bits_av1_length;  // number of bits in value
        assert(num_bits > 0);

        // Walk the extra-bit tree MSB-first, coding each bit with the
        // probability attached to its tree node.
        index = 0;
        do {
          const int bb = (value >> --num_bits) & 1;
          if (skip_bits) {
            --skip_bits;
            assert(!bb);
          } else {
            aom_write(w, bb, pb[index >> 1]);
          }
          index = extra_bits_av1->tree[index + bb];
        } while (num_bits);
      }

      // Lowest bit of p->extra is written raw with probability 1/2.
      aom_write_bit(w, extra_bits & 1);
    }
    ++p;

#if CONFIG_VAR_TX
    ++count;
    if (token == EOB_TOKEN || count == seg_eob) break;
#endif
  }

  *tp = p;
}
Yaowu Xuc27fc142016-08-22 16:08:15 -0700794
795#if CONFIG_VAR_TX
// Recursively packs the coefficient tokens for one transform block following
// the transform-partition tree recorded in mbmi->inter_tx_size: when the
// recursion reaches the block's actual transform size, its tokens are
// emitted; otherwise the block splits into four quadrants at the next
// smaller transform size.
static void pack_txb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                            const TOKENEXTRA *const tok_end, MACROBLOCKD *xd,
                            MB_MODE_INFO *mbmi, int plane,
                            BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
                            int block, int blk_row, int blk_col,
                            TX_SIZE tx_size) {
  const struct macroblockd_plane *const pd = &xd->plane[plane];
  const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
  // Map the 4x4-block position into the tx-size grid of this plane; the
  // shift by (1 - subsampling) matches how inter_tx_size is indexed.
  const int tx_row = blk_row >> (1 - pd->subsampling_y);
  const int tx_col = blk_col >> (1 - pd->subsampling_x);
  TX_SIZE plane_tx_size;
  int max_blocks_high = num_4x4_blocks_high_lookup[plane_bsize];
  int max_blocks_wide = num_4x4_blocks_wide_lookup[plane_bsize];

  // Shrink the valid extent for blocks overhanging the frame boundary
  // (mb_to_*_edge is negative in that case, in 1/8-pel units).
  if (xd->mb_to_bottom_edge < 0)
    max_blocks_high += xd->mb_to_bottom_edge >> (5 + pd->subsampling_y);
  if (xd->mb_to_right_edge < 0)
    max_blocks_wide += xd->mb_to_right_edge >> (5 + pd->subsampling_x);

  // Positions entirely outside the visible area carry no tokens.
  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  // Chroma derives its transform size from the co-located luma choice.
  plane_tx_size =
      plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
            : mbmi->inter_tx_size[tx_row][tx_col];

  if (tx_size == plane_tx_size) {
    // Leaf of the partition tree: emit this transform block's tokens.
    pack_mb_tokens(w, tp, tok_end, bit_depth, tx_size);
  } else {
    int bsl = b_width_log2_lookup[bsize];
    int i;

    assert(bsl > 0);
    --bsl;

    // Recurse into the four quadrants at the next smaller transform size;
    // 'step' is the number of 4x4 coefficient blocks each quadrant covers.
    for (i = 0; i < 4; ++i) {
      const int offsetr = blk_row + ((i >> 1) << bsl);
      const int offsetc = blk_col + ((i & 0x01) << bsl);
      int step = num_4x4_blocks_txsize_lookup[tx_size - 1];

      if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;

      pack_txb_tokens(w, tp, tok_end, xd, mbmi, plane, plane_bsize, bit_depth,
                      block + i * step, offsetr, offsetc, tx_size - 1);
    }
  }
}
842#endif
843
// Codes the block's segment id, but only when segmentation is enabled and the
// segment map is being updated this frame; otherwise nothing is written.
static void write_segment_id(aom_writer *w, const struct segmentation *seg,
                             const struct segmentation_probs *segp,
                             int segment_id) {
  if (seg->enabled && seg->update_map) {
#if CONFIG_DAALA_EC
    // Daala entropy coder: code the id as one symbol from the segment CDF.
    aom_write_symbol(w, segment_id, segp->tree_cdf, MAX_SEGMENTS);
#else
    // Default coder: walk the segment tree (depth 3, i.e. up to 8 segments).
    aom_write_tree(w, av1_segment_tree, segp->tree_probs, segment_id, 3, 0);
#endif
  }
}
855
// Encodes the reference frame(s) of the current block.  If the segment pins
// the reference frame (SEG_LVL_REF_FRAME active) nothing is coded; otherwise
// the compound/single decision is signalled (when the frame uses
// REFERENCE_MODE_SELECT) followed by a binary tree of reference bits whose
// shape depends on CONFIG_EXT_REFS.  Bit order must match the decoder.
static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                             aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const int is_compound = has_second_ref(mbmi);
  const int segment_id = mbmi->segment_id;

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    // Segment dictates the reference: nothing to code, just sanity-check.
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
           get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
  } else {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      aom_write(w, is_compound, av1_get_reference_mode_prob(cm, xd));
    } else {
      assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (is_compound) {
#if CONFIG_EXT_REFS
      // First bit splits {GOLDEN, LAST3} from {LAST, LAST2} for the forward
      // reference; bit_bwd selects the backward reference.
      const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
                       mbmi->ref_frame[0] == LAST3_FRAME);
      const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
#else   // CONFIG_EXT_REFS
      const int bit = mbmi->ref_frame[0] == GOLDEN_FRAME;
#endif  // CONFIG_EXT_REFS

      aom_write(w, bit, av1_get_pred_prob_comp_ref_p(cm, xd));

#if CONFIG_EXT_REFS
      if (!bit) {
        const int bit1 = mbmi->ref_frame[0] == LAST_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_comp_ref_p1(cm, xd));
      } else {
        const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
        aom_write(w, bit2, av1_get_pred_prob_comp_ref_p2(cm, xd));
      }
      aom_write(w, bit_bwd, av1_get_pred_prob_comp_bwdref_p(cm, xd));
#endif  // CONFIG_EXT_REFS
    } else {
#if CONFIG_EXT_REFS
      // Single reference: a binary tree over
      // {LAST, LAST2, LAST3, GOLDEN, BWDREF, ALTREF}.
      const int bit0 = (mbmi->ref_frame[0] == ALTREF_FRAME ||
                        mbmi->ref_frame[0] == BWDREF_FRAME);
      aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd));

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd));
      } else {
        const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
                          mbmi->ref_frame[0] == GOLDEN_FRAME);
        aom_write(w, bit2, av1_get_pred_prob_single_ref_p3(cm, xd));

        if (!bit2) {
          const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
          aom_write(w, bit3, av1_get_pred_prob_single_ref_p4(cm, xd));
        } else {
          const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
          aom_write(w, bit4, av1_get_pred_prob_single_ref_p5(cm, xd));
        }
      }
#else   // CONFIG_EXT_REFS
      // Single reference over {LAST, GOLDEN, ALTREF}: two bits at most.
      const int bit0 = mbmi->ref_frame[0] != LAST_FRAME;
      aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd));

      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] != GOLDEN_FRAME;
        aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd));
      }
#endif  // CONFIG_EXT_REFS
    }
  }
}
933
hui su5db97432016-10-14 16:10:14 -0700934#if CONFIG_FILTER_INTRA
// Signals, for luma ([0]) and then chroma ([1]), whether filter-intra
// prediction is used and, if so, which filter-intra mode.  Only eligible when
// the plane's prediction mode is DC_PRED and (when palette is compiled in)
// the plane has no palette.
static void write_filter_intra_mode_info(const AV1_COMMON *const cm,
                                         const MB_MODE_INFO *const mbmi,
                                         aom_writer *w) {
  if (mbmi->mode == DC_PRED
#if CONFIG_PALETTE
      && mbmi->palette_mode_info.palette_size[0] == 0
#endif  // CONFIG_PALETTE
      ) {
    aom_write(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[0],
              cm->fc->filter_intra_probs[0]);
    if (mbmi->filter_intra_mode_info.use_filter_intra_mode[0]) {
      const FILTER_INTRA_MODE mode =
          mbmi->filter_intra_mode_info.filter_intra_mode[0];
      // Mode index is coded with a uniform (near-fixed-length) code.
      write_uniform(w, FILTER_INTRA_MODES, mode);
    }
  }

  if (mbmi->uv_mode == DC_PRED
#if CONFIG_PALETTE
      && mbmi->palette_mode_info.palette_size[1] == 0
#endif  // CONFIG_PALETTE
      ) {
    aom_write(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[1],
              cm->fc->filter_intra_probs[1]);
    if (mbmi->filter_intra_mode_info.use_filter_intra_mode[1]) {
      const FILTER_INTRA_MODE mode =
          mbmi->filter_intra_mode_info.filter_intra_mode[1];
      write_uniform(w, FILTER_INTRA_MODES, mode);
    }
  }
}
hui su5db97432016-10-14 16:10:14 -0700966#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -0700967
hui su5db97432016-10-14 16:10:14 -0700968#if CONFIG_EXT_INTRA
Yaowu Xuf883b422016-08-30 14:01:10 -0700969static void write_intra_angle_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
970 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700971 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
972 const BLOCK_SIZE bsize = mbmi->sb_type;
Yaowu Xuf883b422016-08-30 14:01:10 -0700973 const int intra_filter_ctx = av1_get_pred_context_intra_interp(xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700974 int p_angle;
975
976 if (bsize < BLOCK_8X8) return;
977
978 if (mbmi->mode != DC_PRED && mbmi->mode != TM_PRED) {
979 write_uniform(w, 2 * MAX_ANGLE_DELTAS + 1,
980 MAX_ANGLE_DELTAS + mbmi->angle_delta[0]);
981 p_angle = mode_to_angle_map[mbmi->mode] + mbmi->angle_delta[0] * ANGLE_STEP;
Yaowu Xuf883b422016-08-30 14:01:10 -0700982 if (av1_is_intra_filter_switchable(p_angle)) {
983 av1_write_token(w, av1_intra_filter_tree,
984 cm->fc->intra_filter_probs[intra_filter_ctx],
985 &intra_filter_encodings[mbmi->intra_filter]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700986 }
987 }
988
989 if (mbmi->uv_mode != DC_PRED && mbmi->uv_mode != TM_PRED) {
990 write_uniform(w, 2 * MAX_ANGLE_DELTAS + 1,
991 MAX_ANGLE_DELTAS + mbmi->angle_delta[1]);
992 }
993}
994#endif // CONFIG_EXT_INTRA
995
// Signals the block's interpolation filter(s) when the frame-level filter is
// SWITCHABLE.  Under CONFIG_EXT_INTERP the filter is omitted entirely when
// no sub-pel interpolation is needed; under CONFIG_DUAL_FILTER a separate
// filter is coded per direction.  Also tallies the chosen filter in
// cpi->interp_filter_selected for encoder-side statistics.
static void write_switchable_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd,
                                           aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
#if CONFIG_DUAL_FILTER
  int dir;
#endif
  if (cm->interp_filter == SWITCHABLE) {
#if CONFIG_EXT_INTERP
#if CONFIG_DUAL_FILTER
    if (!av1_is_interp_needed(xd)) {
      // Decoder infers EIGHTTAP_REGULAR; nothing to write.
      assert(mbmi->interp_filter[0] == EIGHTTAP_REGULAR);
      return;
    }
#else
    if (!av1_is_interp_needed(xd)) {
      // NOTE(review): this inner CONFIG_DUAL_FILTER test is inside the
      // !CONFIG_DUAL_FILTER branch above, so only its #else arm can compile.
#if CONFIG_DUAL_FILTER
      assert(mbmi->interp_filter[0] == EIGHTTAP_REGULAR);
      assert(mbmi->interp_filter[1] == EIGHTTAP_REGULAR);
#else
      assert(mbmi->interp_filter == EIGHTTAP_REGULAR);
#endif
      return;
    }
#endif  // CONFIG_DUAL_FILTER
#endif  // CONFIG_EXT_INTERP
#if CONFIG_DUAL_FILTER
    // One filter per direction; only coded when that direction (for either
    // reference) actually has a sub-pel motion component.
    for (dir = 0; dir < 2; ++dir) {
      if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
          (mbmi->ref_frame[1] > INTRA_FRAME &&
           has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
        const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
        av1_write_token(w, av1_switchable_interp_tree,
                        cm->fc->switchable_interp_prob[ctx],
                        &switchable_interp_encodings[mbmi->interp_filter[dir]]);
        ++cpi->interp_filter_selected[0][mbmi->interp_filter[dir]];
      }
    }
#else
    {
      const int ctx = av1_get_pred_context_switchable_interp(xd);
#if CONFIG_DAALA_EC
      aom_write_symbol(w, av1_switchable_interp_ind[mbmi->interp_filter],
                       cm->fc->switchable_interp_cdf[ctx], SWITCHABLE_FILTERS);
#else
      av1_write_token(w, av1_switchable_interp_tree,
                      cm->fc->switchable_interp_prob[ctx],
                      &switchable_interp_encodings[mbmi->interp_filter]);
#endif
      ++cpi->interp_filter_selected[0][mbmi->interp_filter];
    }
#endif
  }
}
1050
Urvang Joshib100db72016-10-12 16:28:56 -07001051#if CONFIG_PALETTE
Yaowu Xuf883b422016-08-30 14:01:10 -07001052static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
1053 const MODE_INFO *const mi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001054 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1055 const MODE_INFO *const above_mi = xd->above_mi;
1056 const MODE_INFO *const left_mi = xd->left_mi;
1057 const BLOCK_SIZE bsize = mbmi->sb_type;
1058 const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
1059 int palette_ctx = 0;
1060 int n, i;
1061
1062 if (mbmi->mode == DC_PRED) {
1063 n = pmi->palette_size[0];
1064 if (above_mi)
1065 palette_ctx += (above_mi->mbmi.palette_mode_info.palette_size[0] > 0);
1066 if (left_mi)
1067 palette_ctx += (left_mi->mbmi.palette_mode_info.palette_size[0] > 0);
Yaowu Xuf883b422016-08-30 14:01:10 -07001068 aom_write(w, n > 0,
1069 av1_default_palette_y_mode_prob[bsize - BLOCK_8X8][palette_ctx]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001070 if (n > 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001071 av1_write_token(w, av1_palette_size_tree,
1072 av1_default_palette_y_size_prob[bsize - BLOCK_8X8],
1073 &palette_size_encodings[n - 2]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001074 for (i = 0; i < n; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07001075 aom_write_literal(w, pmi->palette_colors[i], cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001076 write_uniform(w, n, pmi->palette_first_color_idx[0]);
1077 }
1078 }
1079
1080 if (mbmi->uv_mode == DC_PRED) {
1081 n = pmi->palette_size[1];
Yaowu Xuf883b422016-08-30 14:01:10 -07001082 aom_write(w, n > 0,
1083 av1_default_palette_uv_mode_prob[pmi->palette_size[0] > 0]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001084 if (n > 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001085 av1_write_token(w, av1_palette_size_tree,
1086 av1_default_palette_uv_size_prob[bsize - BLOCK_8X8],
1087 &palette_size_encodings[n - 2]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001088 for (i = 0; i < n; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001089 aom_write_literal(w, pmi->palette_colors[PALETTE_MAX_SIZE + i],
1090 cm->bit_depth);
1091 aom_write_literal(w, pmi->palette_colors[2 * PALETTE_MAX_SIZE + i],
1092 cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001093 }
1094 write_uniform(w, n, pmi->palette_first_color_idx[1]);
1095 }
1096 }
1097}
Urvang Joshib100db72016-10-12 16:28:56 -07001098#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001099
Yaowu Xuf883b422016-08-30 14:01:10 -07001100static void pack_inter_mode_mvs(AV1_COMP *cpi, const MODE_INFO *mi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001101#if CONFIG_SUPERTX
1102 int supertx_enabled,
1103#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001104 aom_writer *w) {
1105 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001106#if !CONFIG_REF_MV
1107 const nmv_context *nmvc = &cm->fc->nmvc;
1108#endif
Arild Fuldseth07441162016-08-15 15:07:52 +02001109
1110#if CONFIG_DELTA_Q
1111 MACROBLOCK *const x = &cpi->td.mb;
1112 MACROBLOCKD *const xd = &x->e_mbd;
1113#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001114 const MACROBLOCK *x = &cpi->td.mb;
1115 const MACROBLOCKD *xd = &x->e_mbd;
Arild Fuldseth07441162016-08-15 15:07:52 +02001116#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001117 const struct segmentation *const seg = &cm->seg;
1118 const struct segmentation_probs *const segp = &cm->fc->seg;
1119 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1120 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
1121 const PREDICTION_MODE mode = mbmi->mode;
1122 const int segment_id = mbmi->segment_id;
1123 const BLOCK_SIZE bsize = mbmi->sb_type;
1124 const int allow_hp = cm->allow_high_precision_mv;
1125 const int is_inter = is_inter_block(mbmi);
1126 const int is_compound = has_second_ref(mbmi);
1127 int skip, ref;
1128
1129 if (seg->update_map) {
1130 if (seg->temporal_update) {
1131 const int pred_flag = mbmi->seg_id_predicted;
Yaowu Xuf883b422016-08-30 14:01:10 -07001132 aom_prob pred_prob = av1_get_pred_prob_seg_id(segp, xd);
1133 aom_write(w, pred_flag, pred_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001134 if (!pred_flag) write_segment_id(w, seg, segp, segment_id);
1135 } else {
1136 write_segment_id(w, seg, segp, segment_id);
1137 }
1138 }
1139
1140#if CONFIG_SUPERTX
1141 if (supertx_enabled)
1142 skip = mbmi->skip;
1143 else
1144 skip = write_skip(cm, xd, segment_id, mi, w);
1145#else
1146 skip = write_skip(cm, xd, segment_id, mi, w);
1147#endif // CONFIG_SUPERTX
Arild Fuldseth07441162016-08-15 15:07:52 +02001148#if CONFIG_DELTA_Q
1149 if (cm->delta_q_present_flag) {
Thomas Daviesf6936102016-09-05 16:51:31 +01001150 int mi_row = (-xd->mb_to_top_edge) >> (MI_SIZE_LOG2 + 3);
1151 int mi_col = (-xd->mb_to_left_edge) >> (MI_SIZE_LOG2 + 3);
1152 int super_block_upper_left =
1153 ((mi_row & MAX_MIB_MASK) == 0) && ((mi_col & MAX_MIB_MASK) == 0);
Arild Fuldseth07441162016-08-15 15:07:52 +02001154 if ((bsize != BLOCK_64X64 || skip == 0) && super_block_upper_left) {
Thomas Daviesf6936102016-09-05 16:51:31 +01001155 int reduced_delta_qindex =
1156 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
1157 write_delta_qindex(cm, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001158 xd->prev_qindex = mbmi->current_q_index;
1159 }
1160 }
1161#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001162
1163#if CONFIG_SUPERTX
1164 if (!supertx_enabled)
1165#endif // CONFIG_SUPERTX
1166 if (!segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
Yaowu Xuf883b422016-08-30 14:01:10 -07001167 aom_write(w, is_inter, av1_get_intra_inter_prob(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001168
1169 if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
1170#if CONFIG_SUPERTX
1171 !supertx_enabled &&
1172#endif // CONFIG_SUPERTX
1173 !(is_inter && skip) && !xd->lossless[segment_id]) {
1174#if CONFIG_VAR_TX
1175 if (is_inter) { // This implies skip flag is 0.
1176 const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
1177 const int txb_size = txsize_to_bsize[max_tx_size];
1178 const int bs = num_4x4_blocks_wide_lookup[txb_size];
1179 const int width = num_4x4_blocks_wide_lookup[bsize];
1180 const int height = num_4x4_blocks_high_lookup[bsize];
1181 int idx, idy;
Yue Chena1e48dc2016-08-29 17:29:33 -07001182
1183#if CONFIG_EXT_TX && CONFIG_RECT_TX
Yue Chen49587a72016-09-28 17:09:47 -07001184 if (is_rect_tx_allowed(xd, mbmi)) {
Yue Chena1e48dc2016-08-29 17:29:33 -07001185 int tx_size_cat = inter_tx_size_cat_lookup[bsize];
1186
1187 aom_write(w, is_rect_tx(mbmi->tx_size),
1188 cm->fc->rect_tx_prob[tx_size_cat]);
1189 }
1190
1191 if (is_rect_tx(mbmi->tx_size)) {
1192 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, xd);
1193 } else {
1194#endif // CONFIG_EXT_TX && CONFIG_RECT_TX
1195 for (idy = 0; idy < height; idy += bs)
1196 for (idx = 0; idx < width; idx += bs)
1197 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, idy, idx, w);
1198#if CONFIG_EXT_TX && CONFIG_RECT_TX
1199 }
1200#endif // CONFIG_EXT_TX && CONFIG_RECT_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001201 } else {
1202 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, xd);
1203 write_selected_tx_size(cm, xd, w);
1204 }
1205 } else {
1206 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, xd);
1207#else
1208 write_selected_tx_size(cm, xd, w);
1209#endif
1210 }
1211
1212 if (!is_inter) {
1213 if (bsize >= BLOCK_8X8) {
1214 write_intra_mode(w, mode, cm->fc->y_mode_prob[size_group_lookup[bsize]]);
1215 } else {
1216 int idx, idy;
1217 const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
1218 const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
1219 for (idy = 0; idy < 2; idy += num_4x4_h) {
1220 for (idx = 0; idx < 2; idx += num_4x4_w) {
1221 const PREDICTION_MODE b_mode = mi->bmi[idy * 2 + idx].as_mode;
1222 write_intra_mode(w, b_mode, cm->fc->y_mode_prob[0]);
1223 }
1224 }
1225 }
1226 write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mode]);
1227#if CONFIG_EXT_INTRA
1228 write_intra_angle_info(cm, xd, w);
1229#endif // CONFIG_EXT_INTRA
Urvang Joshib100db72016-10-12 16:28:56 -07001230#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001231 if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
1232 write_palette_mode_info(cm, xd, mi, w);
Urvang Joshib100db72016-10-12 16:28:56 -07001233#endif // CONFIG_PALETTE
hui su5db97432016-10-14 16:10:14 -07001234#if CONFIG_FILTER_INTRA
1235 if (bsize >= BLOCK_8X8) write_filter_intra_mode_info(cm, mbmi, w);
1236#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001237 } else {
1238 int16_t mode_ctx = mbmi_ext->mode_context[mbmi->ref_frame[0]];
1239 write_ref_frames(cm, xd, w);
1240
1241#if CONFIG_REF_MV
1242#if CONFIG_EXT_INTER
1243 if (is_compound)
1244 mode_ctx = mbmi_ext->compound_mode_context[mbmi->ref_frame[0]];
1245 else
1246#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001247 mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
1248 mbmi->ref_frame, bsize, -1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001249#endif
1250
1251 // If segment skip is not enabled code the mode.
1252 if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
1253 if (bsize >= BLOCK_8X8) {
1254#if CONFIG_EXT_INTER
1255 if (is_inter_compound_mode(mode))
1256 write_inter_compound_mode(cm, w, mode, mode_ctx);
1257 else if (is_inter_singleref_mode(mode))
1258#endif // CONFIG_EXT_INTER
1259 write_inter_mode(cm, w, mode,
1260#if CONFIG_REF_MV && CONFIG_EXT_INTER
1261 is_compound,
1262#endif // CONFIG_REF_MV && CONFIG_EXT_INTER
1263 mode_ctx);
1264
1265#if CONFIG_REF_MV
1266 if (mode == NEARMV || mode == NEWMV)
1267 write_drl_idx(cm, mbmi, mbmi_ext, w);
1268#endif
1269 }
1270 }
1271
1272#if !CONFIG_EXT_INTERP && !CONFIG_DUAL_FILTER
1273 write_switchable_interp_filter(cpi, xd, w);
1274#endif // !CONFIG_EXT_INTERP
1275
1276 if (bsize < BLOCK_8X8) {
1277 const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
1278 const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
1279 int idx, idy;
1280 for (idy = 0; idy < 2; idy += num_4x4_h) {
1281 for (idx = 0; idx < 2; idx += num_4x4_w) {
1282 const int j = idy * 2 + idx;
1283 const PREDICTION_MODE b_mode = mi->bmi[j].as_mode;
1284#if CONFIG_REF_MV
1285#if CONFIG_EXT_INTER
1286 if (!is_compound)
1287#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001288 mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
1289 mbmi->ref_frame, bsize, j);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001290#endif
1291#if CONFIG_EXT_INTER
1292 if (is_inter_compound_mode(b_mode))
1293 write_inter_compound_mode(cm, w, b_mode, mode_ctx);
1294 else if (is_inter_singleref_mode(b_mode))
1295#endif // CONFIG_EXT_INTER
1296 write_inter_mode(cm, w, b_mode,
1297#if CONFIG_REF_MV && CONFIG_EXT_INTER
1298 has_second_ref(mbmi),
1299#endif // CONFIG_REF_MV && CONFIG_EXT_INTER
1300 mode_ctx);
1301
1302#if CONFIG_EXT_INTER
1303 if (b_mode == NEWMV || b_mode == NEWFROMNEARMV ||
1304 b_mode == NEW_NEWMV) {
1305#else
1306 if (b_mode == NEWMV) {
1307#endif // CONFIG_EXT_INTER
1308 for (ref = 0; ref < 1 + is_compound; ++ref) {
1309#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001310 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1311 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1312 mbmi_ext->ref_mv_stack[rf_type], ref,
1313 mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001314 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1315#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001316 av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[ref].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001317#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001318 &mi->bmi[j].ref_mv[ref].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001319#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001320 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001321#endif
1322#else
1323#if CONFIG_REF_MV
Yaowu Xuf5bbbfa2016-09-26 09:13:38 -07001324 &mi->bmi[j].pred_mv[ref].as_mv, is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001325#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001326 &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001327#endif // CONFIG_REF_MV
1328#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001329 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001330 }
1331 }
1332#if CONFIG_EXT_INTER
1333 else if (b_mode == NEAREST_NEWMV || b_mode == NEAR_NEWMV) {
1334#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001335 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1336 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1337 mbmi_ext->ref_mv_stack[rf_type], 1,
1338 mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001339 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1340#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001341 av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[1].as_mv,
1342 &mi->bmi[j].ref_mv[1].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001343#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001344 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001345#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001346 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001347 } else if (b_mode == NEW_NEARESTMV || b_mode == NEW_NEARMV) {
1348#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001349 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1350 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1351 mbmi_ext->ref_mv_stack[rf_type], 0,
1352 mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001353 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1354#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001355 av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[0].as_mv,
1356 &mi->bmi[j].ref_mv[0].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001357#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001358 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001359#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001360 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001361 }
1362#endif // CONFIG_EXT_INTER
1363 }
1364 }
1365 } else {
1366#if CONFIG_EXT_INTER
1367 if (mode == NEWMV || mode == NEWFROMNEARMV || mode == NEW_NEWMV) {
1368#else
1369 if (mode == NEWMV) {
1370#endif // CONFIG_EXT_INTER
1371 int_mv ref_mv;
1372 for (ref = 0; ref < 1 + is_compound; ++ref) {
1373#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001374 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1375 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1376 mbmi_ext->ref_mv_stack[rf_type], ref,
1377 mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001378 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1379#endif
1380 ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0];
1381#if CONFIG_EXT_INTER
1382 if (mode == NEWFROMNEARMV)
Yaowu Xuf883b422016-08-30 14:01:10 -07001383 av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv,
1384 &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][1].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001385#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001386 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001387#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001388 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001389 else
1390#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001391 av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001392#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001393 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001394#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001395 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001396 }
1397#if CONFIG_EXT_INTER
1398 } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
1399#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001400 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1401 int nmv_ctx =
1402 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1403 mbmi_ext->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001404 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1405#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001406 av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv,
1407 &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001408#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001409 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001410#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001411 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001412 } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
1413#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001414 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1415 int nmv_ctx =
1416 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1417 mbmi_ext->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001418 const nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx];
1419#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001420 av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv,
1421 &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001422#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07001423 is_compound,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001424#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001425 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001426#endif // CONFIG_EXT_INTER
1427 }
1428 }
1429
1430#if CONFIG_EXT_INTER
1431 if (cpi->common.reference_mode != COMPOUND_REFERENCE &&
1432#if CONFIG_SUPERTX
1433 !supertx_enabled &&
1434#endif // CONFIG_SUPERTX
1435 is_interintra_allowed(mbmi)) {
1436 const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
1437 const int bsize_group = size_group_lookup[bsize];
Yaowu Xuf883b422016-08-30 14:01:10 -07001438 aom_write(w, interintra, cm->fc->interintra_prob[bsize_group]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001439 if (interintra) {
1440 write_interintra_mode(w, mbmi->interintra_mode,
1441 cm->fc->interintra_mode_prob[bsize_group]);
1442 if (is_interintra_wedge_used(bsize)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001443 aom_write(w, mbmi->use_wedge_interintra,
1444 cm->fc->wedge_interintra_prob[bsize]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001445 if (mbmi->use_wedge_interintra) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001446 aom_write_literal(w, mbmi->interintra_wedge_index,
1447 get_wedge_bits_lookup(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001448 assert(mbmi->interintra_wedge_sign == 0);
1449 }
1450 }
1451 }
1452 }
1453#endif // CONFIG_EXT_INTER
1454
Yue Chencb60b182016-10-13 15:18:22 -07001455#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07001456#if CONFIG_SUPERTX
1457 if (!supertx_enabled)
1458#endif // CONFIG_SUPERTX
1459#if CONFIG_EXT_INTER
1460 if (mbmi->ref_frame[1] != INTRA_FRAME)
1461#endif // CONFIG_EXT_INTER
Yue Chencb60b182016-10-13 15:18:22 -07001462 if (is_motion_variation_allowed(mbmi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001463 // TODO(debargha): Might want to only emit this if SEG_LVL_SKIP
1464 // is not active, and assume SIMPLE_TRANSLATION in the decoder if
1465 // it is active.
Yue Chencb60b182016-10-13 15:18:22 -07001466 assert(mbmi->motion_mode < MOTION_MODES);
1467 av1_write_token(w, av1_motion_mode_tree,
1468 cm->fc->motion_mode_prob[bsize],
1469 &motion_mode_encodings[mbmi->motion_mode]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001470 }
Yue Chencb60b182016-10-13 15:18:22 -07001471#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07001472
1473#if CONFIG_EXT_INTER
1474 if (cpi->common.reference_mode != SINGLE_REFERENCE &&
1475 is_inter_compound_mode(mbmi->mode) &&
Yue Chencb60b182016-10-13 15:18:22 -07001476#if CONFIG_MOTION_VAR
1477 !(is_motion_variation_allowed(mbmi) &&
1478 mbmi->motion_mode != SIMPLE_TRANSLATION) &&
1479#endif // CONFIG_MOTION_VAR
Yaowu Xuc27fc142016-08-22 16:08:15 -07001480 is_interinter_wedge_used(bsize)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001481 aom_write(w, mbmi->use_wedge_interinter,
1482 cm->fc->wedge_interinter_prob[bsize]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001483 if (mbmi->use_wedge_interinter) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001484 aom_write_literal(w, mbmi->interinter_wedge_index,
1485 get_wedge_bits_lookup(bsize));
1486 aom_write_bit(w, mbmi->interinter_wedge_sign);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001487 }
1488 }
1489#endif // CONFIG_EXT_INTER
1490
1491#if CONFIG_EXT_INTERP || CONFIG_DUAL_FILTER
1492 write_switchable_interp_filter(cpi, xd, w);
1493#endif // CONFIG_EXT_INTERP
1494 }
1495
1496 if (!FIXED_TX_TYPE) {
1497#if CONFIG_EXT_TX
1498 if (get_ext_tx_types(mbmi->tx_size, bsize, is_inter) > 1 &&
1499 cm->base_qindex > 0 && !mbmi->skip &&
1500#if CONFIG_SUPERTX
1501 !supertx_enabled &&
1502#endif // CONFIG_SUPERTX
1503 !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
1504 int eset = get_ext_tx_set(mbmi->tx_size, bsize, is_inter);
1505 if (is_inter) {
1506 assert(ext_tx_used_inter[eset][mbmi->tx_type]);
1507 if (eset > 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07001508 av1_write_token(
1509 w, av1_ext_tx_inter_tree[eset],
Yaowu Xuc27fc142016-08-22 16:08:15 -07001510 cm->fc->inter_ext_tx_prob[eset][txsize_sqr_map[mbmi->tx_size]],
1511 &ext_tx_inter_encodings[eset][mbmi->tx_type]);
1512 } else if (ALLOW_INTRA_EXT_TX) {
1513 if (eset > 0)
Yaowu Xuf883b422016-08-30 14:01:10 -07001514 av1_write_token(
1515 w, av1_ext_tx_intra_tree[eset],
Yaowu Xuc27fc142016-08-22 16:08:15 -07001516 cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode],
1517 &ext_tx_intra_encodings[eset][mbmi->tx_type]);
1518 }
1519 }
1520#else
1521 if (mbmi->tx_size < TX_32X32 && cm->base_qindex > 0 && !mbmi->skip &&
1522#if CONFIG_SUPERTX
1523 !supertx_enabled &&
1524#endif // CONFIG_SUPERTX
1525 !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
1526 if (is_inter) {
Nathan E. Egge93878c42016-05-03 10:01:32 -04001527#if CONFIG_DAALA_EC
Nathan E. Egge56eeaa52016-07-25 10:23:33 -04001528 aom_write_symbol(w, av1_ext_tx_ind[mbmi->tx_type],
1529 cm->fc->inter_ext_tx_cdf[mbmi->tx_size], TX_TYPES);
Nathan E. Egge93878c42016-05-03 10:01:32 -04001530#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001531 av1_write_token(w, av1_ext_tx_tree,
1532 cm->fc->inter_ext_tx_prob[mbmi->tx_size],
1533 &ext_tx_encodings[mbmi->tx_type]);
Nathan E. Egge93878c42016-05-03 10:01:32 -04001534#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001535 } else {
Nathan E. Egge7c5b4c12016-04-26 12:31:14 -04001536#if CONFIG_DAALA_EC
Nathan E. Egge56eeaa52016-07-25 10:23:33 -04001537 aom_write_symbol(
Nathan E. Egge93878c42016-05-03 10:01:32 -04001538 w, av1_ext_tx_ind[mbmi->tx_type],
1539 cm->fc->intra_ext_tx_cdf[mbmi->tx_size]
1540 [intra_mode_to_tx_type_context[mbmi->mode]],
1541 TX_TYPES);
Nathan E. Egge7c5b4c12016-04-26 12:31:14 -04001542#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001543 av1_write_token(
1544 w, av1_ext_tx_tree,
clang-format67948d32016-09-07 22:40:40 -07001545 cm->fc
1546 ->intra_ext_tx_prob[mbmi->tx_size]
1547 [intra_mode_to_tx_type_context[mbmi->mode]],
Yaowu Xuc27fc142016-08-22 16:08:15 -07001548 &ext_tx_encodings[mbmi->tx_type]);
Nathan E. Egge7c5b4c12016-04-26 12:31:14 -04001549#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001550 }
1551 } else {
1552 if (!mbmi->skip) {
1553#if CONFIG_SUPERTX
1554 if (!supertx_enabled)
1555#endif // CONFIG_SUPERTX
1556 assert(mbmi->tx_type == DCT_DCT);
1557 }
1558 }
1559#endif // CONFIG_EXT_TX
1560 }
1561}
1562
// Writes the mode info for one block of an intra-only (key) frame:
// segment id, skip flag (and, with CONFIG_DELTA_Q, the per-superblock
// delta-qindex), transform size, luma intra mode(s), chroma intra mode,
// and the intra transform-type signaling. The order of the aom_write*
// calls here is the bitstream syntax and must match the decoder exactly.
#if CONFIG_DELTA_Q
// With delta-q, xd is non-const because xd->prev_qindex is updated below.
static void write_mb_modes_kf(const AV1_COMMON *cm, MACROBLOCKD *xd,
                              MODE_INFO **mi_8x8, aom_writer *w) {
  int skip;
#else
static void write_mb_modes_kf(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                              MODE_INFO **mi_8x8, aom_writer *w) {
#endif
  const struct segmentation *const seg = &cm->seg;
  const struct segmentation_probs *const segp = &cm->fc->seg;
  const MODE_INFO *const mi = mi_8x8[0];
  const MODE_INFO *const above_mi = xd->above_mi;
  const MODE_INFO *const left_mi = xd->left_mi;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;

  if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id);

#if CONFIG_DELTA_Q
  skip = write_skip(cm, xd, mbmi->segment_id, mi, w);
  if (cm->delta_q_present_flag) {
    // Derive this block's mi position from the distance to the frame edges
    // (mb_to_*_edge is in 1/8-pel units; >> 6 converts to mi units).
    int mi_row = (-xd->mb_to_top_edge) >> 6;
    int mi_col = (-xd->mb_to_left_edge) >> 6;
    // Delta-q is coded at most once per 64x64 superblock, at its
    // top-left 8x8 block.
    int super_block_upper_left = ((mi_row & 7) == 0) && ((mi_col & 7) == 0);
    if ((bsize != BLOCK_64X64 || skip == 0) && super_block_upper_left) {
      // Only the delta scaled down by delta_q_res is transmitted.
      int reduced_delta_qindex =
          (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
      write_delta_qindex(cm, reduced_delta_qindex, w);
      xd->prev_qindex = mbmi->current_q_index;
    }
  }
#else
  write_skip(cm, xd, mbmi->segment_id, mi, w);
#endif

  // Transform size is only signaled when selectable and not forced by
  // lossless coding.
  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !xd->lossless[mbmi->segment_id])
    write_selected_tx_size(cm, xd, w);

  if (bsize >= BLOCK_8X8) {
    write_intra_mode(w, mbmi->mode,
                     get_y_mode_probs(cm, mi, above_mi, left_mi, 0));
  } else {
    // Sub-8x8: one luma mode per 4x4 sub-block, in raster order.
    const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
    const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
    int idx, idy;

    for (idy = 0; idy < 2; idy += num_4x4_h) {
      for (idx = 0; idx < 2; idx += num_4x4_w) {
        const int block = idy * 2 + idx;
        write_intra_mode(w, mi->bmi[block].as_mode,
                         get_y_mode_probs(cm, mi, above_mi, left_mi, block));
      }
    }
  }

  // Chroma mode is conditioned on the (first) luma mode.
  write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mbmi->mode]);
#if CONFIG_EXT_INTRA
  write_intra_angle_info(cm, xd, w);
#endif  // CONFIG_EXT_INTRA
#if CONFIG_PALETTE
  if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
    write_palette_mode_info(cm, xd, mi, w);
#endif  // CONFIG_PALETTE
#if CONFIG_FILTER_INTRA
  if (bsize >= BLOCK_8X8) write_filter_intra_mode_info(cm, mbmi, w);
#endif  // CONFIG_FILTER_INTRA

  if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
    // Transform type is only coded when more than one type is available,
    // the block is not skipped, and the segment does not force skip.
    if (get_ext_tx_types(mbmi->tx_size, bsize, 0) > 1 && cm->base_qindex > 0 &&
        !mbmi->skip &&
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP) &&
        ALLOW_INTRA_EXT_TX) {
      int eset = get_ext_tx_set(mbmi->tx_size, bsize, 0);
      if (eset > 0)
        av1_write_token(
            w, av1_ext_tx_intra_tree[eset],
            cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode],
            &ext_tx_intra_encodings[eset][mbmi->tx_type]);
    }
#else
    if (mbmi->tx_size < TX_32X32 && cm->base_qindex > 0 && !mbmi->skip &&
        !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
      av1_write_token(
          w, av1_ext_tx_tree,
          cm->fc->intra_ext_tx_prob[mbmi->tx_size]
                                   [intra_mode_to_tx_type_context[mbmi->mode]],
          &ext_tx_encodings[mbmi->tx_type]);
    }
#endif  // CONFIG_EXT_TX
  }
}
1656
// Thin call wrapper so callers can always pass supertx_enabled; when
// CONFIG_SUPERTX is off the argument is silently dropped instead of
// changing every call site.
#if CONFIG_SUPERTX
#define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                              mi_row, mi_col)                              \
  write_modes_b(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col)
#else
#define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                              mi_row, mi_col)                              \
  write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col)
#endif  // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001666
// Packs one mode-info block into the bitstream: the mode info itself
// (key-frame or inter path), then any palette tokens, then the residual
// coefficient tokens for each plane. *tok is advanced past the consumed
// tokens; each plane's token run must end with EOSB_TOKEN.
static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
                          aom_writer *w, const TOKENEXTRA **tok,
                          const TOKENEXTRA *const tok_end,
#if CONFIG_SUPERTX
                          int supertx_enabled,
#endif
                          int mi_row, int mi_col) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  MODE_INFO *m;
  int plane;
  int bh, bw;
#if CONFIG_RANS
  (void)tok;
  (void)tok_end;
  (void)plane;
#endif  // CONFIG_RANS

  xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
  m = xd->mi[0];

  assert(m->mbmi.sb_type <= cm->sb_size);

  bh = num_8x8_blocks_high_lookup[m->mbmi.sb_type];
  bw = num_8x8_blocks_wide_lookup[m->mbmi.sb_type];

  cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);

  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cm, xd, xd->mi, w);
  } else {
#if CONFIG_VAR_TX
    xd->above_txfm_context = cm->above_txfm_context + mi_col;
    xd->left_txfm_context =
        xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
#endif
#if CONFIG_EXT_INTERP
    // av1_is_interp_needed needs the ref frame buffers set up to look
    // up if they are scaled. av1_is_interp_needed is in turn needed by
    // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
    set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
#endif  // CONFIG_EXT_INTERP
#if 0
    // NOTE(zoeliu): For debug
    if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
      const PREDICTION_MODE mode = m->mbmi.mode;
      const int segment_id = m->mbmi.segment_id;
      const BLOCK_SIZE bsize = m->mbmi.sb_type;

      // For sub8x8, simply dump out the first sub8x8 block info
      const PREDICTION_MODE b_mode =
          (bsize < BLOCK_8X8) ? m->bmi[0].as_mode : -1;
      const int mv_x = (bsize < BLOCK_8X8) ?
          m->bmi[0].as_mv[0].as_mv.row : m->mbmi.mv[0].as_mv.row;
      const int mv_y = (bsize < BLOCK_8X8) ?
          m->bmi[0].as_mv[0].as_mv.col : m->mbmi.mv[0].as_mv.col;

      printf("Before pack_inter_mode_mvs(): "
             "Frame=%d, (mi_row,mi_col)=(%d,%d), "
             "mode=%d, segment_id=%d, bsize=%d, b_mode=%d, "
             "mv[0]=(%d, %d), ref[0]=%d, ref[1]=%d\n",
             cm->current_video_frame, mi_row, mi_col,
             mode, segment_id, bsize, b_mode, mv_x, mv_y,
             m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
    }
#endif  // 0
    pack_inter_mode_mvs(cpi, m,
#if CONFIG_SUPERTX
                        supertx_enabled,
#endif
                        w);
  }

#if CONFIG_PALETTE
  // Palette color-index tokens for luma (plane 0) and chroma (plane 1).
  for (plane = 0; plane <= 1; ++plane) {
    if (m->mbmi.palette_mode_info.palette_size[plane] > 0) {
      const int rows = (4 * num_4x4_blocks_high_lookup[m->mbmi.sb_type]) >>
                       (xd->plane[plane].subsampling_y);
      const int cols = (4 * num_4x4_blocks_wide_lookup[m->mbmi.sb_type]) >>
                       (xd->plane[plane].subsampling_x);
      assert(*tok < tok_end);
      // rows * cols - 1 indices follow the first (implicit) one.
      pack_palette_tokens(w, tok, m->mbmi.palette_mode_info.palette_size[plane],
                          rows * cols - 1);
      assert(*tok < tok_end + m->mbmi.skip);
    }
  }
#endif  // CONFIG_PALETTE

#if CONFIG_SUPERTX
  // Under supertx the residual is coded once for the whole super-block
  // by the caller, so no per-block tokens are written here.
  if (supertx_enabled) return;
#endif  // CONFIG_SUPERTX

  if (!m->mbmi.skip) {
    assert(*tok < tok_end);
    for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
#if CONFIG_VAR_TX
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      MB_MODE_INFO *mbmi = &m->mbmi;
      BLOCK_SIZE bsize = mbmi->sb_type;
      const BLOCK_SIZE plane_bsize =
          get_plane_block_size(AOMMAX(bsize, BLOCK_8X8), pd);

      const int num_4x4_w = num_4x4_blocks_wide_lookup[plane_bsize];
      const int num_4x4_h = num_4x4_blocks_high_lookup[plane_bsize];
      int row, col;
#if CONFIG_EXT_TX && CONFIG_RECT_TX
      TX_SIZE tx_size =
          plane ? get_uv_tx_size(mbmi, &xd->plane[plane]) : mbmi->tx_size;

      // Rectangular transforms use the fixed-size path below instead of
      // the recursive var-tx token packing.
      if (is_inter_block(mbmi) && !is_rect_tx(tx_size)) {
#else
      if (is_inter_block(mbmi)) {
#endif
        // Inter blocks: recursive transform-split tokens, walked in
        // max_tx_size-sized steps over the plane.
        const TX_SIZE max_tx_size = max_txsize_lookup[plane_bsize];
        const BLOCK_SIZE txb_size = txsize_to_bsize[max_tx_size];
        int block = 0;
        const int step = num_4x4_blocks_txsize_lookup[max_tx_size];
        bw = num_4x4_blocks_wide_lookup[txb_size];
        for (row = 0; row < num_4x4_h; row += bw) {
          for (col = 0; col < num_4x4_w; col += bw) {
            pack_txb_tokens(w, tok, tok_end, xd, mbmi, plane, plane_bsize,
                            cm->bit_depth, block, row, col, max_tx_size);
            block += step;
          }
        }
      } else {
        // Intra (or rect-tx) blocks: one fixed transform size per plane.
        TX_SIZE tx = plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane])
                           : m->mbmi.tx_size;
        BLOCK_SIZE txb_size = txsize_to_bsize[tx];
        bw = num_4x4_blocks_wide_lookup[txb_size];
        bh = num_4x4_blocks_high_lookup[txb_size];

        for (row = 0; row < num_4x4_h; row += bh)
          for (col = 0; col < num_4x4_w; col += bw)
            pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx);
      }
#else
      TX_SIZE tx =
          plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane]) : m->mbmi.tx_size;
      pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx);
#endif  // CONFIG_VAR_TX
      // Every plane's token stream is terminated by an EOSB token.
      assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
      (*tok)++;
    }
  }
}
1814
// Signals the partition type of a block at (mi_row, mi_col). When the block
// extends past the right or bottom frame edge, only the split/non-split
// choice is coded (the other dimension is implied), and at the corner
// nothing is coded at all since only PARTITION_SPLIT is legal.
static void write_partition(const AV1_COMMON *const cm,
                            const MACROBLOCKD *const xd, int hbs, int mi_row,
                            int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize,
                            aom_writer *w) {
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
  const aom_prob *const probs = cm->fc->partition_prob[ctx];
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows && has_cols) {
#if CONFIG_EXT_PARTITION_TYPES
    // Extended partition types only exist above 8x8; 8x8 keeps the
    // basic 4-way tree.
    if (bsize <= BLOCK_8X8)
      av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]);
    else
      av1_write_token(w, av1_ext_partition_tree, probs,
                      &ext_partition_encodings[p]);
#else
#if CONFIG_DAALA_EC
    aom_write_symbol(w, p, cm->fc->partition_cdf[ctx], PARTITION_TYPES);
#else
    av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]);
#endif
#endif  // CONFIG_EXT_PARTITION_TYPES
  } else if (!has_rows && has_cols) {
    // Bottom edge: only HORZ vs SPLIT is possible; one bit.
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    aom_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows && !has_cols) {
    // Right edge: only VERT vs SPLIT is possible; one bit.
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    aom_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    // Bottom-right corner: SPLIT is implied, nothing to write.
    assert(p == PARTITION_SPLIT);
  }
}
1848
// Same pattern as write_modes_b_wrapper: drop the supertx_enabled argument
// at the call site when CONFIG_SUPERTX is compiled out.
#if CONFIG_SUPERTX
#define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                               mi_row, mi_col, bsize)                       \
  write_modes_sb(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col, \
                 bsize)
#else
#define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
                               mi_row, mi_col, bsize)                       \
  write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, bsize)
#endif  // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07001859
// Recursively packs a superblock subtree: writes the partition symbol,
// (with SUPERTX) the supertx flag plus its tx-type and token payload,
// then either the leaf block(s) or the four/three-way recursion, and
// finally updates the partition context and emits per-64x64 CLPF and
// DERING side information.
static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile,
                           aom_writer *const w, const TOKENEXTRA **tok,
                           const TOKENEXTRA *const tok_end,
#if CONFIG_SUPERTX
                           int supertx_enabled,
#endif
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  const AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;
  const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
  const BLOCK_SIZE subsize = get_subsize(bsize, partition);
#if CONFIG_SUPERTX
  const int mi_offset = mi_row * cm->mi_stride + mi_col;
  MB_MODE_INFO *mbmi;
  // Tokens for a supertx block are packed once, at the level where the
  // supertx flag was written (i.e. where supertx was not yet enabled).
  const int pack_token = !supertx_enabled;
  TX_SIZE supertx_size;
  int plane;
#endif

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;

  write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
#if CONFIG_SUPERTX
  mbmi = &cm->mi_grid_visible[mi_offset]->mbmi;
  xd->mi = cm->mi_grid_visible + mi_offset;
  set_mi_row_col(xd, tile, mi_row, num_8x8_blocks_high_lookup[bsize], mi_col,
                 num_8x8_blocks_wide_lookup[bsize], cm->mi_rows, cm->mi_cols);
  // The supertx flag is signaled once per eligible partitioned block on
  // inter frames; lossless coding excludes it.
  if (!supertx_enabled && !frame_is_intra_only(cm) &&
      partition != PARTITION_NONE && bsize <= MAX_SUPERTX_BLOCK_SIZE &&
      !xd->lossless[0]) {
    aom_prob prob;
    supertx_size = max_txsize_lookup[bsize];
    prob = cm->fc->supertx_prob[partition_supertx_context_lookup[partition]]
                               [supertx_size];
    supertx_enabled = (xd->mi[0]->mbmi.tx_size == supertx_size);
    aom_write(w, supertx_enabled, prob);
  }
#endif  // CONFIG_SUPERTX
  if (subsize < BLOCK_8X8) {
    // Sub-8x8 leaves are not partitioned further; write the single block.
    write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row,
                          mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        // The second half may fall outside the frame; skip it then.
        if (mi_row + hbs < cm->mi_rows)
          write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                                mi_row + hbs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        if (mi_col + hbs < cm->mi_cols)
          write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                                mi_row, mi_col + hbs);
        break;
      case PARTITION_SPLIT:
        // Recurse into the four quadrants in raster order.
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row, mi_col, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row, mi_col + hbs, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row + hbs, mi_col, subsize);
        write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                               mi_row + hbs, mi_col + hbs, subsize);
        break;
#if CONFIG_EXT_PARTITION_TYPES
      // A/B partitions: two small blocks plus one wide/tall block.
      case PARTITION_HORZ_A:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        break;
      case PARTITION_HORZ_B:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col + hbs);
        break;
      case PARTITION_VERT_A:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        break;
      case PARTITION_VERT_B:
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row, mi_col + hbs);
        write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
                              mi_row + hbs, mi_col + hbs);
        break;
#endif  // CONFIG_EXT_PARTITION_TYPES
      default: assert(0);
    }
  }
#if CONFIG_SUPERTX
  // At the level that signaled supertx, write the supertx transform type
  // and the single joint token payload for the whole block.
  if (partition != PARTITION_NONE && supertx_enabled && pack_token) {
    int skip;
    xd->mi = cm->mi_grid_visible + mi_offset;
    supertx_size = mbmi->tx_size;
    set_mi_row_col(xd, tile, mi_row, num_8x8_blocks_high_lookup[bsize], mi_col,
                   num_8x8_blocks_wide_lookup[bsize], cm->mi_rows, cm->mi_cols);

    assert(IMPLIES(!cm->seg.enabled, mbmi->segment_id_supertx == 0));
    assert(mbmi->segment_id_supertx < MAX_SEGMENTS);

    skip = write_skip(cm, xd, mbmi->segment_id_supertx, xd->mi[0], w);
#if CONFIG_EXT_TX
    if (get_ext_tx_types(supertx_size, bsize, 1) > 1 && !skip) {
      int eset = get_ext_tx_set(supertx_size, bsize, 1);
      if (eset > 0) {
        av1_write_token(w, av1_ext_tx_inter_tree[eset],
                        cm->fc->inter_ext_tx_prob[eset][supertx_size],
                        &ext_tx_inter_encodings[eset][mbmi->tx_type]);
      }
    }
#else
    if (supertx_size < TX_32X32 && !skip) {
      av1_write_token(w, av1_ext_tx_tree,
                      cm->fc->inter_ext_tx_prob[supertx_size],
                      &ext_tx_encodings[mbmi->tx_type]);
    }
#endif  // CONFIG_EXT_TX

    if (!skip) {
      assert(*tok < tok_end);
      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        const int mbmi_txb_size = txsize_to_bsize[mbmi->tx_size];
        const int num_4x4_w = num_4x4_blocks_wide_lookup[mbmi_txb_size];
        const int num_4x4_h = num_4x4_blocks_high_lookup[mbmi_txb_size];
        int row, col;
        TX_SIZE tx =
            plane ? get_uv_tx_size(mbmi, &xd->plane[plane]) : mbmi->tx_size;
        BLOCK_SIZE txb_size = txsize_to_bsize[tx];
        int bw = num_4x4_blocks_wide_lookup[txb_size];

        for (row = 0; row < num_4x4_h; row += bw)
          for (col = 0; col < num_4x4_w; col += bw)
            pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx);
        // Each plane's run ends with EOSB, same as the non-supertx path.
        assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
        (*tok)++;
      }
    }
  }
#endif  // CONFIG_SUPERTX

// update partition context
#if CONFIG_EXT_PARTITION_TYPES
  update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
#else
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);

// NOTE(review): the CLPF and DERING signaling below sits inside the #else
// branch of CONFIG_EXT_PARTITION_TYPES, so with EXT_PARTITION_TYPES enabled
// these per-64x64 bits are never written. Confirm against the decoder
// whether this nesting is intentional or the #endif should close earlier.
#if CONFIG_CLPF
  if (bsize == BLOCK_64X64 && cm->clpf_blocks && cm->clpf_strength_y &&
      cm->clpf_size != CLPF_NOSIZE) {
    // Indices of the four CLPF filter blocks covering this 64x64 SB.
    const int tl = mi_row * MI_SIZE / MIN_FB_SIZE * cm->clpf_stride +
                   mi_col * MI_SIZE / MIN_FB_SIZE;
    const int tr = tl + 1;
    const int bl = tl + cm->clpf_stride;
    const int br = tr + cm->clpf_stride;

    // Up to four bits per SB.
    // When clpf_size indicates a size larger than the SB size
    // (CLPF_128X128), one bit for every fourth SB will be transmitted
    // regardless of skip blocks.
    if (cm->clpf_blocks[tl] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[tl], 1);

    if (mi_col + MI_SIZE / 2 < cm->mi_cols &&
        cm->clpf_blocks[tr] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[tr], 1);

    if (mi_row + MI_SIZE / 2 < cm->mi_rows &&
        cm->clpf_blocks[bl] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[bl], 1);

    if (mi_row + MI_SIZE / 2 < cm->mi_rows &&
        mi_col + MI_SIZE / 2 < cm->mi_cols &&
        cm->clpf_blocks[br] != CLPF_NOFLAG)
      aom_write_literal(w, cm->clpf_blocks[br], 1);
  }
#endif

#if CONFIG_DERING
  // Per-64x64 dering gain, skipped when the whole SB is skip-coded.
  if (bsize == BLOCK_64X64 && cm->dering_level != 0 &&
      !sb_all_skip(cm, mi_row, mi_col)) {
    aom_write_literal(
        w,
        cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain,
        DERING_REFINEMENT_BITS);
  }
#endif
#endif  // CONFIG_EXT_PARTITION_TYPES
}
2070
Yaowu Xuf883b422016-08-30 14:01:10 -07002071static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile,
2072 aom_writer *const w, const TOKENEXTRA **tok,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002073 const TOKENEXTRA *const tok_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002074 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002075 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
2076 const int mi_row_start = tile->mi_row_start;
2077 const int mi_row_end = tile->mi_row_end;
2078 const int mi_col_start = tile->mi_col_start;
2079 const int mi_col_end = tile->mi_col_end;
2080 int mi_row, mi_col;
Yaowu Xuf883b422016-08-30 14:01:10 -07002081 av1_zero_above_context(cm, mi_col_start, mi_col_end);
Arild Fuldseth07441162016-08-15 15:07:52 +02002082#if CONFIG_DELTA_Q
2083 if (cpi->common.delta_q_present_flag) {
2084 xd->prev_qindex = cpi->common.base_qindex;
2085 }
2086#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002087
2088 for (mi_row = mi_row_start; mi_row < mi_row_end; mi_row += cm->mib_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002089 av1_zero_left_context(xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002090
2091 for (mi_col = mi_col_start; mi_col < mi_col_end; mi_col += cm->mib_size) {
2092 write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, 0, mi_row, mi_col,
2093 cm->sb_size);
2094 }
2095 }
2096}
2097
Yaowu Xuf883b422016-08-30 14:01:10 -07002098static void build_tree_distribution(AV1_COMP *cpi, TX_SIZE tx_size,
2099 av1_coeff_stats *coef_branch_ct,
2100 av1_coeff_probs_model *coef_probs) {
2101 av1_coeff_count *coef_counts = cpi->td.rd_counts.coef_counts[tx_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002102 unsigned int(*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
2103 cpi->common.counts.eob_branch[tx_size];
2104 int i, j, k, l, m;
2105
2106 for (i = 0; i < PLANE_TYPES; ++i) {
2107 for (j = 0; j < REF_TYPES; ++j) {
2108 for (k = 0; k < COEF_BANDS; ++k) {
2109 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002110 av1_tree_probs_from_distribution(av1_coef_tree,
2111 coef_branch_ct[i][j][k][l],
2112 coef_counts[i][j][k][l]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002113 coef_branch_ct[i][j][k][l][0][1] =
2114 eob_branch_ct[i][j][k][l] - coef_branch_ct[i][j][k][l][0][0];
2115 for (m = 0; m < UNCONSTRAINED_NODES; ++m)
2116 coef_probs[i][j][k][l][m] =
2117 get_binary_prob(coef_branch_ct[i][j][k][l][m][0],
2118 coef_branch_ct[i][j][k][l][m][1]);
2119 }
2120 }
2121 }
2122 }
2123}
2124
// Entropy-codes the coefficient-probability updates for one transform size.
// Two strategies, chosen by the speed feature sf.use_fast_coef_updates:
//  - TWO_LOOP: a dry-run pass measures the total bit savings first; if
//    nothing is worth updating (or net savings are negative) a single 0 bit
//    is written and the function returns.  Otherwise a 1 bit is written and
//    a second pass emits, per node, an update flag and (if set) the coded
//    probability diff.
//  - ONE_LOOP_REDUCED: a single pass that writes nothing until the first
//    update is found, then back-fills the zero flags skipped so far.
// Accepted new probabilities are committed into cpi->common.fc->coef_probs.
static void update_coef_probs_common(aom_writer *const bc, AV1_COMP *cpi,
                                     TX_SIZE tx_size,
                                     av1_coeff_stats *frame_branch_ct,
                                     av1_coeff_probs_model *new_coef_probs) {
  av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
  const aom_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  int stepsize = cpi->sf.coeff_prob_appx_step;

  switch (cpi->sf.use_fast_coef_updates) {
    case TWO_LOOP: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = { 0, 0 };
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                const aom_prob oldp = old_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;
                // The pivot node uses the model-based search (it implies the
                // constrained nodes); other nodes use the plain search.
                if (t == PIVOT_NODE)
                  s = av1_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_coef_probs[i][j][k][l], &newp, upd, stepsize);
                else
                  s = av1_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp) u = 1;
                // Every node costs one "update?" flag; only actual updates
                // recoup that cost via their savings s.
                if (u)
                  savings += s - (int)(av1_cost_zero(upd));
                else
                  savings -= (int)(av1_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        aom_write_bit(bc, 0);
        return;
      }
      aom_write_bit(bc, 1);
      // Second pass: repeat the search and actually emit the updates.
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = av1_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_coef_probs[i][j][k][l], &newp, upd, stepsize);
                else
                  s = av1_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd);
                if (s > 0 && newp != *oldp) u = 1;
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case ONE_LOOP_REDUCED: {
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;

                if (t == PIVOT_NODE) {
                  s = av1_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_coef_probs[i][j][k][l], &newp, upd, stepsize);
                } else {
                  s = av1_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd);
                }

                if (s > 0 && newp != *oldp) u = 1;
                updates += u;
                // Before the first update, just count skipped nodes; no bits
                // are written yet so a fully-empty update costs one bit.
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  aom_write_bit(bc, 1);
                  // Back-fill the zero flags for the nodes skipped above.
                  for (v = 0; v < noupdates_before_first; ++v)
                    aom_write(bc, 0, upd);
                }
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        aom_write_bit(bc, 0);  // no updates
      }
      return;
    }
    default: assert(0);
  }
}
2261
2262#if CONFIG_ENTROPY
2263// Calculate the token counts between subsequent subframe updates.
static void get_coef_counts_diff(AV1_COMP *cpi, int index,
                                 av1_coeff_count coef_counts[TX_SIZES]
                                                            [PLANE_TYPES],
                                 unsigned int eob_counts[TX_SIZES][PLANE_TYPES]
                                                        [REF_TYPES][COEF_BANDS]
                                                        [COEFF_CONTEXTS]) {
  // Output: coef_counts / eob_counts receive the counts accumulated during
  // subframe `index` only, i.e. buffer[index + 1] - buffer[index], where the
  // final interval (index == max_idx) is diffed against the running
  // whole-frame totals instead of a next buffer entry.
  int i, j, k, l, m, tx_size, val;
  const int max_idx = cpi->common.coef_probs_update_idx;
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const int max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  const SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats;

  assert(max_idx < COEF_PROBS_BUFS);

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    for (i = 0; i < PLANE_TYPES; ++i)
      for (j = 0; j < REF_TYPES; ++j)
        for (k = 0; k < COEF_BANDS; ++k)
          for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
            // EOB-branch count for this subframe interval.
            if (index == max_idx) {
              val =
                  cpi->common.counts.eob_branch[tx_size][i][j][k][l] -
                  subframe_stats->eob_counts_buf[max_idx][tx_size][i][j][k][l];
            } else {
              val = subframe_stats->eob_counts_buf[index + 1][tx_size][i][j][k]
                                                  [l] -
                    subframe_stats->eob_counts_buf[index][tx_size][i][j][k][l];
            }
            // Buffers are cumulative snapshots, so each diff must be >= 0.
            assert(val >= 0);
            eob_counts[tx_size][i][j][k][l] = val;

            // Per-token counts for the same interval.
            for (m = 0; m < ENTROPY_TOKENS; ++m) {
              if (index == max_idx) {
                val = cpi->td.rd_counts.coef_counts[tx_size][i][j][k][l][m] -
                      subframe_stats->coef_counts_buf[max_idx][tx_size][i][j][k]
                                                     [l][m];
              } else {
                val = subframe_stats->coef_counts_buf[index + 1][tx_size][i][j]
                                                     [k][l][m] -
                      subframe_stats->coef_counts_buf[index][tx_size][i][j][k]
                                                     [l][m];
              }
              assert(val >= 0);
              coef_counts[tx_size][i][j][k][l][m] = val;
            }
          }
}
2311
// Subframe variant of update_coef_probs_common() (CONFIG_ENTROPY): the
// savings search runs jointly over all buffered per-subframe branch counts
// (branch_ct[0..max_idx]), gathered per node into this_branch_ct, so that a
// single coded update serves every subframe interval.  The emitted bitstream
// layout (update flag per node, prob diff on update, TWO_LOOP /
// ONE_LOOP_REDUCED strategies) mirrors update_coef_probs_common().
static void update_coef_probs_subframe(
    aom_writer *const bc, AV1_COMP *cpi, TX_SIZE tx_size,
    av1_coeff_stats branch_ct[COEF_PROBS_BUFS][TX_SIZES][PLANE_TYPES],
    av1_coeff_probs_model *new_coef_probs) {
  av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
  const aom_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  int stepsize = cpi->sf.coeff_prob_appx_step;
  const int max_idx = cpi->common.coef_probs_update_idx;
  int idx;
  unsigned int this_branch_ct[ENTROPY_NODES][COEF_PROBS_BUFS][2];

  switch (cpi->sf.use_fast_coef_updates) {
    case TWO_LOOP: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = { 0, 0 };
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // Gather each node's (0,1) branch counts from every subframe
              // buffer so the search can evaluate all intervals at once.
              for (t = 0; t < ENTROPY_NODES; ++t) {
                for (idx = 0; idx <= max_idx; ++idx) {
                  memcpy(this_branch_ct[t][idx],
                         branch_ct[idx][tx_size][i][j][k][l][t],
                         2 * sizeof(this_branch_ct[t][idx][0]));
                }
              }
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                const aom_prob oldp = old_coef_probs[i][j][k][l][t];
                int s, u = 0;

                if (t == PIVOT_NODE)
                  s = av1_prob_update_search_model_subframe(
                      this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
                      stepsize, max_idx);
                else
                  s = av1_prob_update_search_subframe(this_branch_ct[t], oldp,
                                                     &newp, upd, max_idx);
                if (s > 0 && newp != oldp) u = 1;
                // Each node pays for its update flag; updates recoup via s.
                if (u)
                  savings += s - (int)(av1_cost_zero(upd));
                else
                  savings -= (int)(av1_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        aom_write_bit(bc, 0);
        return;
      }
      aom_write_bit(bc, 1);
      // Second pass: redo the search and emit the accepted updates.
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < ENTROPY_NODES; ++t) {
                for (idx = 0; idx <= max_idx; ++idx) {
                  memcpy(this_branch_ct[t][idx],
                         branch_ct[idx][tx_size][i][j][k][l][t],
                         2 * sizeof(this_branch_ct[t][idx][0]));
                }
              }
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;

                if (t == PIVOT_NODE)
                  s = av1_prob_update_search_model_subframe(
                      this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
                      stepsize, max_idx);
                else
                  s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp,
                                                     &newp, upd, max_idx);
                if (s > 0 && newp != *oldp) u = 1;
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case ONE_LOOP_REDUCED: {
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < ENTROPY_NODES; ++t) {
                for (idx = 0; idx <= max_idx; ++idx) {
                  memcpy(this_branch_ct[t][idx],
                         branch_ct[idx][tx_size][i][j][k][l][t],
                         2 * sizeof(this_branch_ct[t][idx][0]));
                }
              }
              for (t = 0; t < entropy_nodes_update; ++t) {
                aom_prob newp = new_coef_probs[i][j][k][l][t];
                aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;

                if (t == PIVOT_NODE)
                  s = av1_prob_update_search_model_subframe(
                      this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
                      stepsize, max_idx);
                else
                  s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp,
                                                     &newp, upd, max_idx);
                if (s > 0 && newp != *oldp) u = 1;
                updates += u;
                // No bits written until the first update appears.
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  aom_write_bit(bc, 1);
                  // Back-fill the zero flags skipped before this point.
                  for (v = 0; v < noupdates_before_first; ++v)
                    aom_write(bc, 0, upd);
                }
                aom_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  av1_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        aom_write_bit(bc, 0);  // no updates
      }
      return;
    }
    default: assert(0);
  }
}
2469#endif // CONFIG_ENTROPY
2470
// Top-level coefficient-probability update for the frame.  For each coded
// transform size, either a single 0 bit is written (too few blocks of that
// size were coded, or the tx-size speed feature rules it out) or the full
// per-node update is delegated to update_coef_probs_common() — or, on the
// CONFIG_ENTROPY subframe path, to update_coef_probs_subframe().
static void update_coef_probs(AV1_COMP *cpi, aom_writer *w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
#if CONFIG_RANS
  int update = 0;
#endif  // CONFIG_RANS
#if CONFIG_ENTROPY
  AV1_COMMON *cm = &cpi->common;
  SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats;
  unsigned int eob_counts_copy[TX_SIZES][PLANE_TYPES][REF_TYPES][COEF_BANDS]
                              [COEFF_CONTEXTS];
  int i;
  av1_coeff_probs_model dummy_frame_coef_probs[PLANE_TYPES];

  if (cm->do_subframe_update &&
      cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
    // Restart from the probabilities in effect at the start of encoding and
    // split the frame totals into per-subframe count diffs.
    av1_copy(cpi->common.fc->coef_probs,
             subframe_stats->enc_starting_coef_probs);
    for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) {
      get_coef_counts_diff(cpi, i, cpi->wholeframe_stats.coef_counts_buf[i],
                           cpi->wholeframe_stats.eob_counts_buf[i]);
    }
  }
#endif  // CONFIG_ENTROPY

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size) {
    av1_coeff_stats frame_branch_ct[PLANE_TYPES];
    av1_coeff_probs_model frame_coef_probs[PLANE_TYPES];
    if (cpi->td.counts->tx_size_totals[tx_size] <= 20 ||
        (tx_size >= TX_16X16 && cpi->sf.tx_size_search_method == USE_TX_8X8)) {
      // Not enough blocks of this size (or size excluded): signal "no
      // update" with one bit.
      aom_write_bit(w, 0);
    } else {
#if CONFIG_ENTROPY
      if (cm->do_subframe_update &&
          cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
        av1_coeff_count coef_counts_copy[PLANE_TYPES];
        // Save the frame totals, temporarily swap in each subframe's count
        // diffs to build per-buffer branch counts, then restore.
        av1_copy(eob_counts_copy, cpi->common.counts.eob_branch[tx_size]);
        av1_copy(coef_counts_copy, cpi->td.rd_counts.coef_counts[tx_size]);
        build_tree_distribution(cpi, tx_size, frame_branch_ct,
                                frame_coef_probs);
        for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) {
          av1_copy(cpi->common.counts.eob_branch[tx_size],
                   cpi->wholeframe_stats.eob_counts_buf[i][tx_size]);
          av1_copy(cpi->td.rd_counts.coef_counts[tx_size],
                   cpi->wholeframe_stats.coef_counts_buf[i][tx_size]);
          build_tree_distribution(cpi, tx_size, cpi->branch_ct_buf[i][tx_size],
                                  dummy_frame_coef_probs);
        }
        av1_copy(cpi->common.counts.eob_branch[tx_size], eob_counts_copy);
        av1_copy(cpi->td.rd_counts.coef_counts[tx_size], coef_counts_copy);

        update_coef_probs_subframe(w, cpi, tx_size, cpi->branch_ct_buf,
                                   frame_coef_probs);
#if CONFIG_RANS
        update = 1;
#endif  // CONFIG_RANS
      } else {
#endif  // CONFIG_ENTROPY
        build_tree_distribution(cpi, tx_size, frame_branch_ct,
                                frame_coef_probs);
        update_coef_probs_common(w, cpi, tx_size, frame_branch_ct,
                                 frame_coef_probs);
#if CONFIG_RANS
        update = 1;
#endif  // CONFIG_RANS
#if CONFIG_ENTROPY
      }
#endif  // CONFIG_ENTROPY
    }
  }

#if CONFIG_ENTROPY
  av1_copy(cm->starting_coef_probs, cm->fc->coef_probs);
  av1_copy(subframe_stats->coef_probs_buf[0], cm->fc->coef_probs);
  if (cm->do_subframe_update &&
      cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
    // Re-derive the per-subframe adapted probability snapshots, then restore
    // the frame-start probabilities and the saved eob counts.
    av1_copy(eob_counts_copy, cm->counts.eob_branch);
    for (i = 1; i <= cpi->common.coef_probs_update_idx; ++i) {
      for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
        av1_full_to_model_counts(cm->counts.coef[tx_size],
                                 subframe_stats->coef_counts_buf[i][tx_size]);
      av1_copy(cm->counts.eob_branch, subframe_stats->eob_counts_buf[i]);
      av1_partial_adapt_probs(cm, 0, 0);
      av1_copy(subframe_stats->coef_probs_buf[i], cm->fc->coef_probs);
    }
    av1_copy(cm->fc->coef_probs, subframe_stats->coef_probs_buf[0]);
    av1_copy(cm->counts.eob_branch, eob_counts_copy);
  }
#endif  // CONFIG_ENTROPY
#if CONFIG_RANS
  // Any probability change requires rebuilding the rANS CDF tables.
  if (update) av1_coef_pareto_cdfs(cpi->common.fc);
#endif  // CONFIG_RANS
}
2565
2566#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07002567static void encode_restoration_mode(AV1_COMMON *cm,
2568 struct aom_write_bit_buffer *wb) {
2569 RestorationInfo *rst = &cm->rst_info;
2570 switch (rst->frame_restoration_type) {
2571 case RESTORE_NONE:
2572 aom_wb_write_bit(wb, 0);
2573 aom_wb_write_bit(wb, 0);
2574 break;
2575 case RESTORE_SWITCHABLE:
2576 aom_wb_write_bit(wb, 0);
2577 aom_wb_write_bit(wb, 1);
2578 break;
2579 case RESTORE_BILATERAL:
2580 aom_wb_write_bit(wb, 1);
2581 aom_wb_write_bit(wb, 0);
2582 break;
2583 case RESTORE_WIENER:
2584 aom_wb_write_bit(wb, 1);
2585 aom_wb_write_bit(wb, 1);
2586 break;
2587 default: assert(0);
2588 }
2589}
2590
// Writes the per-tile loop-restoration parameters with the arithmetic coder.
// For RESTORE_SWITCHABLE each tile signals its own type via a token, then its
// type-specific parameters; for frame-wide RESTORE_BILATERAL /
// RESTORE_WIENER only the parameters are sent.  Wiener taps are coded as
// offsets from their per-tap minimum values.
static void encode_restoration(AV1_COMMON *cm, aom_writer *wb) {
  int i;
  RestorationInfo *rsi = &cm->rst_info;
  if (rsi->frame_restoration_type != RESTORE_NONE) {
    if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
      // RESTORE_SWITCHABLE
      for (i = 0; i < cm->rst_internal.ntiles; ++i) {
        av1_write_token(
            wb, av1_switchable_restore_tree, cm->fc->switchable_restore_prob,
            &switchable_restore_encodings[rsi->restoration_type[i]]);
        if (rsi->restoration_type[i] == RESTORE_BILATERAL) {
          int s;
          for (s = 0; s < BILATERAL_SUBTILES; ++s) {
#if BILATERAL_SUBTILES == 0
            aom_write_literal(wb, rsi->bilateral_info[i].level[s],
                              av1_bilateral_level_bits(cm));
#else
            // Per subtile: a "level present" flag, then the level itself.
            aom_write(wb, rsi->bilateral_info[i].level[s] >= 0,
                      RESTORE_NONE_BILATERAL_PROB);
            if (rsi->bilateral_info[i].level[s] >= 0) {
              aom_write_literal(wb, rsi->bilateral_info[i].level[s],
                                av1_bilateral_level_bits(cm));
            }
#endif
          }
        } else if (rsi->restoration_type[i] == RESTORE_WIENER) {
          // Three vertical then three horizontal filter taps, biased by
          // their minimum values.
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[0] - WIENER_FILT_TAP0_MINV,
              WIENER_FILT_TAP0_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[1] - WIENER_FILT_TAP1_MINV,
              WIENER_FILT_TAP1_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[2] - WIENER_FILT_TAP2_MINV,
              WIENER_FILT_TAP2_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[0] - WIENER_FILT_TAP0_MINV,
              WIENER_FILT_TAP0_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[1] - WIENER_FILT_TAP1_MINV,
              WIENER_FILT_TAP1_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[2] - WIENER_FILT_TAP2_MINV,
              WIENER_FILT_TAP2_BITS);
        }
      }
    } else if (rsi->frame_restoration_type == RESTORE_BILATERAL) {
      for (i = 0; i < cm->rst_internal.ntiles; ++i) {
        int s;
        for (s = 0; s < BILATERAL_SUBTILES; ++s) {
          aom_write(wb, rsi->bilateral_info[i].level[s] >= 0,
                    RESTORE_NONE_BILATERAL_PROB);
          if (rsi->bilateral_info[i].level[s] >= 0) {
            aom_write_literal(wb, rsi->bilateral_info[i].level[s],
                              av1_bilateral_level_bits(cm));
          }
        }
      }
    } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
      for (i = 0; i < cm->rst_internal.ntiles; ++i) {
        // Per tile: a "filter present" flag, then the six taps if present.
        aom_write(wb, rsi->wiener_info[i].level != 0, RESTORE_NONE_WIENER_PROB);
        if (rsi->wiener_info[i].level) {
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[0] - WIENER_FILT_TAP0_MINV,
              WIENER_FILT_TAP0_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[1] - WIENER_FILT_TAP1_MINV,
              WIENER_FILT_TAP1_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].vfilter[2] - WIENER_FILT_TAP2_MINV,
              WIENER_FILT_TAP2_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[0] - WIENER_FILT_TAP0_MINV,
              WIENER_FILT_TAP0_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[1] - WIENER_FILT_TAP1_MINV,
              WIENER_FILT_TAP1_BITS);
          aom_write_literal(
              wb, rsi->wiener_info[i].hfilter[2] - WIENER_FILT_TAP2_MINV,
              WIENER_FILT_TAP2_BITS);
        }
      }
    }
  }
}
2676#endif // CONFIG_LOOP_RESTORATION
2677
Yaowu Xuf883b422016-08-30 14:01:10 -07002678static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002679 int i;
2680 struct loopfilter *lf = &cm->lf;
2681
2682 // Encode the loop filter level and type
Yaowu Xuf883b422016-08-30 14:01:10 -07002683 aom_wb_write_literal(wb, lf->filter_level, 6);
2684 aom_wb_write_literal(wb, lf->sharpness_level, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002685
2686 // Write out loop filter deltas applied at the MB level based on mode or
2687 // ref frame (if they are enabled).
Yaowu Xuf883b422016-08-30 14:01:10 -07002688 aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002689
2690 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002691 aom_wb_write_bit(wb, lf->mode_ref_delta_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002692 if (lf->mode_ref_delta_update) {
2693 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
2694 const int delta = lf->ref_deltas[i];
2695 const int changed = delta != lf->last_ref_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07002696 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002697 if (changed) {
2698 lf->last_ref_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07002699 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002700 }
2701 }
2702
2703 for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
2704 const int delta = lf->mode_deltas[i];
2705 const int changed = delta != lf->last_mode_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07002706 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002707 if (changed) {
2708 lf->last_mode_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07002709 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002710 }
2711 }
2712 }
2713 }
2714}
2715
2716#if CONFIG_CLPF
// Writes the CLPF (constrained low-pass filter) header fields: a 2-bit
// strength per plane (y, u, v), then the 2-bit filter block size — the
// latter only when the luma strength is non-zero.
static void encode_clpf(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, cm->clpf_strength_y, 2);
  aom_wb_write_literal(wb, cm->clpf_strength_u, 2);
  aom_wb_write_literal(wb, cm->clpf_strength_v, 2);
  if (cm->clpf_strength_y) {
    aom_wb_write_literal(wb, cm->clpf_size, 2);
  }
}
2725#endif
2726
2727#if CONFIG_DERING
// Writes the frame-level dering filter level as a fixed-width literal.
static void encode_dering(int level, struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, level, DERING_LEVEL_BITS);
}
2731#endif // CONFIG_DERING
2732
// Writes an optional delta-q value: a one-bit present flag, followed by the
// delta as a 6-bit inverse-signed literal when it is non-zero.
static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) {
  if (delta_q == 0) {
    aom_wb_write_bit(wb, 0);  // No delta coded.
    return;
  }
  aom_wb_write_bit(wb, 1);
  aom_wb_write_inv_signed_literal(wb, delta_q, 6);
}
2741
// Writes the quantization parameters to the uncompressed header: the base
// q-index, the three optional deltas (luma DC, chroma DC, chroma AC), and,
// when quantization matrices are compiled in, the qmatrix flag plus its
// min/max levels.
static void encode_quantization(const AV1_COMMON *const cm,
                                struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
#if CONFIG_AOM_QM
  aom_wb_write_bit(wb, cm->using_qmatrix);
  if (cm->using_qmatrix) {
    // Level range is only signalled when qmatrix coding is actually in use.
    aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS);
    aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS);
  }
#endif
}
2756
// Writes the segmentation syntax to the uncompressed header: the enabled
// flag, the segmentation-map update flags (including the temporal-vs-spatial
// coding choice), and, when the per-segment data is being refreshed, the
// active features and their (possibly signed) values for every segment.
// NOTE(review): av1_choose_segmap_coding_method() mutates encoder state as a
// side effect of being called from this writer — presumably intentional, but
// worth keeping in mind when reordering header writes.
static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
                                struct aom_write_bit_buffer *wb) {
  int i, j;
  const struct segmentation *seg = &cm->seg;

  aom_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled) return;

  // Segmentation map
  if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
    aom_wb_write_bit(wb, seg->update_map);
  } else {
    // Intra-only / error-resilient frames must always refresh the map.
    assert(seg->update_map == 1);
  }
  if (seg->update_map) {
    // Select the coding strategy (temporal or spatial)
    av1_choose_segmap_coding_method(cm, xd);

    // Write out the chosen coding method.
    if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
      aom_wb_write_bit(wb, seg->temporal_update);
    } else {
      // Temporal prediction needs a previous map, unavailable here.
      assert(seg->temporal_update == 0);
    }
  }

  // Segmentation data
  aom_wb_write_bit(wb, seg->update_data);
  if (seg->update_data) {
    aom_wb_write_bit(wb, seg->abs_delta);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        const int active = segfeature_active(seg, i, j);
        aom_wb_write_bit(wb, active);
        if (active) {
          const int data = get_segdata(seg, i, j);
          const int data_max = av1_seg_feature_data_max(j);

          if (av1_is_segfeature_signed(j)) {
            // Signed features: magnitude first, then the sign bit.
            encode_unsigned_max(wb, abs(data), data_max);
            aom_wb_write_bit(wb, data < 0);
          } else {
            encode_unsigned_max(wb, data, data_max);
          }
        }
      }
    }
  }
}
2807
// Sends differential updates for the segmentation probabilities in the
// compressed header.  With temporal prediction enabled, both the prediction
// probabilities and the tree probabilities (trained on mispredicted blocks)
// are updated; otherwise only the tree probabilities (trained on all blocks)
// are.  With the Daala entropy coder, the CDF is rebuilt from the updated
// tree probabilities afterwards.
static void update_seg_probs(AV1_COMP *cpi, aom_writer *w) {
  AV1_COMMON *cm = &cpi->common;

  // Nothing to update when segmentation is off or the map is not refreshed.
  if (!cm->seg.enabled || !cm->seg.update_map) return;

  if (cm->seg.temporal_update) {
    int i;

    for (i = 0; i < PREDICTION_PROBS; i++)
      av1_cond_prob_diff_update(w, &cm->fc->seg.pred_probs[i],
                                cm->counts.seg.pred[i]);

    prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs,
                     cm->counts.seg.tree_mispred, MAX_SEGMENTS, w);
  } else {
    prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs,
                     cm->counts.seg.tree_total, MAX_SEGMENTS, w);
  }
#if CONFIG_DAALA_EC
  av1_tree_to_cdf(av1_segment_tree, cm->fc->seg.tree_probs,
                  cm->fc->seg.tree_cdf);
#endif
}
2831
Yaowu Xuf883b422016-08-30 14:01:10 -07002832static void write_txfm_mode(TX_MODE mode, struct aom_write_bit_buffer *wb) {
2833 aom_wb_write_bit(wb, mode == TX_MODE_SELECT);
2834 if (mode != TX_MODE_SELECT) aom_wb_write_literal(wb, mode, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002835}
2836
Yaowu Xuf883b422016-08-30 14:01:10 -07002837static void update_txfm_probs(AV1_COMMON *cm, aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002838 FRAME_COUNTS *counts) {
2839 if (cm->tx_mode == TX_MODE_SELECT) {
2840 int i, j;
2841 for (i = 0; i < TX_SIZES - 1; ++i)
2842 for (j = 0; j < TX_SIZE_CONTEXTS; ++j)
Yaowu Xuf883b422016-08-30 14:01:10 -07002843 prob_diff_update(av1_tx_size_tree[i], cm->fc->tx_size_probs[i][j],
Yaowu Xuc27fc142016-08-22 16:08:15 -07002844 counts->tx_size[i][j], i + 2, w);
2845 }
2846}
2847
James Zern7b9407a2016-05-18 23:48:05 -07002848static void write_interp_filter(InterpFilter filter,
Yaowu Xuf883b422016-08-30 14:01:10 -07002849 struct aom_write_bit_buffer *wb) {
2850 aom_wb_write_bit(wb, filter == SWITCHABLE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002851 if (filter != SWITCHABLE)
Yaowu Xuf883b422016-08-30 14:01:10 -07002852 aom_wb_write_literal(wb, filter, 2 + CONFIG_EXT_INTERP);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002853}
2854
Yaowu Xuf883b422016-08-30 14:01:10 -07002855static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002856 if (cm->interp_filter == SWITCHABLE) {
2857 // Check to see if only one of the filters is actually used
2858 int count[SWITCHABLE_FILTERS];
2859 int i, j, c = 0;
2860 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2861 count[i] = 0;
2862 for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
2863 count[i] += counts->switchable_interp[j][i];
2864 c += (count[i] > 0);
2865 }
2866 if (c == 1) {
2867 // Only one filter is used. So set the filter at frame level
2868 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
2869 if (count[i]) {
2870 cm->interp_filter = i;
2871 break;
2872 }
2873 }
2874 }
2875 }
2876}
2877
// Writes the tile layout to the uncompressed header.  With CONFIG_EXT_TILE
// the tile width/height (in superblocks, minus one) are coded directly using
// 5 or 6 bits depending on the superblock size; otherwise the VP9-style
// unary log2-tile-columns code plus up to two row bits are used.
static void write_tile_info(const AV1_COMMON *const cm,
                            struct aom_write_bit_buffer *wb) {
#if CONFIG_EXT_TILE
  // Convert pixel-aligned tile dimensions to superblock units.
  const int tile_width =
      ALIGN_POWER_OF_TWO(cm->tile_width, cm->mib_size_log2) >>
      cm->mib_size_log2;
  const int tile_height =
      ALIGN_POWER_OF_TWO(cm->tile_height, cm->mib_size_log2) >>
      cm->mib_size_log2;

  assert(tile_width > 0);
  assert(tile_height > 0);

// Write the tile sizes
#if CONFIG_EXT_PARTITION
  if (cm->sb_size == BLOCK_128X128) {
    assert(tile_width <= 32);
    assert(tile_height <= 32);
    aom_wb_write_literal(wb, tile_width - 1, 5);
    aom_wb_write_literal(wb, tile_height - 1, 5);
  } else
#endif  // CONFIG_EXT_PARTITION
  {
    assert(tile_width <= 64);
    assert(tile_height <= 64);
    aom_wb_write_literal(wb, tile_width - 1, 6);
    aom_wb_write_literal(wb, tile_height - 1, 6);
  }
#else
  int min_log2_tile_cols, max_log2_tile_cols, ones;
  av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns: unary code of (log2_tile_cols - min), terminated by a 0 bit
  // unless already at the maximum.
  ones = cm->log2_tile_cols - min_log2_tile_cols;
  while (ones--) aom_wb_write_bit(wb, 1);

  if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0);

  // rows: 0, 10, or 11 encodes log2_tile_rows of 0, 1, or 2.
  aom_wb_write_bit(wb, cm->log2_tile_rows != 0);
  if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1);
#endif  // CONFIG_EXT_TILE
}
2921
// Builds the reference-frame refresh bitmask signalled in the frame header.
// Each set bit marks a slot in the reference buffer pool that the current
// frame will overwrite.  The EXT_REFS path handles the rotating virtual
// indices for the three LAST frames and the BWD reference; the golden/ARF
// handling depends on whether the existing golden frame is being preserved
// as the new ARF.
static int get_refresh_mask(AV1_COMP *cpi) {
  int refresh_mask = 0;

#if CONFIG_EXT_REFS
  // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
  // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
  // the 3 LAST reference frames will be updated accordingly, i.e.:
  // (1) The original virtual index for LAST3_FRAME will become the new virtual
  // index for LAST_FRAME; and
  // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
  // shifted and become the new virtual indexes for LAST2_FRAME and
  // LAST3_FRAME.
  refresh_mask |=
      (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]);
  if (cpi->rc.is_bwd_ref_frame && cpi->num_extra_arfs) {
    // We have swapped the virtual indices
    refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->arf_map[0]);
  } else {
    refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
  }
#else
  refresh_mask |= (cpi->refresh_last_frame << cpi->lst_fb_idx);
#endif  // CONFIG_EXT_REFS

  if (av1_preserve_existing_gf(cpi)) {
    // We have decided to preserve the previously existing golden frame as our
    // new ARF frame. However, in the short term we leave it in the GF slot and,
    // if we're updating the GF with the current decoded frame, we save it
    // instead to the ARF slot.
    // Later, in the function av1_encoder.c:av1_update_reference_frames() we
    // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
    // there so that it can be done outside of the recode loop.
    // Note: This is highly specific to the use of ARF as a forward reference,
    // and this needs to be generalized as other uses are implemented
    // (like RTC/temporal scalability).
    return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx);
  } else {
    int arf_idx = cpi->alt_fb_idx;
#if CONFIG_EXT_REFS
    const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
    arf_idx = cpi->arf_map[gf_group->arf_update_idx[gf_group->index]];
#else
    if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
      const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
      arf_idx = gf_group->arf_update_idx[gf_group->index];
    }
#endif  // CONFIG_EXT_REFS
    return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
           (cpi->refresh_alt_ref_frame << arf_idx);
  }
}
2973
2974#if CONFIG_EXT_TILE
// Searches for a previously-coded tile whose compressed payload is identical
// to the current tile's, so the current tile can be coded as a "copy tile"
// (just a row offset instead of data).  Returns the positive row offset to
// the matching tile, or 0 when no match exists.  Currently only the tile
// directly above is considered (see TODO below).  If that candidate is itself
// a copy tile, the search follows its stored offset to the original.
static INLINE int find_identical_tile(
    const int tile_row, const int tile_col,
    TileBufferEnc (*const tile_buffers)[1024]) {
  const MV32 candidate_offset[1] = { { 1, 0 } };
  // Skip the 4-byte tile header; only payload bytes are compared.
  const uint8_t *const cur_tile_data =
      tile_buffers[tile_row][tile_col].data + 4;
  const unsigned int cur_tile_size = tile_buffers[tile_row][tile_col].size;

  int i;

  // Top row has no tile above it to copy from.
  if (tile_row == 0) return 0;

  // (TODO: yunqingwang) For now, only above tile is checked and used.
  // More candidates such as left tile can be added later.
  for (i = 0; i < 1; i++) {
    int row_offset = candidate_offset[0].row;
    int col_offset = candidate_offset[0].col;
    int row = tile_row - row_offset;
    int col = tile_col - col_offset;
    uint8_t tile_hdr;
    const uint8_t *tile_data;
    TileBufferEnc *candidate;

    if (row < 0 || col < 0) continue;

    tile_hdr = *(tile_buffers[row][col].data);

    // Read out tcm bit
    if ((tile_hdr >> 7) == 1) {
      // The candidate is a copy tile itself
      row_offset += tile_hdr & 0x7f;
      row = tile_row - row_offset;
    }

    candidate = &tile_buffers[row][col];

    // Offsets >= 128 cannot be represented in the 7-bit copy field.
    if (row_offset >= 128 || candidate->size != cur_tile_size) continue;

    tile_data = candidate->data + 4;

    if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;

    // Identical tile found
    assert(row_offset > 0);
    return row_offset;
  }

  // No identical tile found
  return 0;
}
3025#endif // CONFIG_EXT_TILE
3026
// Packs all coded tiles into the output buffer `dst` and returns the total
// number of bytes written.  Every tile (except ones exempted below) is
// preceded by a provisional 4-byte size field; remux_tiles() may later shrink
// these headers.  Also reports the largest tile size and (EXT_TILE) largest
// tile-column size via the out parameters, for that header compaction.
// Entropy coding goes through either the bool coder or the ANS coder,
// depending on CONFIG_ANS.
static uint32_t write_tiles(AV1_COMP *const cpi, uint8_t *const dst,
                            unsigned int *max_tile_size,
                            unsigned int *max_tile_col_size) {
  const AV1_COMMON *const cm = &cpi->common;
#if CONFIG_ANS
  struct AnsCoder token_ans;
#else
  aom_writer mode_bc;
#endif  // CONFIG_ANS
  int tile_row, tile_col;
  TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
  TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
  size_t total_size = 0;
  const int tile_cols = cm->tile_cols;
  const int tile_rows = cm->tile_rows;
#if CONFIG_EXT_TILE
  const int have_tiles = tile_cols * tile_rows > 1;
#endif  // CONFIG_EXT_TILE
#if CONFIG_ANS
  struct BufAnsCoder *buf_ans = &cpi->buf_ans;
#endif  // CONFIG_ANS

  *max_tile_size = 0;
  *max_tile_col_size = 0;

// All tile size fields are output on 4 bytes. A call to remux_tiles will
// later compact the data if smaller headers are adequate.

#if CONFIG_EXT_TILE
  // EXT_TILE layout: column-major, with a per-column size header (except the
  // last column) and a per-tile header on every tile when tiling is used.
  for (tile_col = 0; tile_col < tile_cols; tile_col++) {
    TileInfo tile_info;
    const int is_last_col = (tile_col == tile_cols - 1);
    const size_t col_offset = total_size;

    av1_tile_set_col(&tile_info, cm, tile_col);

    // The last column does not have a column header
    if (!is_last_col) total_size += 4;

    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
      TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
      unsigned int tile_size;
      const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
      const int data_offset = have_tiles ? 4 : 0;

      av1_tile_set_row(&tile_info, cm, tile_row);

      buf->data = dst + total_size;

      // Is CONFIG_EXT_TILE = 1, every tile in the row has a header,
      // even for the last one, unless no tiling is used at all.
      total_size += data_offset;
#if !CONFIG_ANS
      aom_start_encode(&mode_bc, buf->data + data_offset);
      write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
      assert(tok == tok_end);
      aom_stop_encode(&mode_bc);
      tile_size = mode_bc.pos;
#else
      buf_ans_write_reset(buf_ans);
      write_modes(cpi, &tile_info, buf_ans, &tok, tok_end);
      assert(tok == tok_end);
      ans_write_init(&token_ans, buf->data + data_offset);
      buf_ans_flush(buf_ans, &token_ans);
      tile_size = ans_write_end(&token_ans);
#endif  // !CONFIG_ANS

      buf->size = tile_size;

      // Record the maximum tile size we see, so we can compact headers later.
      *max_tile_size = AOMMAX(*max_tile_size, tile_size);

      if (have_tiles) {
        // tile header: size of this tile, or copy offset
        uint32_t tile_header = tile_size;

        // Check if this tile is a copy tile.
        // Very low chances to have copy tiles on the key frames, so don't
        // search on key frames to reduce unnecessary search.
        if (cm->frame_type != KEY_FRAME) {
          const int idendical_tile_offset =
              find_identical_tile(tile_row, tile_col, tile_buffers);

          if (idendical_tile_offset > 0) {
            // Copy tile: no payload; header carries the offset with the top
            // ("tcm") bit set, placed in the most significant byte.
            tile_size = 0;
            tile_header = idendical_tile_offset | 0x80;
            tile_header <<= 24;
          }
        }

        mem_put_le32(buf->data, tile_header);
      }

      total_size += tile_size;
    }

    if (!is_last_col) {
      size_t col_size = total_size - col_offset - 4;
      mem_put_le32(dst + col_offset, col_size);

      // If it is not final packing, record the maximum tile column size we see,
      // otherwise, check if the tile size is out of the range.
      *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
    }
  }
#else
  // Default layout: row-major, a 4-byte size header before every tile except
  // the final one.
  for (tile_row = 0; tile_row < tile_rows; tile_row++) {
    TileInfo tile_info;
    const int is_last_row = (tile_row == tile_rows - 1);

    av1_tile_set_row(&tile_info, cm, tile_row);

    for (tile_col = 0; tile_col < tile_cols; tile_col++) {
      TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
      const int is_last_col = (tile_col == tile_cols - 1);
      const int is_last_tile = is_last_col && is_last_row;
      unsigned int tile_size;
      const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
      const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];

      av1_tile_set_col(&tile_info, cm, tile_col);

      buf->data = dst + total_size;

      // The last tile does not have a header.
      if (!is_last_tile) total_size += 4;

#if !CONFIG_ANS
      aom_start_encode(&mode_bc, dst + total_size);
      write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
      assert(tok == tok_end);
      aom_stop_encode(&mode_bc);
      tile_size = mode_bc.pos;
#else
      buf_ans_write_reset(buf_ans);
      write_modes(cpi, &tile_info, buf_ans, &tok, tok_end);
      assert(tok == tok_end);
      ans_write_init(&token_ans, dst + total_size);
      buf_ans_flush(buf_ans, &token_ans);
      tile_size = ans_write_end(&token_ans);
#endif  // !CONFIG_ANS

      assert(tile_size > 0);

      buf->size = tile_size;

      if (!is_last_tile) {
        *max_tile_size = AOMMAX(*max_tile_size, tile_size);
        // size of this tile
        mem_put_le32(buf->data, tile_size);
      }

      total_size += tile_size;
    }
  }
#endif  // CONFIG_EXT_TILE
  return (uint32_t)total_size;
}
3186
Yaowu Xuf883b422016-08-30 14:01:10 -07003187static void write_render_size(const AV1_COMMON *cm,
3188 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003189 const int scaling_active =
3190 cm->width != cm->render_width || cm->height != cm->render_height;
Yaowu Xuf883b422016-08-30 14:01:10 -07003191 aom_wb_write_bit(wb, scaling_active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003192 if (scaling_active) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003193 aom_wb_write_literal(wb, cm->render_width - 1, 16);
3194 aom_wb_write_literal(wb, cm->render_height - 1, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003195 }
3196}
3197
// Writes the coded frame dimensions (minus one, 16 bits each) followed by
// the optional render size.
static void write_frame_size(const AV1_COMMON *cm,
                             struct aom_write_bit_buffer *wb) {
  aom_wb_write_literal(wb, cm->width - 1, 16);
  aom_wb_write_literal(wb, cm->height - 1, 16);

  write_render_size(cm, wb);
}
3205
// Writes the frame size for inter frames.  For each active reference, one
// bit signals whether the current coded AND render sizes match that
// reference's; encoding stops at the first match.  If no reference matches,
// the explicit frame size and render size are written instead.
static void write_frame_size_with_refs(AV1_COMP *cpi,
                                       struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  int found = 0;

  MV_REFERENCE_FRAME ref_frame;
  for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
    YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);

    if (cfg != NULL) {
      // Both the coded size and the render size must match for reuse.
      found =
          cm->width == cfg->y_crop_width && cm->height == cfg->y_crop_height;
      found &= cm->render_width == cfg->render_width &&
               cm->render_height == cfg->render_height;
    }
    aom_wb_write_bit(wb, found);
    if (found) {
      break;
    }
  }

  if (!found) {
    aom_wb_write_literal(wb, cm->width - 1, 16);
    aom_wb_write_literal(wb, cm->height - 1, 16);
    write_render_size(cm, wb);
  }
}
3233
Yaowu Xuf883b422016-08-30 14:01:10 -07003234static void write_sync_code(struct aom_write_bit_buffer *wb) {
3235 aom_wb_write_literal(wb, AV1_SYNC_CODE_0, 8);
3236 aom_wb_write_literal(wb, AV1_SYNC_CODE_1, 8);
3237 aom_wb_write_literal(wb, AV1_SYNC_CODE_2, 8);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003238}
3239
3240static void write_profile(BITSTREAM_PROFILE profile,
Yaowu Xuf883b422016-08-30 14:01:10 -07003241 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003242 switch (profile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003243 case PROFILE_0: aom_wb_write_literal(wb, 0, 2); break;
3244 case PROFILE_1: aom_wb_write_literal(wb, 2, 2); break;
3245 case PROFILE_2: aom_wb_write_literal(wb, 1, 2); break;
3246 case PROFILE_3: aom_wb_write_literal(wb, 6, 3); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003247 default: assert(0);
3248 }
3249}
3250
// Writes bit depth, color space, color range, and chroma subsampling to the
// header.  Profiles >= 2 carry an explicit 10-vs-12-bit flag (8-bit is
// implied by the lower profiles).  For non-sRGB color spaces, profiles 1 and
// 3 additionally signal the subsampling mode; for sRGB, 4:4:4 is implied and
// only allowed in profiles 1 and 3.
static void write_bitdepth_colorspace_sampling(
    AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
  if (cm->profile >= PROFILE_2) {
    assert(cm->bit_depth > AOM_BITS_8);
    aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1);
  }
  aom_wb_write_literal(wb, cm->color_space, 3);
  if (cm->color_space != AOM_CS_SRGB) {
    // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
    aom_wb_write_bit(wb, cm->color_range);
    if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
      assert(cm->subsampling_x != 1 || cm->subsampling_y != 1);
      aom_wb_write_bit(wb, cm->subsampling_x);
      aom_wb_write_bit(wb, cm->subsampling_y);
      aom_wb_write_bit(wb, 0);  // unused
    } else {
      // Profiles 0 and 2 imply 4:2:0 subsampling.
      assert(cm->subsampling_x == 1 && cm->subsampling_y == 1);
    }
  } else {
    // sRGB implies 4:4:4 and is restricted to profiles 1 and 3.
    assert(cm->profile == PROFILE_1 || cm->profile == PROFILE_3);
    aom_wb_write_bit(wb, 0);  // unused
  }
}
3274
// Writes the entire uncompressed frame header: frame marker, profile,
// show-existing-frame handling (EXT_REFS), frame type/show/error-resilience
// flags, keyframe or inter-frame specific syntax (sync code, color config,
// frame size, reference refresh mask, per-reference indices and sign biases,
// MV precision, interpolation filter), followed by the frame-wide tool
// configuration (context refresh, superblock size, loop filter, CLPF,
// dering, restoration, quantization, segmentation, delta-q, transform mode,
// reference mode) and the tile layout.  The write order must mirror the
// decoder's read order exactly.
static void write_uncompressed_header(AV1_COMP *cpi,
                                      struct aom_write_bit_buffer *wb) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;

  aom_wb_write_literal(wb, AOM_FRAME_MARKER, 2);

  write_profile(cm->profile, wb);

#if CONFIG_EXT_REFS
  // NOTE: By default all coded frames to be used as a reference
  cm->is_reference_frame = 1;

  if (cm->show_existing_frame) {
    RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
    const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];

    if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
      aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                         "Buffer %d does not contain a reconstructed frame",
                         frame_to_show);
    }
    ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);

    // A show-existing frame carries only this flag and the buffer index;
    // the rest of the header is skipped entirely.
    aom_wb_write_bit(wb, 1);  // show_existing_frame
    aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);

    return;
  } else {
#endif  // CONFIG_EXT_REFS
    aom_wb_write_bit(wb, 0);  // show_existing_frame
#if CONFIG_EXT_REFS
  }
#endif  // CONFIG_EXT_REFS

  aom_wb_write_bit(wb, cm->frame_type);
  aom_wb_write_bit(wb, cm->show_frame);
  aom_wb_write_bit(wb, cm->error_resilient_mode);

  if (cm->frame_type == KEY_FRAME) {
    write_sync_code(wb);
    write_bitdepth_colorspace_sampling(cm, wb);
    write_frame_size(cm, wb);
#if CONFIG_PALETTE
    aom_wb_write_bit(wb, cm->allow_screen_content_tools);
#endif  // CONFIG_PALETTE
  } else {
    // intra_only is implied 0 for shown inter frames, so only code it when
    // the frame is not shown.
    if (!cm->show_frame) aom_wb_write_bit(wb, cm->intra_only);
#if CONFIG_PALETTE
    if (cm->intra_only) aom_wb_write_bit(wb, cm->allow_screen_content_tools);
#endif  // CONFIG_PALETTE
    if (!cm->error_resilient_mode) {
      if (cm->intra_only) {
        aom_wb_write_bit(wb,
                         cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
      } else {
        aom_wb_write_bit(wb,
                         cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
        if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
          aom_wb_write_bit(wb,
                          cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
      }
    }

#if CONFIG_EXT_REFS
    cpi->refresh_frame_mask = get_refresh_mask(cpi);
#endif  // CONFIG_EXT_REFS

    if (cm->intra_only) {
      write_sync_code(wb);
      write_bitdepth_colorspace_sampling(cm, wb);

#if CONFIG_EXT_REFS
      aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
#else
      aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
#endif  // CONFIG_EXT_REFS
      write_frame_size(cm, wb);
    } else {
      MV_REFERENCE_FRAME ref_frame;

#if CONFIG_EXT_REFS
      aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
#else
      aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
#endif  // CONFIG_EXT_REFS

#if CONFIG_EXT_REFS
      if (!cpi->refresh_frame_mask) {
        // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
        // will not be used as a reference
        cm->is_reference_frame = 0;
      }
#endif  // CONFIG_EXT_REFS

      for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
        assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
        aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
                             REF_FRAMES_LOG2);
        aom_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
      }

#if CONFIG_FRAME_SIZE
      if (cm->error_resilient_mode == 0) {
        write_frame_size_with_refs(cpi, wb);
      } else {
        write_frame_size(cm, wb);
      }
#else
      write_frame_size_with_refs(cpi, wb);
#endif

      aom_wb_write_bit(wb, cm->allow_high_precision_mv);

      // May demote SWITCHABLE to a single filter before it is signalled.
      fix_interp_filter(cm, cpi->td.counts);
      write_interp_filter(cm->interp_filter, wb);
    }
  }

  if (!cm->error_resilient_mode) {
    aom_wb_write_bit(
        wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
  }

  aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);

  assert(cm->mib_size == num_8x8_blocks_wide_lookup[cm->sb_size]);
  assert(cm->mib_size == 1 << cm->mib_size_log2);
#if CONFIG_EXT_PARTITION
  assert(cm->sb_size == BLOCK_128X128 || cm->sb_size == BLOCK_64X64);
  aom_wb_write_bit(wb, cm->sb_size == BLOCK_128X128 ? 1 : 0);
#else
  assert(cm->sb_size == BLOCK_64X64);
#endif  // CONFIG_EXT_PARTITION

  encode_loopfilter(cm, wb);
#if CONFIG_CLPF
  encode_clpf(cm, wb);
#endif
#if CONFIG_DERING
  encode_dering(cm->dering_level, wb);
#endif  // CONFIG_DERING
#if CONFIG_LOOP_RESTORATION
  encode_restoration_mode(cm, wb);
#endif  // CONFIG_LOOP_RESTORATION
  encode_quantization(cm, wb);
  encode_segmentation(cm, xd, wb);
#if CONFIG_DELTA_Q
  {
    // Delta-q is only signalled when no segment uses its own quantizer,
    // since the two mechanisms would conflict.
    int i;
    struct segmentation *const seg = &cm->seg;
    int segment_quantizer_active = 0;
    for (i = 0; i < MAX_SEGMENTS; i++) {
      if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
        segment_quantizer_active = 1;
      }
    }
    if (segment_quantizer_active == 0) {
      cm->delta_q_present_flag = cpi->oxcf.aq_mode == DELTA_AQ;
      aom_wb_write_bit(wb, cm->delta_q_present_flag);
      if (cm->delta_q_present_flag) {
        aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
        xd->prev_qindex = cm->base_qindex;
      }
    }
  }
#endif

  // Lossless frames without segmentation force 4x4 transforms and skip the
  // tx-mode syntax entirely.
  if (!cm->seg.enabled && xd->lossless[0])
    cm->tx_mode = ONLY_4X4;
  else
    write_txfm_mode(cm->tx_mode, wb);

  if (cpi->allow_comp_inter_inter) {
    const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
    const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE;

    aom_wb_write_bit(wb, use_hybrid_pred);
    if (!use_hybrid_pred) aom_wb_write_bit(wb, use_compound_pred);
  }

  write_tile_info(cm, wb);
}
3458
3459#if CONFIG_GLOBAL_MOTION
// Writes one global motion model to the bitstream: first the model type
// (coded with av1_global_motion_types_tree), then the parameters that the
// type requires. The switch falls through on purpose: an AFFINE model also
// codes the ROTZOOM parameters, which in turn also code the TRANSLATION
// parameters; GLOBAL_ZERO codes no parameters at all.
static void write_global_motion_params(Global_Motion_Params *params,
                                       aom_prob *probs, aom_writer *w) {
  GLOBAL_MOTION_TYPE gmtype = get_gmtype(params);
  av1_write_token(w, av1_global_motion_types_tree, probs,
                  &global_motion_types_encodings[gmtype]);
  switch (gmtype) {
    case GLOBAL_ZERO: break;
    case GLOBAL_AFFINE:
      // wmmat[2]: .row is coded at GM precision; .col is coded with
      // (1 << GM_ALPHA_PREC_BITS) subtracted before coding.
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[2].as_mv.row >> GM_ALPHA_PREC_DIFF),
          GM_ABS_ALPHA_BITS);
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[2].as_mv.col >> GM_ALPHA_PREC_DIFF) -
                 (1 << GM_ALPHA_PREC_BITS),
          GM_ABS_ALPHA_BITS);
    // fallthrough intended
    case GLOBAL_ROTZOOM:
      // wmmat[1]: same coding scheme as wmmat[2] above.
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[1].as_mv.row >> GM_ALPHA_PREC_DIFF),
          GM_ABS_ALPHA_BITS);
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[1].as_mv.col >> GM_ALPHA_PREC_DIFF) -
                 (1 << GM_ALPHA_PREC_BITS),
          GM_ABS_ALPHA_BITS);
    // fallthrough intended
    case GLOBAL_TRANSLATION:
      // wmmat[0]: the translation vector, both components coded at
      // translation precision with no offset.
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[0].as_mv.row >> GM_TRANS_PREC_DIFF),
          GM_ABS_TRANS_BITS);
      aom_write_primitive_symmetric(
          w, (params->motion_params.wmmat[0].as_mv.col >> GM_TRANS_PREC_DIFF),
          GM_ABS_TRANS_BITS);
      break;
    default: assert(0);
  }
}
3496
// Writes the global motion parameters for every inter reference frame
// (LAST_FRAME .. ALTREF_FRAME). A model the encoder never actually used
// for this frame is first reset to all-zero before coding — presumably so
// get_gmtype() classifies it as GLOBAL_ZERO, the cheapest type to code
// (get_gmtype is defined elsewhere; confirm there).
static void write_global_motion(AV1_COMP *cpi, aom_writer *w) {
  AV1_COMMON *const cm = &cpi->common;
  int frame;
  for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
    if (!cpi->global_motion_used[frame]) {
      memset(&cm->global_motion[frame], 0, sizeof(*cm->global_motion));
    }
    write_global_motion_params(&cm->global_motion[frame],
                               cm->fc->global_motion_types_prob, w);
    /*
    printf("Enc Ref %d [%d] (used %d): %d %d %d %d\n",
           frame, cm->current_video_frame, cpi->global_motion_used[frame],
           cm->global_motion[frame].motion_params.wmmat[0].as_mv.row,
           cm->global_motion[frame].motion_params.wmmat[0].as_mv.col,
           cm->global_motion[frame].motion_params.wmmat[1].as_mv.row,
           cm->global_motion[frame].motion_params.wmmat[1].as_mv.col);
    */
  }
}
3516#endif
3517
// Writes the compressed (entropy-coded) frame header at |data|: the
// forward probability updates for the frame — transform, coefficient,
// skip, segmentation, intra/inter mode, reference, motion-vector and
// (per build config) various experiment probabilities. The payload is
// coded with the ANS coder or the boolean coder depending on CONFIG_ANS.
// Returns the number of bytes written, asserted to fit in 16 bits so the
// size can be signalled in the uncompressed header.
static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data) {
  AV1_COMMON *const cm = &cpi->common;
#if CONFIG_SUPERTX
  MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
#endif  // CONFIG_SUPERTX
  FRAME_CONTEXT *const fc = cm->fc;
  FRAME_COUNTS *counts = cpi->td.counts;
  aom_writer *header_bc;
  int i, j;

// Select and initialise the entropy coder for the header payload.
#if CONFIG_ANS
  struct AnsCoder header_ans;
  int header_size;
  header_bc = &cpi->buf_ans;
  buf_ans_write_reset(header_bc);
#else
  aom_writer real_header_bc;
  header_bc = &real_header_bc;
  aom_start_encode(header_bc, data);
#endif

#if CONFIG_LOOP_RESTORATION
  encode_restoration(cm, header_bc);
#endif  // CONFIG_LOOP_RESTORATION

  // Transform-size and coefficient probability updates.
  update_txfm_probs(cm, header_bc, counts);
  update_coef_probs(cpi, header_bc);

#if CONFIG_VAR_TX
  update_txfm_partition_probs(cm, header_bc, counts);
#if CONFIG_EXT_TX && CONFIG_RECT_TX
  // Rectangular-transform probabilities are only coded when the frame
  // selects transform sizes per block.
  if (cm->tx_mode == TX_MODE_SELECT) {
    for (i = 1; i < TX_SIZES - 1; ++i)
      av1_cond_prob_diff_update(header_bc, &fc->rect_tx_prob[i],
                                counts->rect_tx[i]);
  }
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX
#endif

  update_skip_probs(cm, header_bc, counts);
#if CONFIG_DELTA_Q
  update_delta_q_probs(cm, header_bc, counts);
#endif
  update_seg_probs(cpi, header_bc);

  // Chroma (uv) intra mode probabilities, conditioned on the luma mode.
  for (i = 0; i < INTRA_MODES; ++i)
    prob_diff_update(av1_intra_mode_tree, fc->uv_mode_prob[i],
                     counts->uv_mode[i], INTRA_MODES, header_bc);

// Partition probabilities. With EXT_PARTITION_TYPES, context 0 uses the
// basic tree and the remaining contexts the extended tree.
#if CONFIG_EXT_PARTITION_TYPES
  prob_diff_update(av1_partition_tree, fc->partition_prob[0],
                   counts->partition[0], PARTITION_TYPES, header_bc);
  for (i = 1; i < PARTITION_CONTEXTS; ++i)
    prob_diff_update(av1_ext_partition_tree, fc->partition_prob[i],
                     counts->partition[i], EXT_PARTITION_TYPES, header_bc);
#else
  for (i = 0; i < PARTITION_CONTEXTS; ++i) {
    prob_diff_update(av1_partition_tree, fc->partition_prob[i],
                     counts->partition[i], PARTITION_TYPES, header_bc);
#if CONFIG_DAALA_EC
    // Keep the CDF tables in sync with the updated tree probabilities.
    av1_tree_to_cdf(av1_partition_tree, cm->fc->partition_prob[i],
                    cm->fc->partition_cdf[i]);
#endif
  }
#endif  // CONFIG_EXT_PARTITION_TYPES

#if CONFIG_EXT_INTRA
  for (i = 0; i < INTRA_FILTERS + 1; ++i)
    prob_diff_update(av1_intra_filter_tree, fc->intra_filter_probs[i],
                     counts->intra_filter[i], INTRA_FILTERS, header_bc);
#endif  // CONFIG_EXT_INTRA

  if (frame_is_intra_only(cm)) {
    // Intra-only frames: code the key-frame y-mode tables, starting from
    // the default probabilities.
    av1_copy(cm->kf_y_prob, av1_kf_y_mode_prob);
    for (i = 0; i < INTRA_MODES; ++i)
      for (j = 0; j < INTRA_MODES; ++j)
        prob_diff_update(av1_intra_mode_tree, cm->kf_y_prob[i][j],
                         counts->kf_y_mode[i][j], INTRA_MODES, header_bc);
  } else {
// Inter frames: code all inter-specific probability updates.
#if CONFIG_REF_MV
    update_inter_mode_probs(cm, header_bc, counts);
#else
    for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
      prob_diff_update(av1_inter_mode_tree, cm->fc->inter_mode_probs[i],
                       counts->inter_mode[i], INTER_MODES, header_bc);
#endif

#if CONFIG_EXT_INTER
    update_inter_compound_mode_probs(cm, header_bc);

    // Inter-intra probabilities are only relevant when single-reference
    // prediction is possible for this frame.
    if (cm->reference_mode != COMPOUND_REFERENCE) {
      for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
        if (is_interintra_allowed_bsize_group(i)) {
          av1_cond_prob_diff_update(header_bc, &fc->interintra_prob[i],
                                    cm->counts.interintra[i]);
        }
      }
      for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
        prob_diff_update(
            av1_interintra_mode_tree, cm->fc->interintra_mode_prob[i],
            counts->interintra_mode[i], INTERINTRA_MODES, header_bc);
      }
      for (i = 0; i < BLOCK_SIZES; i++) {
        if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i))
          av1_cond_prob_diff_update(header_bc, &fc->wedge_interintra_prob[i],
                                    cm->counts.wedge_interintra[i]);
      }
    }
    // Wedge inter-inter probabilities only when compound prediction is
    // possible.
    if (cm->reference_mode != SINGLE_REFERENCE) {
      for (i = 0; i < BLOCK_SIZES; i++)
        if (is_interinter_wedge_used(i))
          av1_cond_prob_diff_update(header_bc, &fc->wedge_interinter_prob[i],
                                    cm->counts.wedge_interinter[i]);
    }
#endif  // CONFIG_EXT_INTER

#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
    for (i = BLOCK_8X8; i < BLOCK_SIZES; ++i)
      prob_diff_update(av1_motion_mode_tree, fc->motion_mode_prob[i],
                       counts->motion_mode[i], MOTION_MODES, header_bc);
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION

    if (cm->interp_filter == SWITCHABLE)
      update_switchable_interp_probs(cm, header_bc, counts);

    for (i = 0; i < INTRA_INTER_CONTEXTS; i++)
      av1_cond_prob_diff_update(header_bc, &fc->intra_inter_prob[i],
                                counts->intra_inter[i]);

    if (cpi->allow_comp_inter_inter) {
      const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
      if (use_hybrid_pred)
        for (i = 0; i < COMP_INTER_CONTEXTS; i++)
          av1_cond_prob_diff_update(header_bc, &fc->comp_inter_prob[i],
                                    counts->comp_inter[i]);
    }

    // Single-reference probabilities.
    if (cm->reference_mode != COMPOUND_REFERENCE) {
      for (i = 0; i < REF_CONTEXTS; i++) {
        for (j = 0; j < (SINGLE_REFS - 1); j++) {
          av1_cond_prob_diff_update(header_bc, &fc->single_ref_prob[i][j],
                                    counts->single_ref[i][j]);
        }
      }
    }

    // Compound-reference probabilities (split into forward/backward sets
    // under CONFIG_EXT_REFS).
    if (cm->reference_mode != SINGLE_REFERENCE) {
      for (i = 0; i < REF_CONTEXTS; i++) {
#if CONFIG_EXT_REFS
        for (j = 0; j < (FWD_REFS - 1); j++) {
          av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
                                    counts->comp_ref[i][j]);
        }
        for (j = 0; j < (BWD_REFS - 1); j++) {
          av1_cond_prob_diff_update(header_bc, &fc->comp_bwdref_prob[i][j],
                                    counts->comp_bwdref[i][j]);
        }
#else
        for (j = 0; j < (COMP_REFS - 1); j++) {
          av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
                                    counts->comp_ref[i][j]);
        }
#endif  // CONFIG_EXT_REFS
      }
    }

    for (i = 0; i < BLOCK_SIZE_GROUPS; ++i)
      prob_diff_update(av1_intra_mode_tree, cm->fc->y_mode_prob[i],
                       counts->y_mode[i], INTRA_MODES, header_bc);

    // NOTE: with CONFIG_REF_MV counts->mv is already a pointer/array,
    // without it the address must be taken.
    av1_write_nmv_probs(cm, cm->allow_high_precision_mv, header_bc,
#if CONFIG_REF_MV
                        counts->mv);
#else
                        &counts->mv);
#endif
    update_ext_tx_probs(cm, header_bc);
#if CONFIG_SUPERTX
    if (!xd->lossless[0]) update_supertx_probs(cm, header_bc);
#endif  // CONFIG_SUPERTX
#if CONFIG_GLOBAL_MOTION
    write_global_motion(cpi, header_bc);
#endif  // CONFIG_GLOBAL_MOTION
  }
// Flush the coder and return the byte count; both paths assert the size
// fits in the 16-bit field reserved for it.
#if CONFIG_ANS
  ans_write_init(&header_ans, data);
  buf_ans_flush(header_bc, &header_ans);
  header_size = ans_write_end(&header_ans);
  assert(header_size <= 0xffff);
  return header_size;
#else
  aom_stop_encode(header_bc);
  assert(header_bc->pos <= 0xffff);
  return header_bc->pos;
#endif  // CONFIG_ANS
}
3714
// Returns the minimum number of bytes (1..4) needed to store |size| after
// reserving |spare_msbs| most-significant bits of the size field for flag
// use, or -1 if |size| would not fit in 32 bits once shifted.
static int choose_size_bytes(uint32_t size, int spare_msbs) {
  // Reject values that would lose bits when normalised to 32 bits.
  if (spare_msbs > 0 && (size >> (32 - spare_msbs)) != 0) return -1;

  // Normalise to a full 32-bit field, then count significant bytes.
  const uint32_t normalized = size << spare_msbs;
  if ((normalized >> 24) != 0) return 4;
  if ((normalized >> 16) != 0) return 3;
  if ((normalized >> 8) != 0) return 2;
  return 1;
}
3734
// Stores |val| at |dst| as a little-endian value occupying exactly |sz|
// bytes; any size outside 1..4 is a programming error.
static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) {
  if (sz == 1) {
    dst[0] = (uint8_t)(val & 0xff);
  } else if (sz == 2) {
    mem_put_le16(dst, val);
  } else if (sz == 3) {
    mem_put_le24(dst, val);
  } else if (sz == 4) {
    mem_put_le32(dst, val);
  } else {
    assert("Invalid size" && 0);
  }
}
3744
// Rewrites the tile data buffer |dst| in place, shrinking the 4-byte
// little-endian tile (and, with CONFIG_EXT_TILE, tile-column) size fields
// down to the minimal widths chosen from the maximum observed sizes. The
// chosen widths are reported through |tile_size_bytes| and
// |tile_col_size_bytes| so the caller can signal them in the header.
// Returns the (possibly smaller) total data size after remuxing.
static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
                       const uint32_t data_size, const uint32_t max_tile_size,
                       const uint32_t max_tile_col_size,
                       int *const tile_size_bytes,
                       int *const tile_col_size_bytes) {
// Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
#if CONFIG_EXT_TILE
  // The top bit in the tile size field indicates tile copy mode, so we
  // have 1 less bit to code the tile size
  const int tsb = choose_size_bytes(max_tile_size, 1);
  const int tcsb = choose_size_bytes(max_tile_col_size, 0);
#else
  const int tsb = choose_size_bytes(max_tile_size, 0);
  const int tcsb = 4;  // This is ignored
  (void)max_tile_col_size;
#endif  // CONFIG_EXT_TILE

  assert(tsb > 0);
  assert(tcsb > 0);

  *tile_size_bytes = tsb;
  *tile_col_size_bytes = tcsb;

  if (tsb == 4 && tcsb == 4) {
    // Already at full width: nothing to shrink.
    return data_size;
  } else {
    // Read cursor (rpos) always stays ahead of the write cursor (wpos),
    // which is what makes the in-place compaction safe.
    uint32_t wpos = 0;
    uint32_t rpos = 0;

#if CONFIG_EXT_TILE
    int tile_row;
    int tile_col;

    for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
      // All but the last column has a column header
      if (tile_col < cm->tile_cols - 1) {
        uint32_t tile_col_size = mem_get_le32(dst + rpos);
        rpos += 4;

        // Adjust the tile column size by the number of bytes removed
        // from the tile size fields.
        tile_col_size -= (4 - tsb) * cm->tile_rows;

        mem_put_varsize(dst + wpos, tcsb, tile_col_size);
        wpos += tcsb;
      }

      for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
        // All, including the last row has a header
        uint32_t tile_header = mem_get_le32(dst + rpos);
        rpos += 4;

        // If this is a copy tile, we need to shift the MSB to the
        // top bit of the new width, and there is no data to copy.
        if (tile_header >> 31 != 0) {
          if (tsb < 4) tile_header >>= 32 - 8 * tsb;
          mem_put_varsize(dst + wpos, tsb, tile_header);
          wpos += tsb;
        } else {
          // Regular tile: rewrite the (narrower) size field, then slide
          // the tile payload down to close the gap.
          mem_put_varsize(dst + wpos, tsb, tile_header);
          wpos += tsb;

          memmove(dst + wpos, dst + rpos, tile_header);
          rpos += tile_header;
          wpos += tile_header;
        }
      }
    }
#else
    const int n_tiles = cm->tile_cols * cm->tile_rows;
    int n;

    for (n = 0; n < n_tiles; n++) {
      int tile_size;

      if (n == n_tiles - 1) {
        // The last tile has no size field; it runs to the end of the data.
        tile_size = data_size - rpos;
      } else {
        tile_size = mem_get_le32(dst + rpos);
        rpos += 4;
        mem_put_varsize(dst + wpos, tsb, tile_size);
        wpos += tsb;
      }

      memmove(dst + wpos, dst + rpos, tile_size);

      rpos += tile_size;
      wpos += tile_size;
    }
#endif  // CONFIG_EXT_TILE

    // Sanity: we must have shrunk the data and consumed all of it.
    assert(rpos > wpos);
    assert(rpos == data_size);

    return wpos;
  }
}
3842
// Assembles the complete frame bitstream at |dst| in the layout:
//   uncompressed header | compressed header | (remuxed) tile data.
// Fields whose values are unknown up front (tile size-field widths and the
// compressed-header size) are written as zero placeholders via |wb| and
// back-filled through |saved_wb| once the real values are known.
// On return, *size holds the total number of bytes written.
void av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) {
  uint8_t *data = dst;
  uint32_t compressed_header_size;
  uint32_t uncompressed_header_size;
  uint32_t data_size;
  struct aom_write_bit_buffer wb = { data, 0 };
  struct aom_write_bit_buffer saved_wb;
  unsigned int max_tile_size;
  unsigned int max_tile_col_size;
  int tile_size_bytes;
  int tile_col_size_bytes;

  AV1_COMMON *const cm = &cpi->common;
  const int have_tiles = cm->tile_cols * cm->tile_rows > 1;

#if CONFIG_BITSTREAM_DEBUG
  bitstream_queue_reset_write();
#endif

  // Write the uncompressed header
  write_uncompressed_header(cpi, &wb);

#if CONFIG_EXT_REFS
  // A show-existing-frame packet is just the uncompressed header: no
  // compressed header or tile data follows.
  if (cm->show_existing_frame) {
    *size = aom_wb_bytes_written(&wb);
    return;
  }
#endif  // CONFIG_EXT_REFS

  // We do not know these in advance. Output placeholder bit.
  saved_wb = wb;
  // Write tile size magnitudes
  if (have_tiles) {
// Note that the last item in the uncompressed header is the data
// describing tile configuration.
#if CONFIG_EXT_TILE
    // Number of bytes in tile column size - 1
    aom_wb_write_literal(&wb, 0, 2);
#endif  // CONFIG_EXT_TILE
    // Number of bytes in tile size - 1
    aom_wb_write_literal(&wb, 0, 2);
  }
  // Size of compressed header
  aom_wb_write_literal(&wb, 0, 16);

  uncompressed_header_size = (uint32_t)aom_wb_bytes_written(&wb);
  data += uncompressed_header_size;

  aom_clear_system_state();

  // Write the compressed header
  compressed_header_size = write_compressed_header(cpi, data);
  data += compressed_header_size;

  // Write the encoded tile data
  data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);

  if (have_tiles) {
    // Shrink the 4-byte tile size fields to the minimal widths; this also
    // determines tile_size_bytes / tile_col_size_bytes for the header.
    data_size =
        remux_tiles(cm, data, data_size, max_tile_size, max_tile_col_size,
                    &tile_size_bytes, &tile_col_size_bytes);
  }

  data += data_size;

  // Now fill in the gaps in the uncompressed header.
  if (have_tiles) {
#if CONFIG_EXT_TILE
    assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
    aom_wb_write_literal(&saved_wb, tile_col_size_bytes - 1, 2);
#endif  // CONFIG_EXT_TILE
    assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
    aom_wb_write_literal(&saved_wb, tile_size_bytes - 1, 2);
  }
  // TODO(jbb): Figure out what to do if compressed_header_size > 16 bits.
  assert(compressed_header_size <= 0xffff);
  aom_wb_write_literal(&saved_wb, compressed_header_size, 16);

  *size = data - dst;
}