/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <limits.h>
#include <stdio.h>

#include "aom/aom_encoder.h"
#include "aom_dsp/bitwriter_buffer.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_dsp/binary_codes_writer.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_ports/system_state.h"
#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#if CONFIG_CDEF
#include "av1/common/cdef.h"
#include "av1/common/clpf.h"
#endif  // CONFIG_CDEF
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/mvref_common.h"
#include "av1/common/odintrin.h"
#include "av1/common/pred_common.h"
#include "av1/common/reconinter.h"
#if CONFIG_EXT_INTRA
#include "av1/common/reconintra.h"
#endif  // CONFIG_EXT_INTRA
#include "av1/common/seg_common.h"
#include "av1/common/tile_common.h"

#if CONFIG_ANS
#include "aom_dsp/buf_ans.h"
#endif  // CONFIG_ANS
#if CONFIG_LV_MAP
#include "av1/encoder/encodetxb.h"
#endif  // CONFIG_LV_MAP
#include "av1/encoder/bitstream.h"
#include "av1/encoder/cost.h"
#include "av1/encoder/encodemv.h"
#include "av1/encoder/mcomp.h"
#if CONFIG_PALETTE && CONFIG_PALETTE_DELTA_ENCODING
#include "av1/encoder/palette.h"
#endif  // CONFIG_PALETTE && CONFIG_PALETTE_DELTA_ENCODING
#include "av1/encoder/segmentation.h"
#include "av1/encoder/subexp.h"
#include "av1/encoder/tokenize.h"
#if CONFIG_PVQ
#include "av1/encoder/pvq_encoder.h"
#endif

static struct av1_token intra_mode_encodings[INTRA_MODES];
static struct av1_token switchable_interp_encodings[SWITCHABLE_FILTERS];
#if CONFIG_EXT_PARTITION_TYPES && !CONFIG_EC_MULTISYMBOL
static const struct av1_token ext_partition_encodings[EXT_PARTITION_TYPES] = {
  { 0, 1 },  { 4, 3 },  { 12, 4 }, { 7, 3 },
  { 10, 4 }, { 11, 4 }, { 26, 5 }, { 27, 5 }
};
#endif
static struct av1_token partition_encodings[PARTITION_TYPES];
#if !CONFIG_REF_MV
static struct av1_token inter_mode_encodings[INTER_MODES];
#endif
#if CONFIG_EXT_INTER
static const struct av1_token
    inter_compound_mode_encodings[INTER_COMPOUND_MODES] = {
      { 2, 2 },  { 50, 6 }, { 51, 6 }, { 24, 5 }, { 52, 6 },
      { 53, 6 }, { 54, 6 }, { 55, 6 }, { 0, 1 },  { 7, 3 }
    };
#endif  // CONFIG_EXT_INTER
#if CONFIG_PALETTE
static struct av1_token palette_size_encodings[PALETTE_SIZES];
static struct av1_token palette_color_index_encodings[PALETTE_SIZES]
                                                     [PALETTE_COLORS];
#endif  // CONFIG_PALETTE
#if !CONFIG_EC_MULTISYMBOL
static const struct av1_token tx_size_encodings[MAX_TX_DEPTH][TX_SIZES] = {
  { { 0, 1 }, { 1, 1 } },                      // Max tx_size is 8X8
  { { 0, 1 }, { 2, 2 }, { 3, 2 } },            // Max tx_size is 16X16
  { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 7, 3 } },  // Max tx_size is 32X32
#if CONFIG_TX64X64
  { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 14, 4 }, { 15, 4 } },  // Max tx_size 64X64
#endif  // CONFIG_TX64X64
};
#endif

#if CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
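// Writes a value v, assumed to lie in [0, n), with a quasi-uniform code: the
// first (1 << l) - n values use l - 1 bits and the remaining values use l
// bits, where l = get_unsigned_bits(n).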
static INLINE void write_uniform(aom_writer *w, int n, int v) {
  const int l = get_unsigned_bits(n);
  const int m = (1 << l) - n;
  if (l == 0) return;
  if (v < m) {
    aom_write_literal(w, v, l - 1);
  } else {
    aom_write_literal(w, m + ((v - m) >> 1), l - 1);
    aom_write_literal(w, (v - m) & 1, 1);
  }
}
#endif  // CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE

#if CONFIG_EXT_TX
static struct av1_token ext_tx_inter_encodings[EXT_TX_SETS_INTER][TX_TYPES];
static struct av1_token ext_tx_intra_encodings[EXT_TX_SETS_INTRA][TX_TYPES];
#else
static struct av1_token ext_tx_encodings[TX_TYPES];
#endif  // CONFIG_EXT_TX
#if CONFIG_GLOBAL_MOTION
static struct av1_token global_motion_types_encodings[GLOBAL_TRANS_TYPES];
#endif  // CONFIG_GLOBAL_MOTION
#if CONFIG_EXT_INTRA
#if CONFIG_INTRA_INTERP
static struct av1_token intra_filter_encodings[INTRA_FILTERS];
#endif  // CONFIG_INTRA_INTERP
#endif  // CONFIG_EXT_INTRA
#if CONFIG_EXT_INTER
static struct av1_token interintra_mode_encodings[INTERINTRA_MODES];
#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
static struct av1_token compound_type_encodings[COMPOUND_TYPES];
#endif  // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
#endif  // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
static struct av1_token motion_mode_encodings[MOTION_MODES];
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_LOOP_RESTORATION
static struct av1_token switchable_restore_encodings[RESTORE_SWITCHABLE_TYPES];
#endif  // CONFIG_LOOP_RESTORATION
static void write_uncompressed_header(AV1_COMP *cpi,
                                      struct aom_write_bit_buffer *wb);
static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data);
static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
                       const uint32_t data_size, const uint32_t max_tile_size,
                       const uint32_t max_tile_col_size,
                       int *const tile_size_bytes,
                       int *const tile_col_size_bytes);

void av1_encode_token_init(void) {
#if CONFIG_EXT_TX || CONFIG_PALETTE
  int s;
#endif  // CONFIG_EXT_TX || CONFIG_PALETTE
#if CONFIG_EXT_TX
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    av1_tokens_from_tree(ext_tx_inter_encodings[s], av1_ext_tx_inter_tree[s]);
  }
  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    av1_tokens_from_tree(ext_tx_intra_encodings[s], av1_ext_tx_intra_tree[s]);
  }
#else
  av1_tokens_from_tree(ext_tx_encodings, av1_ext_tx_tree);
#endif  // CONFIG_EXT_TX
  av1_tokens_from_tree(intra_mode_encodings, av1_intra_mode_tree);
  av1_tokens_from_tree(switchable_interp_encodings, av1_switchable_interp_tree);
  av1_tokens_from_tree(partition_encodings, av1_partition_tree);
#if !CONFIG_REF_MV
  av1_tokens_from_tree(inter_mode_encodings, av1_inter_mode_tree);
#endif

#if CONFIG_PALETTE
  av1_tokens_from_tree(palette_size_encodings, av1_palette_size_tree);
  for (s = 0; s < PALETTE_SIZES; ++s) {
    av1_tokens_from_tree(palette_color_index_encodings[s],
                         av1_palette_color_index_tree[s]);
  }
#endif  // CONFIG_PALETTE

#if CONFIG_EXT_INTRA && CONFIG_INTRA_INTERP
  av1_tokens_from_tree(intra_filter_encodings, av1_intra_filter_tree);
#endif  // CONFIG_EXT_INTRA && CONFIG_INTRA_INTERP
#if CONFIG_EXT_INTER
  av1_tokens_from_tree(interintra_mode_encodings, av1_interintra_mode_tree);
#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
  av1_tokens_from_tree(compound_type_encodings, av1_compound_type_tree);
#endif  // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
#endif  // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
  av1_tokens_from_tree(motion_mode_encodings, av1_motion_mode_tree);
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_GLOBAL_MOTION
  av1_tokens_from_tree(global_motion_types_encodings,
                       av1_global_motion_types_tree);
#endif  // CONFIG_GLOBAL_MOTION
#if CONFIG_LOOP_RESTORATION
  av1_tokens_from_tree(switchable_restore_encodings,
                       av1_switchable_restore_tree);
#endif  // CONFIG_LOOP_RESTORATION

#if CONFIG_EC_MULTISYMBOL
  /* This hack is necessary when CONFIG_DUAL_FILTER is enabled because the
     indices assigned to the five SWITCHABLE_FILTERS by an in-order traversal
     of the av1_switchable_interp_tree structure are not the consecutive
     values 0, 1, 2, 3, 4. */
  av1_indices_from_tree(av1_switchable_interp_ind, av1_switchable_interp_inv,
                        av1_switchable_interp_tree);
/* This hack is necessary because the indices assigned to the four TX_TYPES by
   an in-order traversal of the av1_ext_tx_tree structure are not the
   consecutive values 0, 1, 2, 3. */
#if CONFIG_EXT_TX
  for (s = 1; s < EXT_TX_SETS_INTRA; ++s)
    av1_indices_from_tree(av1_ext_tx_intra_ind[s], av1_ext_tx_intra_inv[s],
                          av1_ext_tx_intra_tree[s]);
  for (s = 1; s < EXT_TX_SETS_INTER; ++s)
    av1_indices_from_tree(av1_ext_tx_inter_ind[s], av1_ext_tx_inter_inv[s],
                          av1_ext_tx_inter_tree[s]);
#else
  av1_indices_from_tree(av1_ext_tx_ind, av1_ext_tx_inv, av1_ext_tx_tree);
#endif
  av1_indices_from_tree(av1_intra_mode_ind, av1_intra_mode_inv,
                        av1_intra_mode_tree);
  av1_indices_from_tree(av1_inter_mode_ind, av1_inter_mode_inv,
                        av1_inter_mode_tree);
#endif
}

static void write_intra_mode_kf(const AV1_COMMON *cm, FRAME_CONTEXT *frame_ctx,
                                const MODE_INFO *mi, const MODE_INFO *above_mi,
                                const MODE_INFO *left_mi, int block,
                                PREDICTION_MODE mode, aom_writer *w) {
#if CONFIG_INTRABC
  assert(!is_intrabc_block(&mi->mbmi));
#endif  // CONFIG_INTRABC
#if CONFIG_EC_MULTISYMBOL
  aom_write_symbol(w, av1_intra_mode_ind[mode],
                   get_y_mode_cdf(frame_ctx, mi, above_mi, left_mi, block),
                   INTRA_MODES);
  (void)cm;
#else
  av1_write_token(w, av1_intra_mode_tree,
                  get_y_mode_probs(cm, mi, above_mi, left_mi, block),
                  &intra_mode_encodings[mode]);
  (void)frame_ctx;
#endif
}

#if CONFIG_EXT_INTER
static void write_interintra_mode(aom_writer *w, INTERINTRA_MODE mode,
                                  const aom_prob *probs) {
  av1_write_token(w, av1_interintra_mode_tree, probs,
                  &interintra_mode_encodings[mode]);
}
#endif  // CONFIG_EXT_INTER

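// Signals the inter prediction mode of a block. With CONFIG_REF_MV the mode
// is coded as a cascade of binary NEWMV / ZEROMV / REFMV decisions driven by
// mode_ctx; otherwise a single tree token (or symbol) selects the mode.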
static void write_inter_mode(aom_writer *w, PREDICTION_MODE mode,
                             FRAME_CONTEXT *ec_ctx, const int16_t mode_ctx) {
#if CONFIG_REF_MV
  const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
  const aom_prob newmv_prob = ec_ctx->newmv_prob[newmv_ctx];

#define IS_NEWMV_MODE(mode) ((mode) == NEWMV)
  aom_write(w, !IS_NEWMV_MODE(mode), newmv_prob);

  if (!IS_NEWMV_MODE(mode)) {
    const int16_t zeromv_ctx = (mode_ctx >> ZEROMV_OFFSET) & ZEROMV_CTX_MASK;
    const aom_prob zeromv_prob = ec_ctx->zeromv_prob[zeromv_ctx];

    if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) {
      assert(mode == ZEROMV);
      return;
    }

    aom_write(w, mode != ZEROMV, zeromv_prob);

    if (mode != ZEROMV) {
      int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
      aom_prob refmv_prob;

      if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
      if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
      if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;

      refmv_prob = ec_ctx->refmv_prob[refmv_ctx];
      aom_write(w, mode != NEARESTMV, refmv_prob);
    }
  }

#undef IS_NEWMV_MODE

#else  // !CONFIG_REF_MV
  assert(is_inter_mode(mode));
#if CONFIG_EC_MULTISYMBOL
  aom_write_symbol(w, av1_inter_mode_ind[INTER_OFFSET(mode)],
                   ec_ctx->inter_mode_cdf[mode_ctx], INTER_MODES);
#else
  {
    const aom_prob *const inter_probs = ec_ctx->inter_mode_probs[mode_ctx];
    av1_write_token(w, av1_inter_mode_tree, inter_probs,
                    &inter_mode_encodings[INTER_OFFSET(mode)]);
  }
#endif
#endif
}

#if CONFIG_REF_MV
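// Signals which entry of the dynamic reference MV list is used: one binary
// decision per candidate until the chosen ref_mv_idx is reached.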
static void write_drl_idx(const AV1_COMMON *cm, const MB_MODE_INFO *mbmi,
                          const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

  assert(mbmi->ref_mv_idx < 3);

#if CONFIG_EXT_INTER
  if (mbmi->mode == NEWMV || mbmi->mode == NEW_NEWMV) {
#else
  if (mbmi->mode == NEWMV) {
#endif
    int idx;
    for (idx = 0; idx < 2; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];

        aom_write(w, mbmi->ref_mv_idx != idx, drl_prob);
        if (mbmi->ref_mv_idx == idx) return;
      }
    }
    return;
  }

  if (have_nearmv_in_inter_mode(mbmi->mode)) {
    int idx;
    // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
    for (idx = 1; idx < 3; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];

        aom_write(w, mbmi->ref_mv_idx != (idx - 1), drl_prob);
        if (mbmi->ref_mv_idx == (idx - 1)) return;
      }
    }
    return;
  }
}
#endif

#if CONFIG_EXT_INTER
static void write_inter_compound_mode(AV1_COMMON *cm, aom_writer *w,
                                      PREDICTION_MODE mode,
                                      const int16_t mode_ctx) {
  const aom_prob *const inter_compound_probs =
      cm->fc->inter_compound_mode_probs[mode_ctx];

  assert(is_inter_compound_mode(mode));
  av1_write_token(w, av1_inter_compound_mode_tree, inter_compound_probs,
                  &inter_compound_mode_encodings[INTER_COMPOUND_OFFSET(mode)]);
}
#endif  // CONFIG_EXT_INTER

static void encode_unsigned_max(struct aom_write_bit_buffer *wb, int data,
                                int max) {
  aom_wb_write_literal(wb, data, get_unsigned_bits(max));
}

#if !CONFIG_EC_ADAPT || \
    (CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION || CONFIG_EXT_INTER)
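// Derives binary branch counts from the symbol counts and conditionally
// signals an updated probability for each internal node of the tree.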
static void prob_diff_update(const aom_tree_index *tree,
                             aom_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/], int n,
                             int probwt, aom_writer *w) {
  int i;
  unsigned int branch_ct[32][2];

  // Assuming max number of probabilities <= 32
  assert(n <= 32);

  av1_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    av1_cond_prob_diff_update(w, &probs[i], branch_ct[i], probwt);
}
#endif

#if CONFIG_EXT_INTER || !CONFIG_EC_ADAPT
static int prob_diff_update_savings(const aom_tree_index *tree,
                                    aom_prob probs[/*n - 1*/],
                                    const unsigned int counts[/*n - 1*/], int n,
                                    int probwt) {
  int i;
  unsigned int branch_ct[32][2];
  int savings = 0;

  // Assuming max number of probabilities <= 32
  assert(n <= 32);
  av1_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i) {
    savings +=
        av1_cond_prob_diff_update_savings(&probs[i], branch_ct[i], probwt);
  }
  return savings;
}
#endif  // CONFIG_EXT_INTER || !CONFIG_EC_ADAPT

#if CONFIG_VAR_TX
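// Recursively signals the transform partitioning of an inter block: at each
// level a bit indicates whether the block keeps the current tx_size or splits
// into four sub-blocks, with recursion bounded by MAX_VARTX_DEPTH.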
static void write_tx_size_vartx(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
                                int depth, int blk_row, int blk_col,
                                aom_writer *w) {
  const int tx_row = blk_row >> 1;
  const int tx_col = blk_col >> 1;
  const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
  const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);

  int ctx = txfm_partition_context(xd->above_txfm_context + tx_col,
                                   xd->left_txfm_context + tx_row,
                                   mbmi->sb_type, tx_size);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (depth == MAX_VARTX_DEPTH) {
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size, tx_size);
    return;
  }

  if (tx_size == mbmi->inter_tx_size[tx_row][tx_col]) {
    aom_write(w, 0, cm->fc->txfm_partition_prob[ctx]);
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size, tx_size);
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsl = tx_size_wide_unit[sub_txs];
    int i;

    aom_write(w, 1, cm->fc->txfm_partition_prob[ctx]);

    if (tx_size == TX_8X8) {
      txfm_partition_update(xd->above_txfm_context + tx_col,
                            xd->left_txfm_context + tx_row, sub_txs, tx_size);
      return;
    }

    assert(bsl > 0);
    for (i = 0; i < 4; ++i) {
      int offsetr = blk_row + (i >> 1) * bsl;
      int offsetc = blk_col + (i & 0x01) * bsl;
      write_tx_size_vartx(cm, xd, mbmi, sub_txs, depth + 1, offsetr, offsetc,
                          w);
    }
  }
}

static void update_txfm_partition_probs(AV1_COMMON *cm, aom_writer *w,
                                        FRAME_COUNTS *counts, int probwt) {
  int k;
  for (k = 0; k < TXFM_PARTITION_CONTEXTS; ++k)
    av1_cond_prob_diff_update(w, &cm->fc->txfm_partition_prob[k],
                              counts->txfm_partition[k], probwt);
}
#endif

static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
#else
  FRAME_CONTEXT *ec_ctx = cm->fc;
#endif
// For sub8x8 blocks the tx_size symbol does not need to be sent
#if CONFIG_CB4X4 && (CONFIG_VAR_TX || CONFIG_EXT_TX) && CONFIG_RECT_TX
  if (bsize > BLOCK_4X4) {
#else
  if (bsize >= BLOCK_8X8) {
#endif
    const TX_SIZE tx_size = mbmi->tx_size;
    const int is_inter = is_inter_block(mbmi);
    const int tx_size_ctx = get_tx_size_context(xd);
    const int tx_size_cat = is_inter ? inter_tx_size_cat_lookup[bsize]
                                     : intra_tx_size_cat_lookup[bsize];
    const TX_SIZE coded_tx_size = txsize_sqr_up_map[tx_size];
    const int depth = tx_size_to_depth(coded_tx_size);
#if CONFIG_EXT_TX && CONFIG_RECT_TX
    assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));
    assert(
        IMPLIES(is_rect_tx(tx_size), tx_size == max_txsize_rect_lookup[bsize]));
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX

#if CONFIG_EC_MULTISYMBOL
    aom_write_symbol(w, depth, ec_ctx->tx_size_cdf[tx_size_cat][tx_size_ctx],
                     tx_size_cat + 2);
#else
    av1_write_token(w, av1_tx_size_tree[tx_size_cat],
                    ec_ctx->tx_size_probs[tx_size_cat][tx_size_ctx],
                    &tx_size_encodings[tx_size_cat][depth]);
#endif
  }
}

#if CONFIG_REF_MV
static void update_inter_mode_probs(AV1_COMMON *cm, aom_writer *w,
                                    FRAME_COUNTS *counts) {
  int i;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->newmv_prob[i], counts->newmv_mode[i],
                              probwt);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->zeromv_prob[i],
                              counts->zeromv_mode[i], probwt);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->refmv_prob[i], counts->refmv_mode[i],
                              probwt);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->drl_prob[i], counts->drl_mode[i],
                              probwt);
}
#endif

#if CONFIG_EXT_INTER
static void update_inter_compound_mode_probs(AV1_COMMON *cm, int probwt,
                                             aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i;
  int savings = 0;
  int do_update = 0;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
    savings += prob_diff_update_savings(
        av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
        cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
      prob_diff_update(
          av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
          cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES, probwt, w);
    }
  }
}
#endif  // CONFIG_EXT_INTER

static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                      int segment_id, const MODE_INFO *mi, aom_writer *w) {
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip = mi->mbmi.skip;
    aom_write(w, skip, av1_get_skip_prob(cm, xd));
    return skip;
  }
}

#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
static void write_motion_mode(const AV1_COMMON *cm, const MODE_INFO *mi,
                              aom_writer *w) {
  const MB_MODE_INFO *mbmi = &mi->mbmi;
  MOTION_MODE last_motion_mode_allowed = motion_mode_allowed(
#if CONFIG_GLOBAL_MOTION && SEPARATE_GLOBAL_MOTION
      0, cm->global_motion,
#endif  // CONFIG_GLOBAL_MOTION && SEPARATE_GLOBAL_MOTION
      mi);

  if (last_motion_mode_allowed == SIMPLE_TRANSLATION) return;
#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
  if (last_motion_mode_allowed == OBMC_CAUSAL) {
    aom_write(w, mbmi->motion_mode == OBMC_CAUSAL,
              cm->fc->obmc_prob[mbmi->sb_type]);
  } else {
#endif  // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
    av1_write_token(w, av1_motion_mode_tree,
                    cm->fc->motion_mode_prob[mbmi->sb_type],
                    &motion_mode_encodings[mbmi->motion_mode]);
#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
  }
#endif  // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
}
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION

#if CONFIG_DELTA_Q
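// Codes a delta-Q value: the magnitude is sent as a symbol (or a unary-style
// bit cascade) up to DELTA_Q_SMALL, larger magnitudes are escaped with an
// explicit bit-length plus remainder, and a sign bit follows when nonzero.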
static void write_delta_qindex(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                               int delta_qindex, aom_writer *w) {
  int sign = delta_qindex < 0;
  int abs = sign ? -delta_qindex : delta_qindex;
  int rem_bits, thr;
  int smallval = abs < DELTA_Q_SMALL ? 1 : 0;
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
#else
  FRAME_CONTEXT *ec_ctx = cm->fc;
  (void)xd;
#endif

#if CONFIG_EC_MULTISYMBOL
  aom_write_symbol(w, AOMMIN(abs, DELTA_Q_SMALL), ec_ctx->delta_q_cdf,
                   DELTA_Q_PROBS + 1);
#else
  int i = 0;
  while (i < DELTA_Q_SMALL && i <= abs) {
    int bit = (i < abs);
    aom_write(w, bit, ec_ctx->delta_q_prob[i]);
    i++;
  }
#endif

  if (!smallval) {
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}

#if !CONFIG_EC_ADAPT
static void update_delta_q_probs(AV1_COMMON *cm, aom_writer *w,
                                 FRAME_COUNTS *counts) {
  int k;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
#if CONFIG_EXT_DELTA_Q
  if (!cm->delta_q_present_flag) return;
#endif  // CONFIG_EXT_DELTA_Q
  for (k = 0; k < DELTA_Q_PROBS; ++k) {
    av1_cond_prob_diff_update(w, &cm->fc->delta_q_prob[k], counts->delta_q[k],
                              probwt);
  }
}
#endif  // CONFIG_EC_ADAPT

#if CONFIG_EXT_DELTA_Q
static void write_delta_lflevel(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                int delta_lflevel, aom_writer *w) {
  int sign = delta_lflevel < 0;
  int abs = sign ? -delta_lflevel : delta_lflevel;
  int rem_bits, thr;
  int smallval = abs < DELTA_LF_SMALL ? 1 : 0;
#if CONFIG_EC_ADAPT
  FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
  (void)cm;
#else
  FRAME_CONTEXT *ec_ctx = cm->fc;
  (void)xd;
#endif

#if CONFIG_EC_MULTISYMBOL
  aom_write_symbol(w, AOMMIN(abs, DELTA_LF_SMALL), ec_ctx->delta_lf_cdf,
                   DELTA_LF_PROBS + 1);
#else
  int i = 0;
  while (i < DELTA_LF_SMALL && i <= abs) {
    int bit = (i < abs);
    aom_write(w, bit, ec_ctx->delta_lf_prob[i]);
    i++;
  }
#endif  // CONFIG_EC_MULTISYMBOL

  if (!smallval) {
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}

#if !CONFIG_EC_ADAPT
static void update_delta_lf_probs(AV1_COMMON *cm, aom_writer *w,
                                  FRAME_COUNTS *counts) {
  int k;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  if (!cm->delta_lf_present_flag) return;
  for (k = 0; k < DELTA_LF_PROBS; ++k) {
    av1_cond_prob_diff_update(w, &cm->fc->delta_lf_prob[k], counts->delta_lf[k],
                              probwt);
  }
}
#endif  // CONFIG_EC_ADAPT
#endif  // CONFIG_EXT_DELTA_Q
#endif  // CONFIG_DELTA_Q

Yaowu Xuc27fc142016-08-22 16:08:15 -0700701 FRAME_COUNTS *counts) {
702 int k;
Thomas Davies80188d12016-10-26 16:08:35 -0700703#if CONFIG_TILE_GROUPS
704 const int probwt = cm->num_tg;
705#else
706 const int probwt = 1;
707#endif
708 for (k = 0; k < SKIP_CONTEXTS; ++k) {
709 av1_cond_prob_diff_update(w, &cm->fc->skip_probs[k], counts->skip[k],
710 probwt);
711 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700712}
713
#if !CONFIG_EC_ADAPT
static void update_switchable_interp_probs(AV1_COMMON *cm, aom_writer *w,
                                           FRAME_COUNTS *counts) {
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
#if CONFIG_TILE_GROUPS
    const int probwt = cm->num_tg;
#else
    const int probwt = 1;
#endif
    prob_diff_update(
        av1_switchable_interp_tree, cm->fc->switchable_interp_prob[j],
        counts->switchable_interp[j], SWITCHABLE_FILTERS, probwt, w);
  }
}
#endif

#if !CONFIG_EC_ADAPT
#if CONFIG_EXT_TX
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;
  int s;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    int savings = 0;
    int do_update = 0;
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_inter_ext_tx_for_txsize[s][i]) continue;
      savings += prob_diff_update_savings(
          av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
          cm->counts.inter_ext_tx[s][i],
          num_ext_tx_set[ext_tx_set_type_inter[s]], probwt);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_inter_ext_tx_for_txsize[s][i]) continue;
        prob_diff_update(av1_ext_tx_inter_tree[s],
                         cm->fc->inter_ext_tx_prob[s][i],
                         cm->counts.inter_ext_tx[s][i],
                         num_ext_tx_set[ext_tx_set_type_inter[s]], probwt, w);
      }
    }
  }

  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    int savings = 0;
    int do_update = 0;
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_intra_ext_tx_for_txsize[s][i]) continue;
      for (j = 0; j < INTRA_MODES; ++j)
        savings += prob_diff_update_savings(
            av1_ext_tx_intra_tree[s], cm->fc->intra_ext_tx_prob[s][i][j],
            cm->counts.intra_ext_tx[s][i][j],
            num_ext_tx_set[ext_tx_set_type_intra[s]], probwt);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_intra_ext_tx_for_txsize[s][i]) continue;
        for (j = 0; j < INTRA_MODES; ++j)
          prob_diff_update(av1_ext_tx_intra_tree[s],
                           cm->fc->intra_ext_tx_prob[s][i][j],
                           cm->counts.intra_ext_tx[s][i][j],
                           num_ext_tx_set[ext_tx_set_type_intra[s]], probwt, w);
      }
    }
  }
}

#else
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;

  int savings = 0;
  int do_update = 0;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    for (j = 0; j < TX_TYPES; ++j)
      savings += prob_diff_update_savings(
          av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
          cm->counts.intra_ext_tx[i][j], TX_TYPES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      for (j = 0; j < TX_TYPES; ++j) {
        prob_diff_update(av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
                         cm->counts.intra_ext_tx[i][j], TX_TYPES, probwt, w);
      }
    }
  }

  savings = 0;
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    savings +=
        prob_diff_update_savings(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                                 cm->counts.inter_ext_tx[i], TX_TYPES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      prob_diff_update(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                       cm->counts.inter_ext_tx[i], TX_TYPES, probwt, w);
    }
  }
}
#endif  // CONFIG_EXT_TX
#endif  // !CONFIG_EC_ADAPT
#if CONFIG_PALETTE
static void pack_palette_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
                                int num) {
  int i;
  const TOKENEXTRA *p = *tp;

  for (i = 0; i < num; ++i) {
    av1_write_token(
        w, av1_palette_color_index_tree[n - PALETTE_MIN_SIZE], p->context_tree,
        &palette_color_index_encodings[n - PALETTE_MIN_SIZE][p->token]);
    ++p;
  }

  *tp = p;
}
#endif  // CONFIG_PALETTE

#if !CONFIG_PVQ
#if CONFIG_SUPERTX
static void update_supertx_probs(AV1_COMMON *cm, int probwt, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;
  int savings = 0;
  int do_update = 0;
  for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
    for (j = TX_8X8; j < TX_SIZES; ++j) {
      savings += av1_cond_prob_diff_update_savings(
          &cm->fc->supertx_prob[i][j], cm->counts.supertx[i][j], probwt);
    }
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
      for (j = TX_8X8; j < TX_SIZES; ++j) {
        av1_cond_prob_diff_update(w, &cm->fc->supertx_prob[i][j],
                                  cm->counts.supertx[i][j], probwt);
      }
    }
  }
}
#endif  // CONFIG_SUPERTX

#if CONFIG_NEW_MULTISYMBOL
static INLINE void write_coeff_extra(const aom_cdf_prob *const *cdf, int val,
                                     int n, aom_writer *w) {
  // Code the extra bits from LSB to MSB in groups of 4
  int i = 0;
  int count = 0;
  while (count < n) {
    const int size = AOMMIN(n - count, 4);
    const int mask = (1 << size) - 1;
    aom_write_cdf(w, val & mask, cdf[i++], 1 << size);
    val >>= size;
    count += size;
  }
}
#else
static INLINE void write_coeff_extra(const aom_prob *pb, int value,
                                     int num_bits, int skip_bits, aom_writer *w,
                                     TOKEN_STATS *token_stats) {
  // Code the extra bits from MSB to LSB 1 bit at a time
  int index;
  for (index = skip_bits; index < num_bits; ++index) {
    const int shift = num_bits - index - 1;
    const int bb = (value >> shift) & 1;
    aom_write_record(w, bb, pb[index], token_stats);
  }
}
#endif

#if CONFIG_NEW_TOKENSET && !CONFIG_LV_MAP
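// Packs the coefficient tokens of one block: a combined head symbol (EOB /
// zero / one / more), a tail symbol for larger magnitudes, then any extra
// magnitude bits and the sign.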
static void pack_mb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop,
                           aom_bit_depth_t bit_depth, const TX_SIZE tx_size,
                           TOKEN_STATS *token_stats) {
  const TOKENEXTRA *p = *tp;
#if CONFIG_VAR_TX
  int count = 0;
  const int seg_eob = tx_size_2d[tx_size];
#endif

  while (p < stop && p->token != EOSB_TOKEN) {
    const int token = p->token;
    if (token == BLOCK_Z_TOKEN) {
      aom_write_symbol(w, 0, *p->head_cdf, HEAD_TOKENS + 1);
      p++;
      continue;
    }

    const av1_extra_bit *const extra_bits = &av1_extra_bits[token];
    if (p->eob_val == LAST_EOB) {
      // Just code a flag indicating whether the value is >1 or 1.
      aom_write_bit(w, token != ONE_TOKEN);
    } else {
      int comb_symb = 2 * AOMMIN(token, TWO_TOKEN) - p->eob_val + p->first_val;
      aom_write_symbol(w, comb_symb, *p->head_cdf, HEAD_TOKENS + p->first_val);
    }
    if (token > ONE_TOKEN) {
      aom_write_symbol(w, token - TWO_TOKEN, *p->tail_cdf, TAIL_TOKENS);
    }

    if (extra_bits->base_val) {
      const int bit_string = p->extra;
      // Length of extra bits to be written excluding the sign bit.
      const int bit_string_length = extra_bits->len;
      const int is_cat6 = (extra_bits->base_val == CAT6_MIN_VAL);
      int skip_bits = is_cat6
                          ? (int)sizeof(av1_cat6_prob) -
                                av1_get_cat6_extrabits_size(tx_size, bit_depth)
                          : 0;

      assert(!(bit_string >> (bit_string_length - skip_bits + 1)));
      if (bit_string_length > 0)
#if CONFIG_NEW_MULTISYMBOL
        write_coeff_extra(extra_bits->cdf, bit_string >> 1,
                          bit_string_length - skip_bits, w);
#else
        write_coeff_extra(extra_bits->prob, bit_string >> 1, bit_string_length,
                          skip_bits, w, token_stats);
#endif

      aom_write_bit_record(w, bit_string & 1, token_stats);
    }
    ++p;

#if CONFIG_VAR_TX
    ++count;
    if (token == EOB_TOKEN || count == seg_eob) break;
#endif
  }

  *tp = p;
}
#else  // CONFIG_NEW_TOKENSET
#if !CONFIG_LV_MAP
static void pack_mb_tokens(aom_writer *w, const TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop,
                           aom_bit_depth_t bit_depth, const TX_SIZE tx_size,
                           TOKEN_STATS *token_stats) {
  const TOKENEXTRA *p = *tp;
#if CONFIG_VAR_TX
  int count = 0;
  const int seg_eob = tx_size_2d[tx_size];
#endif

  while (p < stop && p->token != EOSB_TOKEN) {
    const int token = p->token;
#if !CONFIG_EC_MULTISYMBOL
    const struct av1_token *const coef_encoding = &av1_coef_encodings[token];
    int coef_value = coef_encoding->value;
    int coef_length = coef_encoding->len;
#endif  // !CONFIG_EC_MULTISYMBOL
    const av1_extra_bit *const extra_bits = &av1_extra_bits[token];

#if CONFIG_EC_MULTISYMBOL
    /* skip one or two nodes */
    if (!p->skip_eob_node)
      aom_write_record(w, token != EOB_TOKEN, p->context_tree[0], token_stats);
    if (token != EOB_TOKEN) {
      aom_write_record(w, token != ZERO_TOKEN, p->context_tree[1], token_stats);
      if (token != ZERO_TOKEN) {
        aom_write_symbol(w, token - ONE_TOKEN, *p->token_cdf,
                         CATEGORY6_TOKEN - ONE_TOKEN + 1);
      }
    }
#else
    /* skip one or two nodes */
    if (p->skip_eob_node)
      coef_length -= p->skip_eob_node;
    else
      aom_write_record(w, token != EOB_TOKEN, p->context_tree[0], token_stats);

    if (token != EOB_TOKEN) {
      aom_write_record(w, token != ZERO_TOKEN, p->context_tree[1], token_stats);

      if (token != ZERO_TOKEN) {
        aom_write_record(w, token != ONE_TOKEN, p->context_tree[2],
                         token_stats);

        if (token != ONE_TOKEN) {
          const int unconstrained_len = UNCONSTRAINED_NODES - p->skip_eob_node;
          aom_write_tree_record(
              w, av1_coef_con_tree,
              av1_pareto8_full[p->context_tree[PIVOT_NODE] - 1], coef_value,
              coef_length - unconstrained_len, 0, token_stats);
        }
      }
    }
#endif  // CONFIG_EC_MULTISYMBOL

    if (extra_bits->base_val) {
      const int bit_string = p->extra;
      // Length of extra bits to be written excluding the sign bit.
      const int bit_string_length = extra_bits->len;
      int skip_bits = (extra_bits->base_val == CAT6_MIN_VAL)
                          ? (int)sizeof(av1_cat6_prob) -
                                av1_get_cat6_extrabits_size(tx_size, bit_depth)
                          : 0;

      assert(!(bit_string >> (bit_string_length - skip_bits + 1)));
      if (bit_string_length > 0) {
#if CONFIG_NEW_MULTISYMBOL
        skip_bits &= ~3;
        write_coeff_extra(extra_bits->cdf, bit_string >> 1,
                          bit_string_length - skip_bits, w);
#else
        write_coeff_extra(extra_bits->prob, bit_string >> 1, bit_string_length,
                          skip_bits, w, token_stats);
#endif
      }
      aom_write_bit_record(w, bit_string & 1, token_stats);
    }
    ++p;

#if CONFIG_VAR_TX
    ++count;
    if (token == EOB_TOKEN || count == seg_eob) break;
#endif
  }

  *tp = p;
}
#endif  // !CONFIG_LV_MAP
#endif  // CONFIG_NEW_TOKENSET
#else  // !CONFIG_PVQ
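// Returns the next PVQ block record queued by the encoder and advances the
// queue's read position.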
static PVQ_INFO *get_pvq_block(PVQ_QUEUE *pvq_q) {
  PVQ_INFO *pvq;

  assert(pvq_q->curr_pos <= pvq_q->last_pos);
  assert(pvq_q->curr_pos < pvq_q->buf_len);

  pvq = pvq_q->buf + pvq_q->curr_pos;
  ++pvq_q->curr_pos;

  return pvq;
}

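// Writes the PVQ data for every transform block of the given plane: the
// AC/DC skip symbol, per-band gain/theta/codeword information, and the DC
// residue with its sign.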
1080static void pack_pvq_tokens(aom_writer *w, MACROBLOCK *const x,
1081 MACROBLOCKD *const xd, int plane, BLOCK_SIZE bsize,
1082 const TX_SIZE tx_size) {
1083 PVQ_INFO *pvq;
1084 int idx, idy;
1085 const struct macroblockd_plane *const pd = &xd->plane[plane];
1086 od_adapt_ctx *adapt;
1087 int max_blocks_wide;
1088 int max_blocks_high;
1089 int step = (1 << tx_size);
1090 const BLOCK_SIZE plane_bsize =
1091 get_plane_block_size(AOMMAX(bsize, BLOCK_8X8), pd);
1092
Yushin Choc49ef3a2017-03-13 17:27:25 -07001093 adapt = x->daala_enc.state.adapt;
Yushin Cho258a0242017-03-06 13:53:01 -08001094
1095 max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
1096 max_blocks_high = max_block_high(xd, plane_bsize, plane);
1097
1098 for (idy = 0; idy < max_blocks_high; idy += step) {
1099 for (idx = 0; idx < max_blocks_wide; idx += step) {
1100 const int is_keyframe = 0;
1101 const int encode_flip = 0;
1102 const int flip = 0;
Yushin Cho258a0242017-03-06 13:53:01 -08001103 int i;
1104 const int has_dc_skip = 1;
1105 int *exg = &adapt->pvq.pvq_exg[plane][tx_size][0];
1106 int *ext = adapt->pvq.pvq_ext + tx_size * PVQ_MAX_PARTITIONS;
1107 generic_encoder *model = adapt->pvq.pvq_param_model;
1108
1109 pvq = get_pvq_block(x->pvq_q);
1110
1111 // encode block skip info
Yushin Cho00779272017-02-21 10:38:16 -08001112 aom_write_symbol(w, pvq->ac_dc_coded,
1113 adapt->skip_cdf[2 * tx_size + (plane != 0)], 4);
Yushin Cho258a0242017-03-06 13:53:01 -08001114
1115 // AC coeffs coded?
1116 if (pvq->ac_dc_coded & AC_CODED) {
1117 assert(pvq->bs == tx_size);
1118 for (i = 0; i < pvq->nb_bands; i++) {
1119 if (i == 0 ||
1120 (!pvq->skip_rest && !(pvq->skip_dir & (1 << ((i - 1) % 3))))) {
1121 pvq_encode_partition(
Timothy B. Terriberry44bb6d02017-04-07 15:44:14 -07001122 w, pvq->qg[i], pvq->theta[i], pvq->y + pvq->off[i],
1123 pvq->size[i], pvq->k[i], model, adapt, exg + i, ext + i,
Yushin Cho258a0242017-03-06 13:53:01 -08001124 (plane != 0) * OD_TXSIZES * PVQ_MAX_PARTITIONS +
1125 pvq->bs * PVQ_MAX_PARTITIONS + i,
1126 is_keyframe, i == 0 && (i < pvq->nb_bands - 1), pvq->skip_rest,
1127 encode_flip, flip);
1128 }
1129 if (i == 0 && !pvq->skip_rest && pvq->bs > 0) {
Yushin Choac4535c2017-02-21 14:59:46 -08001130 aom_write_symbol(
Yushin Cho258a0242017-03-06 13:53:01 -08001131 w, pvq->skip_dir,
1132 &adapt->pvq
1133 .pvq_skip_dir_cdf[(plane != 0) + 2 * (pvq->bs - 1)][0],
Yushin Choac4535c2017-02-21 14:59:46 -08001134 7);
Yushin Cho258a0242017-03-06 13:53:01 -08001135 }
1136 }
1137 }
1138      // Encode residue of DC coeff, if it exists.
1139 if (!has_dc_skip || (pvq->ac_dc_coded & DC_CODED)) {
1140 generic_encode(w, &adapt->model_dc[plane],
Timothy B. Terriberry44bb6d02017-04-07 15:44:14 -07001141 abs(pvq->dq_dc_residue) - has_dc_skip,
Yushin Cho258a0242017-03-06 13:53:01 -08001142 &adapt->ex_dc[plane][pvq->bs][0], 2);
1143 }
1144 if ((pvq->ac_dc_coded & DC_CODED)) {
1145 aom_write_bit(w, pvq->dq_dc_residue < 0);
1146 }
1147 }
1148 } // for (idy = 0;
1149}
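/* Added commentary (not part of the original source): pvq->ac_dc_coded is a
 * two-bit field combining the DC_CODED and AC_CODED flags, which is why it is
 * coded above as a single 4-ary symbol against skip_cdf. The per-band loop
 * only runs when AC_CODED is set, and the sign of the DC residue is sent as a
 * raw bit only when DC_CODED is set.
 */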
Yushin Cho77bba8d2016-11-04 16:36:56 -07001150#endif  // !CONFIG_PVQ
Yushin Cho258a0242017-03-06 13:53:01 -08001151
1152#if CONFIG_VAR_TX && !CONFIG_COEF_INTERLEAVE
Yaowu Xuf883b422016-08-30 14:01:10 -07001153static void pack_txb_tokens(aom_writer *w, const TOKENEXTRA **tp,
Yushin Cho258a0242017-03-06 13:53:01 -08001154 const TOKENEXTRA *const tok_end,
1155#if CONFIG_PVQ
1156 MACROBLOCK *const x,
1157#endif
1158 MACROBLOCKD *xd, MB_MODE_INFO *mbmi, int plane,
Yaowu Xuf883b422016-08-30 14:01:10 -07001159 BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001160 int block, int blk_row, int blk_col,
Angie Chiangd4022822016-11-02 18:30:25 -07001161 TX_SIZE tx_size, TOKEN_STATS *token_stats) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001162 const struct macroblockd_plane *const pd = &xd->plane[plane];
1163 const BLOCK_SIZE bsize = txsize_to_bsize[tx_size];
1164 const int tx_row = blk_row >> (1 - pd->subsampling_y);
1165 const int tx_col = blk_col >> (1 - pd->subsampling_x);
1166 TX_SIZE plane_tx_size;
Jingning Hanf65b8702016-10-31 12:13:20 -07001167 const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
1168 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001169
1170 if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;
1171
Debargha Mukherjee2f123402016-08-30 17:43:38 -07001172 plane_tx_size =
1173 plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0]
1174 : mbmi->inter_tx_size[tx_row][tx_col];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001175
1176 if (tx_size == plane_tx_size) {
Angie Chiangd02001d2016-11-06 15:31:49 -08001177 TOKEN_STATS tmp_token_stats;
1178 init_token_stats(&tmp_token_stats);
Yushin Cho258a0242017-03-06 13:53:01 -08001179#if !CONFIG_PVQ
Angie Chiangd02001d2016-11-06 15:31:49 -08001180 pack_mb_tokens(w, tp, tok_end, bit_depth, tx_size, &tmp_token_stats);
Yushin Cho258a0242017-03-06 13:53:01 -08001181#else
1182 pack_pvq_tokens(w, x, xd, plane, bsize, tx_size);
1183#endif
Angie Chiangd02001d2016-11-06 15:31:49 -08001184#if CONFIG_RD_DEBUG
1185 token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost;
1186 token_stats->cost += tmp_token_stats.cost;
1187#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001188 } else {
Jingning Han1807fdc2016-11-08 15:17:58 -08001189 const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
1190 const int bsl = tx_size_wide_unit[sub_txs];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001191 int i;
1192
1193 assert(bsl > 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001194
1195 for (i = 0; i < 4; ++i) {
Jingning Han42a0fb32016-10-31 10:43:31 -07001196 const int offsetr = blk_row + (i >> 1) * bsl;
1197 const int offsetc = blk_col + (i & 0x01) * bsl;
Jingning Han42a0fb32016-10-31 10:43:31 -07001198 const int step = tx_size_wide_unit[sub_txs] * tx_size_high_unit[sub_txs];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001199
1200 if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue;
1201
Yushin Cho258a0242017-03-06 13:53:01 -08001202 pack_txb_tokens(w, tp, tok_end,
1203#if CONFIG_PVQ
1204 x,
1205#endif
1206 xd, mbmi, plane, plane_bsize, bit_depth, block, offsetr,
1207 offsetc, sub_txs, token_stats);
Jingning Han98d6a1f2016-11-03 12:47:47 -07001208 block += step;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001209 }
1210 }
1211}
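/* Added commentary (not part of the original source): pack_txb_tokens()
 * recurses down the transform partition tree. When the signalled
 * inter_tx_size at this position matches the tx_size being visited, the
 * tokens of that transform block are packed directly (regular or PVQ path);
 * otherwise the block splits into four children of size
 * sub_tx_size_map[tx_size], visited in raster order (i >> 1 is the row,
 * i & 1 the column), skipping children outside the visible block area.
 */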
1212#endif
1213
Yaowu Xuf883b422016-08-30 14:01:10 -07001214static void write_segment_id(aom_writer *w, const struct segmentation *seg,
Thomas9ac55082016-09-23 18:04:17 +01001215 struct segmentation_probs *segp, int segment_id) {
Nathan E. Eggef627e582016-08-19 20:06:51 -04001216 if (seg->enabled && seg->update_map) {
Nathan E. Egge31296062016-11-16 09:44:26 -05001217#if CONFIG_EC_MULTISYMBOL
Nathan E. Eggef627e582016-08-19 20:06:51 -04001218 aom_write_symbol(w, segment_id, segp->tree_cdf, MAX_SEGMENTS);
1219#else
Nathan E. Eggeeeedc632016-06-19 12:02:33 -04001220 aom_write_tree(w, av1_segment_tree, segp->tree_probs, segment_id, 3, 0);
Nathan E. Eggef627e582016-08-19 20:06:51 -04001221#endif
1222 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001223}
1224
1225// This function encodes the reference frame
Yaowu Xuf883b422016-08-30 14:01:10 -07001226static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd,
1227 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001228 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
1229 const int is_compound = has_second_ref(mbmi);
1230 const int segment_id = mbmi->segment_id;
1231
1232  // If segment-level coding of this signal is disabled (or the segment allows
1233  // multiple reference frame options), the reference frame is coded explicitly below.
1234 if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
1235 assert(!is_compound);
1236 assert(mbmi->ref_frame[0] ==
1237 get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
1238 } else {
1239    // Signal whether this block uses compound prediction
1240    // (when not already specified at the frame/segment level).
1241 if (cm->reference_mode == REFERENCE_MODE_SELECT) {
Jingning Hanc41a5492017-02-24 11:18:52 -08001242#if SUB8X8_COMP_REF
Yaowu Xuf883b422016-08-30 14:01:10 -07001243 aom_write(w, is_compound, av1_get_reference_mode_prob(cm, xd));
Jingning Hanc41a5492017-02-24 11:18:52 -08001244#else
1245 if (mbmi->sb_type >= BLOCK_8X8)
1246 aom_write(w, is_compound, av1_get_reference_mode_prob(cm, xd));
1247#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001248 } else {
1249 assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE));
1250 }
1251
1252 if (is_compound) {
1253#if CONFIG_EXT_REFS
1254 const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME ||
1255 mbmi->ref_frame[0] == LAST3_FRAME);
1256 const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME;
1257#else // CONFIG_EXT_REFS
1258 const int bit = mbmi->ref_frame[0] == GOLDEN_FRAME;
1259#endif // CONFIG_EXT_REFS
1260
Yaowu Xuf883b422016-08-30 14:01:10 -07001261 aom_write(w, bit, av1_get_pred_prob_comp_ref_p(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001262
1263#if CONFIG_EXT_REFS
1264 if (!bit) {
1265 const int bit1 = mbmi->ref_frame[0] == LAST_FRAME;
Yaowu Xuf883b422016-08-30 14:01:10 -07001266 aom_write(w, bit1, av1_get_pred_prob_comp_ref_p1(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001267 } else {
1268 const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME;
Yaowu Xuf883b422016-08-30 14:01:10 -07001269 aom_write(w, bit2, av1_get_pred_prob_comp_ref_p2(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001270 }
Yaowu Xuf883b422016-08-30 14:01:10 -07001271 aom_write(w, bit_bwd, av1_get_pred_prob_comp_bwdref_p(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001272#endif // CONFIG_EXT_REFS
1273 } else {
1274#if CONFIG_EXT_REFS
1275 const int bit0 = (mbmi->ref_frame[0] == ALTREF_FRAME ||
1276 mbmi->ref_frame[0] == BWDREF_FRAME);
Yaowu Xuf883b422016-08-30 14:01:10 -07001277 aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001278
1279 if (bit0) {
1280 const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME;
Yaowu Xuf883b422016-08-30 14:01:10 -07001281 aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001282 } else {
1283 const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME ||
1284 mbmi->ref_frame[0] == GOLDEN_FRAME);
Yaowu Xuf883b422016-08-30 14:01:10 -07001285 aom_write(w, bit2, av1_get_pred_prob_single_ref_p3(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001286
1287 if (!bit2) {
1288 const int bit3 = mbmi->ref_frame[0] != LAST_FRAME;
Yaowu Xuf883b422016-08-30 14:01:10 -07001289 aom_write(w, bit3, av1_get_pred_prob_single_ref_p4(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001290 } else {
1291 const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME;
Yaowu Xuf883b422016-08-30 14:01:10 -07001292 aom_write(w, bit4, av1_get_pred_prob_single_ref_p5(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001293 }
1294 }
1295#else // CONFIG_EXT_REFS
1296 const int bit0 = mbmi->ref_frame[0] != LAST_FRAME;
Yaowu Xuf883b422016-08-30 14:01:10 -07001297 aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001298
1299 if (bit0) {
1300 const int bit1 = mbmi->ref_frame[0] != GOLDEN_FRAME;
Yaowu Xuf883b422016-08-30 14:01:10 -07001301 aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001302 }
1303#endif // CONFIG_EXT_REFS
1304 }
1305 }
1306}
1307
hui su5db97432016-10-14 16:10:14 -07001308#if CONFIG_FILTER_INTRA
1309static void write_filter_intra_mode_info(const AV1_COMMON *const cm,
1310 const MB_MODE_INFO *const mbmi,
1311 aom_writer *w) {
Urvang Joshib100db72016-10-12 16:28:56 -07001312 if (mbmi->mode == DC_PRED
1313#if CONFIG_PALETTE
1314 && mbmi->palette_mode_info.palette_size[0] == 0
1315#endif // CONFIG_PALETTE
1316 ) {
hui su5db97432016-10-14 16:10:14 -07001317 aom_write(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[0],
1318 cm->fc->filter_intra_probs[0]);
1319 if (mbmi->filter_intra_mode_info.use_filter_intra_mode[0]) {
1320 const FILTER_INTRA_MODE mode =
1321 mbmi->filter_intra_mode_info.filter_intra_mode[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001322 write_uniform(w, FILTER_INTRA_MODES, mode);
1323 }
1324 }
1325
Urvang Joshib100db72016-10-12 16:28:56 -07001326 if (mbmi->uv_mode == DC_PRED
1327#if CONFIG_PALETTE
1328 && mbmi->palette_mode_info.palette_size[1] == 0
1329#endif // CONFIG_PALETTE
1330 ) {
hui su5db97432016-10-14 16:10:14 -07001331 aom_write(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[1],
1332 cm->fc->filter_intra_probs[1]);
1333 if (mbmi->filter_intra_mode_info.use_filter_intra_mode[1]) {
1334 const FILTER_INTRA_MODE mode =
1335 mbmi->filter_intra_mode_info.filter_intra_mode[1];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001336 write_uniform(w, FILTER_INTRA_MODES, mode);
1337 }
1338 }
1339}
hui su5db97432016-10-14 16:10:14 -07001340#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001341
hui su5db97432016-10-14 16:10:14 -07001342#if CONFIG_EXT_INTRA
hui sub4e25d22017-03-09 15:32:30 -08001343static void write_intra_angle_info(const MACROBLOCKD *xd,
1344 FRAME_CONTEXT *const ec_ctx, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001345 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
1346 const BLOCK_SIZE bsize = mbmi->sb_type;
hui sueda3d762016-12-06 16:58:23 -08001347#if CONFIG_INTRA_INTERP
Yaowu Xuf883b422016-08-30 14:01:10 -07001348 const int intra_filter_ctx = av1_get_pred_context_intra_interp(xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001349 int p_angle;
hui sueda3d762016-12-06 16:58:23 -08001350#endif // CONFIG_INTRA_INTERP
Yaowu Xuc27fc142016-08-22 16:08:15 -07001351
hui sub4e25d22017-03-09 15:32:30 -08001352 (void)ec_ctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001353 if (bsize < BLOCK_8X8) return;
1354
hui su45dc5972016-12-08 17:42:50 -08001355 if (av1_is_directional_mode(mbmi->mode, bsize)) {
hui su0a6731f2017-04-26 15:23:47 -07001356 write_uniform(w, 2 * MAX_ANGLE_DELTA + 1,
1357 MAX_ANGLE_DELTA + mbmi->angle_delta[0]);
hui sueda3d762016-12-06 16:58:23 -08001358#if CONFIG_INTRA_INTERP
hui su0a6731f2017-04-26 15:23:47 -07001359 p_angle = mode_to_angle_map[mbmi->mode] + mbmi->angle_delta[0] * ANGLE_STEP;
Yaowu Xuf883b422016-08-30 14:01:10 -07001360 if (av1_is_intra_filter_switchable(p_angle)) {
hui sub4e25d22017-03-09 15:32:30 -08001361#if CONFIG_EC_MULTISYMBOL
1362 aom_write_symbol(w, mbmi->intra_filter,
1363 ec_ctx->intra_filter_cdf[intra_filter_ctx],
1364 INTRA_FILTERS);
1365#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001366 av1_write_token(w, av1_intra_filter_tree,
hui sub4e25d22017-03-09 15:32:30 -08001367 ec_ctx->intra_filter_probs[intra_filter_ctx],
Yaowu Xuf883b422016-08-30 14:01:10 -07001368 &intra_filter_encodings[mbmi->intra_filter]);
hui sub4e25d22017-03-09 15:32:30 -08001369#endif // CONFIG_EC_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07001370 }
hui sueda3d762016-12-06 16:58:23 -08001371#endif // CONFIG_INTRA_INTERP
Yaowu Xuc27fc142016-08-22 16:08:15 -07001372 }
1373
hui su45dc5972016-12-08 17:42:50 -08001374 if (av1_is_directional_mode(mbmi->uv_mode, bsize)) {
hui su0a6731f2017-04-26 15:23:47 -07001375 write_uniform(w, 2 * MAX_ANGLE_DELTA + 1,
1376 MAX_ANGLE_DELTA + mbmi->angle_delta[1]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001377 }
1378}
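/* Worked example (added commentary, not part of the original source):
 * write_uniform() above signals the angle delta over 2 * MAX_ANGLE_DELTA + 1
 * symbols, i.e. a delta in [-MAX_ANGLE_DELTA, MAX_ANGLE_DELTA] is mapped to
 * the index MAX_ANGLE_DELTA + delta. Assuming MAX_ANGLE_DELTA == 3, a delta
 * of -2 is sent as index 1 of 7, and (with CONFIG_INTRA_INTERP) the final
 * prediction angle is mode_to_angle_map[mode] + delta * ANGLE_STEP.
 */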
1379#endif // CONFIG_EXT_INTRA
1380
Angie Chiang5678ad92016-11-21 09:38:40 -08001381static void write_mb_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd,
1382 aom_writer *w) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001383 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001384 const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
Thomas Davies77c7c402017-01-11 17:58:54 +00001385#if CONFIG_EC_ADAPT
1386 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1387#else
1388 FRAME_CONTEXT *ec_ctx = cm->fc;
1389#endif
Jingning Han203b1d32017-01-12 16:00:13 -08001390
Debargha Mukherjee0df711f2017-05-02 16:00:20 -07001391 if (!av1_is_interp_needed(xd)) {
1392#if CONFIG_DUAL_FILTER
1393 for (int i = 0; i < 4; ++i)
1394 assert(mbmi->interp_filter[i] == (cm->interp_filter == SWITCHABLE
1395 ? EIGHTTAP_REGULAR
1396 : cm->interp_filter));
1397#else
1398 assert(mbmi->interp_filter == (cm->interp_filter == SWITCHABLE
1399 ? EIGHTTAP_REGULAR
1400 : cm->interp_filter));
1401#endif // CONFIG_DUAL_FILTER
1402 return;
1403 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001404 if (cm->interp_filter == SWITCHABLE) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001405#if CONFIG_DUAL_FILTER
Jingning Han203b1d32017-01-12 16:00:13 -08001406 int dir;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001407 for (dir = 0; dir < 2; ++dir) {
1408 if (has_subpel_mv_component(xd->mi[0], xd, dir) ||
1409 (mbmi->ref_frame[1] > INTRA_FRAME &&
1410 has_subpel_mv_component(xd->mi[0], xd, dir + 2))) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001411 const int ctx = av1_get_pred_context_switchable_interp(xd, dir);
Angie Chiangb9b42a02017-01-20 12:47:36 -08001412#if CONFIG_EC_MULTISYMBOL
1413 aom_write_symbol(w, av1_switchable_interp_ind[mbmi->interp_filter[dir]],
1414 ec_ctx->switchable_interp_cdf[ctx],
1415 SWITCHABLE_FILTERS);
1416#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001417 av1_write_token(w, av1_switchable_interp_tree,
Thomas Davies77c7c402017-01-11 17:58:54 +00001418 ec_ctx->switchable_interp_prob[ctx],
Yaowu Xuf883b422016-08-30 14:01:10 -07001419 &switchable_interp_encodings[mbmi->interp_filter[dir]]);
Angie Chiangb9b42a02017-01-20 12:47:36 -08001420#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001421 ++cpi->interp_filter_selected[0][mbmi->interp_filter[dir]];
Angie Chiang38edf682017-02-21 15:13:09 -08001422 } else {
1423 assert(mbmi->interp_filter[dir] == EIGHTTAP_REGULAR);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001424 }
1425 }
1426#else
1427 {
Yaowu Xuf883b422016-08-30 14:01:10 -07001428 const int ctx = av1_get_pred_context_switchable_interp(xd);
Nathan E. Egge00b33312016-11-16 09:44:26 -05001429#if CONFIG_EC_MULTISYMBOL
Nathan E. Egge56eeaa52016-07-25 10:23:33 -04001430 aom_write_symbol(w, av1_switchable_interp_ind[mbmi->interp_filter],
Thomas Davies77c7c402017-01-11 17:58:54 +00001431 ec_ctx->switchable_interp_cdf[ctx], SWITCHABLE_FILTERS);
Nathan E. Egge4947c292016-04-26 11:37:06 -04001432#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001433 av1_write_token(w, av1_switchable_interp_tree,
Thomas Davies77c7c402017-01-11 17:58:54 +00001434 ec_ctx->switchable_interp_prob[ctx],
Yaowu Xuf883b422016-08-30 14:01:10 -07001435 &switchable_interp_encodings[mbmi->interp_filter]);
Nathan E. Egge4947c292016-04-26 11:37:06 -04001436#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001437 ++cpi->interp_filter_selected[0][mbmi->interp_filter];
1438 }
Jingning Han203b1d32017-01-12 16:00:13 -08001439#endif // CONFIG_DUAL_FILTER
Yaowu Xuc27fc142016-08-22 16:08:15 -07001440 }
1441}
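/* Added commentary (not part of the original source): when
 * av1_is_interp_needed() returns 0, the filter is fully implied (the default
 * EIGHTTAP_REGULAR when the frame filter is SWITCHABLE, otherwise the frame
 * filter itself), so nothing is written and the asserts above only verify
 * that invariant. Otherwise, with a SWITCHABLE frame filter, one filter index
 * is coded per direction that has a sub-pel MV component (dual-filter builds)
 * or a single index for the whole block.
 */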
1442
Urvang Joshib100db72016-10-12 16:28:56 -07001443#if CONFIG_PALETTE
hui sud13c24a2017-04-07 16:13:07 -07001444#if CONFIG_PALETTE_DELTA_ENCODING
1445// Write luma palette color values with delta encoding. Write the first value
1446// as a literal, and the deltas between each value and the previous one. The
1447// luma palette is sorted, so each delta is larger than 0.
1448static void write_palette_colors_y(const PALETTE_MODE_INFO *const pmi,
1449 int bit_depth, aom_writer *w) {
1450 const int n = pmi->palette_size[0];
1451 int min_bits, i;
1452 int bits = av1_get_palette_delta_bits_y(pmi, bit_depth, &min_bits);
1453 aom_write_literal(w, bits - min_bits, 2);
1454 aom_write_literal(w, pmi->palette_colors[0], bit_depth);
1455 for (i = 1; i < n; ++i) {
1456 aom_write_literal(
1457 w, pmi->palette_colors[i] - pmi->palette_colors[i - 1] - 1, bits);
1458 bits =
1459 AOMMIN(bits, av1_ceil_log2((1 << bit_depth) - pmi->palette_colors[i]));
1460 }
1461}
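/* Worked example (added commentary, not part of the original source; the
 * colour values are hypothetical): for an 8-bit, size-3 sorted luma palette
 * {10, 19, 40}, the encoder writes 10 as an 8-bit literal, then the deltas
 * 19 - 10 - 1 = 8 and 40 - 19 - 1 = 20 using 'bits' bits each. After each
 * colour, 'bits' is clamped to av1_ceil_log2(256 - colour), since the
 * remaining headroom above the last written colour keeps shrinking.
 */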
1462
1463// Write chroma palette color values. Use delta encoding for u channel as its
1464// palette is sorted. For v channel, either use delta encoding or transmit
1465// raw values directly, whichever costs less.
1466static void write_palette_colors_uv(const PALETTE_MODE_INFO *const pmi,
1467 int bit_depth, aom_writer *w) {
1468 int i;
1469 const int n = pmi->palette_size[1];
1470#if CONFIG_HIGHBITDEPTH
1471 const uint16_t *colors_u = pmi->palette_colors + PALETTE_MAX_SIZE;
1472 const uint16_t *colors_v = pmi->palette_colors + 2 * PALETTE_MAX_SIZE;
1473#else
1474 const uint8_t *colors_u = pmi->palette_colors + PALETTE_MAX_SIZE;
1475 const uint8_t *colors_v = pmi->palette_colors + 2 * PALETTE_MAX_SIZE;
1476#endif // CONFIG_HIGHBITDEPTH
1477 // U channel colors.
1478 int min_bits_u = 0;
1479 int bits_u = av1_get_palette_delta_bits_u(pmi, bit_depth, &min_bits_u);
1480 aom_write_literal(w, bits_u - min_bits_u, 2);
1481 aom_write_literal(w, colors_u[0], bit_depth);
1482 for (i = 1; i < n; ++i) {
1483 aom_write_literal(w, colors_u[i] - colors_u[i - 1], bits_u);
1484 bits_u = AOMMIN(bits_u, av1_ceil_log2(1 + (1 << bit_depth) - colors_u[i]));
1485 }
1486 // V channel colors.
1487 const int max_val = 1 << bit_depth;
1488 int zero_count = 0, min_bits_v = 0;
1489 int bits_v =
1490 av1_get_palette_delta_bits_v(pmi, bit_depth, &zero_count, &min_bits_v);
1491 const int rate_using_delta =
1492 2 + bit_depth + (bits_v + 1) * (n - 1) - zero_count;
1493 const int rate_using_raw = bit_depth * n;
1494 if (rate_using_delta < rate_using_raw) { // delta encoding
1495 aom_write_bit(w, 1);
1496 aom_write_literal(w, bits_v - min_bits_v, 2);
1497 aom_write_literal(w, colors_v[0], bit_depth);
1498 for (i = 1; i < n; ++i) {
1499 if (colors_v[i] == colors_v[i - 1]) { // No need to signal sign bit.
1500 aom_write_literal(w, 0, bits_v);
1501 continue;
1502 }
1503 const int delta = abs((int)colors_v[i] - colors_v[i - 1]);
1504 const int sign_bit = colors_v[i] < colors_v[i - 1];
1505 if (delta <= max_val - delta) {
1506 aom_write_literal(w, delta, bits_v);
1507 aom_write_bit(w, sign_bit);
1508 } else {
1509 aom_write_literal(w, max_val - delta, bits_v);
1510 aom_write_bit(w, !sign_bit);
1511 }
1512 }
1513 } else { // Transmit raw values.
1514 aom_write_bit(w, 0);
1515 for (i = 0; i < n; ++i) aom_write_literal(w, colors_v[i], bit_depth);
1516 }
1517}
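/* Worked example for the V-channel wrap-around (added commentary, not part of
 * the original source; values are hypothetical): with 8-bit depth
 * (max_val = 256), consecutive V colours 250 and 10 give delta = 240 and
 * sign_bit = 1. Since delta > max_val - delta (= 16), the encoder writes 16
 * with the inverted sign bit instead, so the transmitted magnitude is always
 * at most max_val / 2 and fits within bits_v.
 */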
1518#endif // CONFIG_PALETTE_DELTA_ENCODING
1519
Yaowu Xuf883b422016-08-30 14:01:10 -07001520static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd,
1521 const MODE_INFO *const mi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001522 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1523 const MODE_INFO *const above_mi = xd->above_mi;
1524 const MODE_INFO *const left_mi = xd->left_mi;
1525 const BLOCK_SIZE bsize = mbmi->sb_type;
1526 const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001527
1528 if (mbmi->mode == DC_PRED) {
Urvang Joshi23a61112017-01-30 14:59:27 -08001529 const int n = pmi->palette_size[0];
1530 int palette_y_mode_ctx = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001531 if (above_mi)
Urvang Joshi23a61112017-01-30 14:59:27 -08001532 palette_y_mode_ctx +=
1533 (above_mi->mbmi.palette_mode_info.palette_size[0] > 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001534 if (left_mi)
Urvang Joshi23a61112017-01-30 14:59:27 -08001535 palette_y_mode_ctx +=
1536 (left_mi->mbmi.palette_mode_info.palette_size[0] > 0);
clang-format55ce9e02017-02-15 22:27:12 -08001537 aom_write(
1538 w, n > 0,
1539 av1_default_palette_y_mode_prob[bsize - BLOCK_8X8][palette_y_mode_ctx]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001540 if (n > 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001541 av1_write_token(w, av1_palette_size_tree,
1542 av1_default_palette_y_size_prob[bsize - BLOCK_8X8],
Alex Converse92109812017-02-22 10:21:40 -08001543 &palette_size_encodings[n - PALETTE_MIN_SIZE]);
hui sud13c24a2017-04-07 16:13:07 -07001544#if CONFIG_PALETTE_DELTA_ENCODING
1545 write_palette_colors_y(pmi, cm->bit_depth, w);
1546#else
1547 int i;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001548 for (i = 0; i < n; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07001549 aom_write_literal(w, pmi->palette_colors[i], cm->bit_depth);
hui sud13c24a2017-04-07 16:13:07 -07001550#endif // CONFIG_PALETTE_DELTA_ENCODING
Yaowu Xuc27fc142016-08-22 16:08:15 -07001551 write_uniform(w, n, pmi->palette_first_color_idx[0]);
1552 }
1553 }
1554
1555 if (mbmi->uv_mode == DC_PRED) {
Urvang Joshi23a61112017-01-30 14:59:27 -08001556 const int n = pmi->palette_size[1];
1557 const int palette_uv_mode_ctx = (pmi->palette_size[0] > 0);
1558 aom_write(w, n > 0, av1_default_palette_uv_mode_prob[palette_uv_mode_ctx]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001559 if (n > 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001560 av1_write_token(w, av1_palette_size_tree,
1561 av1_default_palette_uv_size_prob[bsize - BLOCK_8X8],
Alex Converse92109812017-02-22 10:21:40 -08001562 &palette_size_encodings[n - PALETTE_MIN_SIZE]);
hui sud13c24a2017-04-07 16:13:07 -07001563#if CONFIG_PALETTE_DELTA_ENCODING
1564 write_palette_colors_uv(pmi, cm->bit_depth, w);
1565#else
1566 int i;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001567 for (i = 0; i < n; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07001568 aom_write_literal(w, pmi->palette_colors[PALETTE_MAX_SIZE + i],
1569 cm->bit_depth);
1570 aom_write_literal(w, pmi->palette_colors[2 * PALETTE_MAX_SIZE + i],
1571 cm->bit_depth);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001572 }
hui sud13c24a2017-04-07 16:13:07 -07001573#endif // CONFIG_PALETTE_DELTA_ENCODING
Yaowu Xuc27fc142016-08-22 16:08:15 -07001574 write_uniform(w, n, pmi->palette_first_color_idx[1]);
1575 }
1576 }
1577}
Urvang Joshib100db72016-10-12 16:28:56 -07001578#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001579
Angie Chiangc31ea682017-04-13 16:20:54 -07001580void av1_write_tx_type(const AV1_COMMON *const cm, const MACROBLOCKD *xd,
Jingning Han2a4da942016-11-03 18:31:30 -07001581#if CONFIG_SUPERTX
Angie Chiangc31ea682017-04-13 16:20:54 -07001582 const int supertx_enabled,
Jingning Han2a4da942016-11-03 18:31:30 -07001583#endif
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001584#if CONFIG_TXK_SEL
Angie Chiang39b06eb2017-04-14 09:52:29 -07001585 int block, int plane,
Angie Chiangc31ea682017-04-13 16:20:54 -07001586#endif
1587 aom_writer *w) {
1588 MB_MODE_INFO *mbmi = &xd->mi[0]->mbmi;
Jingning Han2a4da942016-11-03 18:31:30 -07001589 const int is_inter = is_inter_block(mbmi);
Jingning Hane67b38a2016-11-04 10:30:00 -07001590#if CONFIG_VAR_TX
1591 const TX_SIZE tx_size = is_inter ? mbmi->min_tx_size : mbmi->tx_size;
1592#else
Jingning Han641b1ad2016-11-04 09:58:36 -07001593 const TX_SIZE tx_size = mbmi->tx_size;
Urvang Joshifeb925f2016-12-05 10:37:29 -08001594#endif // CONFIG_VAR_TX
Thomas Daviescef09622017-01-11 17:27:12 +00001595#if CONFIG_EC_ADAPT
1596 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1597#else
1598 FRAME_CONTEXT *ec_ctx = cm->fc;
1599#endif
1600
Angie Chiangcd9b03f2017-04-16 13:37:13 -07001601#if !CONFIG_TXK_SEL
Angie Chiangc31ea682017-04-13 16:20:54 -07001602 TX_TYPE tx_type = mbmi->tx_type;
1603#else
1604 // Only y plane's tx_type is transmitted
Angie Chiang39b06eb2017-04-14 09:52:29 -07001605 if (plane > 0) return;
1606 PLANE_TYPE plane_type = get_plane_type(plane);
1607 TX_TYPE tx_type = get_tx_type(plane_type, xd, block, tx_size);
Angie Chiangc31ea682017-04-13 16:20:54 -07001608#endif
1609
Jingning Han2a4da942016-11-03 18:31:30 -07001610 if (!FIXED_TX_TYPE) {
1611#if CONFIG_EXT_TX
Urvang Joshifeb925f2016-12-05 10:37:29 -08001612 const TX_SIZE square_tx_size = txsize_sqr_map[tx_size];
Jingning Han2a4da942016-11-03 18:31:30 -07001613 const BLOCK_SIZE bsize = mbmi->sb_type;
Sarah Parkere68a3e42017-02-16 14:03:24 -08001614 if (get_ext_tx_types(tx_size, bsize, is_inter, cm->reduced_tx_set_used) >
1615 1 &&
Yue Cheneeacc4c2017-01-17 17:29:17 -08001616 ((!cm->seg.enabled && cm->base_qindex > 0) ||
1617 (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
Jingning Han641b1ad2016-11-04 09:58:36 -07001618 !mbmi->skip &&
Jingning Han2a4da942016-11-03 18:31:30 -07001619#if CONFIG_SUPERTX
1620 !supertx_enabled &&
1621#endif // CONFIG_SUPERTX
1622 !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
Sarah Parkere68a3e42017-02-16 14:03:24 -08001623 const int eset =
1624 get_ext_tx_set(tx_size, bsize, is_inter, cm->reduced_tx_set_used);
Jingning Han2a4da942016-11-03 18:31:30 -07001625 if (is_inter) {
Angie Chiangc31ea682017-04-13 16:20:54 -07001626 assert(ext_tx_used_inter[eset][tx_type]);
Urvang Joshifeb925f2016-12-05 10:37:29 -08001627 if (eset > 0) {
Thomas Daviesb1bedf52017-03-17 14:03:28 +00001628#if CONFIG_EC_MULTISYMBOL
Angie Chiangc31ea682017-04-13 16:20:54 -07001629 aom_write_symbol(w, av1_ext_tx_inter_ind[eset][tx_type],
Thomas Daviesb1bedf52017-03-17 14:03:28 +00001630 ec_ctx->inter_ext_tx_cdf[eset][square_tx_size],
1631 ext_tx_cnt_inter[eset]);
1632#else
Urvang Joshifeb925f2016-12-05 10:37:29 -08001633 av1_write_token(w, av1_ext_tx_inter_tree[eset],
Thomas Daviescef09622017-01-11 17:27:12 +00001634 ec_ctx->inter_ext_tx_prob[eset][square_tx_size],
Angie Chiangc31ea682017-04-13 16:20:54 -07001635 &ext_tx_inter_encodings[eset][tx_type]);
Thomas Daviesb1bedf52017-03-17 14:03:28 +00001636#endif
Urvang Joshifeb925f2016-12-05 10:37:29 -08001637 }
Jingning Han2a4da942016-11-03 18:31:30 -07001638 } else if (ALLOW_INTRA_EXT_TX) {
Angie Chiangc31ea682017-04-13 16:20:54 -07001639 assert(ext_tx_used_intra[eset][tx_type]);
Urvang Joshifeb925f2016-12-05 10:37:29 -08001640 if (eset > 0) {
Thomas Daviesb1bedf52017-03-17 14:03:28 +00001641#if CONFIG_EC_MULTISYMBOL
1642 aom_write_symbol(
Angie Chiangc31ea682017-04-13 16:20:54 -07001643 w, av1_ext_tx_intra_ind[eset][tx_type],
Thomas Daviesb1bedf52017-03-17 14:03:28 +00001644 ec_ctx->intra_ext_tx_cdf[eset][square_tx_size][mbmi->mode],
1645 ext_tx_cnt_intra[eset]);
1646#else
Urvang Joshifeb925f2016-12-05 10:37:29 -08001647 av1_write_token(
1648 w, av1_ext_tx_intra_tree[eset],
Thomas Daviescef09622017-01-11 17:27:12 +00001649 ec_ctx->intra_ext_tx_prob[eset][square_tx_size][mbmi->mode],
Angie Chiangc31ea682017-04-13 16:20:54 -07001650 &ext_tx_intra_encodings[eset][tx_type]);
Thomas Daviesb1bedf52017-03-17 14:03:28 +00001651#endif
Urvang Joshifeb925f2016-12-05 10:37:29 -08001652 }
Jingning Han2a4da942016-11-03 18:31:30 -07001653 }
1654 }
1655#else
Yue Cheneeacc4c2017-01-17 17:29:17 -08001656 if (tx_size < TX_32X32 &&
1657 ((!cm->seg.enabled && cm->base_qindex > 0) ||
1658 (cm->seg.enabled && xd->qindex[mbmi->segment_id] > 0)) &&
1659 !mbmi->skip &&
Jingning Han2a4da942016-11-03 18:31:30 -07001660#if CONFIG_SUPERTX
1661 !supertx_enabled &&
1662#endif // CONFIG_SUPERTX
1663 !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
1664 if (is_inter) {
Nathan E. Eggedfa33f22016-11-16 09:44:26 -05001665#if CONFIG_EC_MULTISYMBOL
Angie Chiangc31ea682017-04-13 16:20:54 -07001666 aom_write_symbol(w, av1_ext_tx_ind[tx_type],
Thomas Daviescef09622017-01-11 17:27:12 +00001667 ec_ctx->inter_ext_tx_cdf[tx_size], TX_TYPES);
Jingning Han2a4da942016-11-03 18:31:30 -07001668#else
Thomas Daviescef09622017-01-11 17:27:12 +00001669 av1_write_token(w, av1_ext_tx_tree, ec_ctx->inter_ext_tx_prob[tx_size],
Angie Chiangc31ea682017-04-13 16:20:54 -07001670 &ext_tx_encodings[tx_type]);
Jingning Han2a4da942016-11-03 18:31:30 -07001671#endif
1672 } else {
Nathan E. Egge29ccee02016-11-16 09:44:26 -05001673#if CONFIG_EC_MULTISYMBOL
Jingning Han2a4da942016-11-03 18:31:30 -07001674 aom_write_symbol(
Angie Chiangc31ea682017-04-13 16:20:54 -07001675 w, av1_ext_tx_ind[tx_type],
Thomas Daviescef09622017-01-11 17:27:12 +00001676 ec_ctx->intra_ext_tx_cdf[tx_size]
Jingning Han2a4da942016-11-03 18:31:30 -07001677 [intra_mode_to_tx_type_context[mbmi->mode]],
1678 TX_TYPES);
1679#else
1680 av1_write_token(
1681 w, av1_ext_tx_tree,
Thomas Daviescef09622017-01-11 17:27:12 +00001682 ec_ctx
Jingning Han641b1ad2016-11-04 09:58:36 -07001683 ->intra_ext_tx_prob[tx_size]
Jingning Han2a4da942016-11-03 18:31:30 -07001684 [intra_mode_to_tx_type_context[mbmi->mode]],
Angie Chiangc31ea682017-04-13 16:20:54 -07001685 &ext_tx_encodings[tx_type]);
Jingning Han2a4da942016-11-03 18:31:30 -07001686#endif
1687 }
Jingning Han2a4da942016-11-03 18:31:30 -07001688 }
1689#endif // CONFIG_EXT_TX
1690 }
1691}
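/* Added commentary (not part of the original source): with CONFIG_EXT_TX, the
 * transform type is only signalled when the extended-tx set selected for
 * (tx_size, bsize) offers more than one type, the effective qindex is
 * non-zero, and the block is neither skipped nor in a SEG_LVL_SKIP segment;
 * set 0 holds a single type, hence the additional eset > 0 guard. Without
 * CONFIG_EXT_TX, the type is coded only for transforms below 32x32 under the
 * same qindex/skip conditions.
 */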
1692
Jingning Hanf04254f2017-03-08 10:51:35 -08001693static void write_intra_mode(FRAME_CONTEXT *frame_ctx, BLOCK_SIZE bsize,
1694 PREDICTION_MODE mode, aom_writer *w) {
1695#if CONFIG_EC_MULTISYMBOL
1696 aom_write_symbol(w, av1_intra_mode_ind[mode],
1697 frame_ctx->y_mode_cdf[size_group_lookup[bsize]],
1698 INTRA_MODES);
1699#else
1700 av1_write_token(w, av1_intra_mode_tree,
1701 frame_ctx->y_mode_prob[size_group_lookup[bsize]],
1702 &intra_mode_encodings[mode]);
1703#endif
1704}
1705
1706static void write_intra_uv_mode(FRAME_CONTEXT *frame_ctx,
1707 PREDICTION_MODE uv_mode, PREDICTION_MODE y_mode,
1708 aom_writer *w) {
1709#if CONFIG_EC_MULTISYMBOL
1710 aom_write_symbol(w, av1_intra_mode_ind[uv_mode],
1711 frame_ctx->uv_mode_cdf[y_mode], INTRA_MODES);
1712#else
1713 av1_write_token(w, av1_intra_mode_tree, frame_ctx->uv_mode_prob[y_mode],
1714 &intra_mode_encodings[uv_mode]);
1715#endif
1716}
1717
Angie Chiangc31ea682017-04-13 16:20:54 -07001718static void pack_inter_mode_mvs(AV1_COMP *cpi, const int mi_row,
1719 const int mi_col,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001720#if CONFIG_SUPERTX
1721 int supertx_enabled,
1722#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001723 aom_writer *w) {
1724 AV1_COMMON *const cm = &cpi->common;
Thomas Davies24523292017-01-11 16:56:47 +00001725#if CONFIG_DELTA_Q || CONFIG_EC_ADAPT
Arild Fuldseth07441162016-08-15 15:07:52 +02001726 MACROBLOCK *const x = &cpi->td.mb;
1727 MACROBLOCKD *const xd = &x->e_mbd;
1728#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001729 const MACROBLOCK *x = &cpi->td.mb;
1730 const MACROBLOCKD *xd = &x->e_mbd;
Arild Fuldseth07441162016-08-15 15:07:52 +02001731#endif
Thomas Davies24523292017-01-11 16:56:47 +00001732#if CONFIG_EC_ADAPT
1733 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
1734#else
1735 FRAME_CONTEXT *ec_ctx = cm->fc;
1736#endif
1737#if !CONFIG_REF_MV
1738 nmv_context *nmvc = &ec_ctx->nmvc;
1739#endif
Angie Chiangc31ea682017-04-13 16:20:54 -07001740 const MODE_INFO *mi = xd->mi[0];
Thomas Davies24523292017-01-11 16:56:47 +00001741
Yaowu Xuc27fc142016-08-22 16:08:15 -07001742 const struct segmentation *const seg = &cm->seg;
Thomas9ac55082016-09-23 18:04:17 +01001743 struct segmentation_probs *const segp = &cm->fc->seg;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001744 const MB_MODE_INFO *const mbmi = &mi->mbmi;
1745 const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext;
1746 const PREDICTION_MODE mode = mbmi->mode;
1747 const int segment_id = mbmi->segment_id;
1748 const BLOCK_SIZE bsize = mbmi->sb_type;
1749 const int allow_hp = cm->allow_high_precision_mv;
1750 const int is_inter = is_inter_block(mbmi);
1751 const int is_compound = has_second_ref(mbmi);
1752 int skip, ref;
Jingning Han52261842016-12-14 12:17:49 -08001753#if CONFIG_CB4X4
1754 const int unify_bsize = 1;
1755#else
1756 const int unify_bsize = 0;
1757#endif
David Barker45390c12017-02-20 14:44:40 +00001758 (void)mi_row;
1759 (void)mi_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001760
1761 if (seg->update_map) {
1762 if (seg->temporal_update) {
1763 const int pred_flag = mbmi->seg_id_predicted;
Yaowu Xuf883b422016-08-30 14:01:10 -07001764 aom_prob pred_prob = av1_get_pred_prob_seg_id(segp, xd);
1765 aom_write(w, pred_flag, pred_prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001766 if (!pred_flag) write_segment_id(w, seg, segp, segment_id);
1767 } else {
1768 write_segment_id(w, seg, segp, segment_id);
1769 }
1770 }
1771
1772#if CONFIG_SUPERTX
1773 if (supertx_enabled)
1774 skip = mbmi->skip;
1775 else
1776 skip = write_skip(cm, xd, segment_id, mi, w);
1777#else
1778 skip = write_skip(cm, xd, segment_id, mi, w);
1779#endif // CONFIG_SUPERTX
Arild Fuldseth07441162016-08-15 15:07:52 +02001780#if CONFIG_DELTA_Q
1781 if (cm->delta_q_present_flag) {
Thomas Daviesf6936102016-09-05 16:51:31 +01001782 int super_block_upper_left =
1783 ((mi_row & MAX_MIB_MASK) == 0) && ((mi_col & MAX_MIB_MASK) == 0);
Alex Converse68abef82017-03-23 14:50:33 -07001784 if ((bsize != BLOCK_LARGEST || skip == 0) && super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01001785 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01001786 int reduced_delta_qindex =
1787 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00001788 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02001789 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07001790#if CONFIG_EXT_DELTA_Q
1791 if (cm->delta_lf_present_flag) {
1792 int reduced_delta_lflevel =
1793 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
1794 cm->delta_lf_res;
1795 write_delta_lflevel(cm, xd, reduced_delta_lflevel, w);
1796 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
1797 }
1798#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02001799 }
1800 }
1801#endif
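  /* Added commentary (not part of the original source): the delta-q (and,
   * with CONFIG_EXT_DELTA_Q, the delta loop-filter level) is signalled at
   * most once per superblock, for the block covering the superblock's
   * top-left mi position, and is omitted when a superblock-sized block is
   * itself skipped. The value sent is the change from the previously
   * signalled qindex scaled down by cm->delta_q_res; e.g. with
   * delta_q_res == 4, a qindex change of +8 is coded as
   * reduced_delta_qindex == 2.
   */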
Yaowu Xuc27fc142016-08-22 16:08:15 -07001802
1803#if CONFIG_SUPERTX
1804 if (!supertx_enabled)
1805#endif // CONFIG_SUPERTX
1806 if (!segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
Yaowu Xuf883b422016-08-30 14:01:10 -07001807 aom_write(w, is_inter, av1_get_intra_inter_prob(cm, xd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07001808
Jingning Han581d1692017-01-05 16:03:54 -08001809 if (cm->tx_mode == TX_MODE_SELECT &&
Jingning Han3daa4fd2017-01-20 10:33:50 -08001810#if CONFIG_CB4X4 && (CONFIG_VAR_TX || CONFIG_RECT_TX)
Jingning Hancb512282017-02-10 14:21:35 -08001811#if CONFIG_RECT_TX
Jingning Han3daa4fd2017-01-20 10:33:50 -08001812 bsize > BLOCK_4X4 &&
Jingning Han581d1692017-01-05 16:03:54 -08001813#else
Jingning Hancb512282017-02-10 14:21:35 -08001814 (bsize >= BLOCK_8X8 || (bsize > BLOCK_4X4 && is_inter)) &&
1815#endif // CONFIG_RECT_TX
1816#else
Jingning Han581d1692017-01-05 16:03:54 -08001817 bsize >= BLOCK_8X8 &&
1818#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001819#if CONFIG_SUPERTX
1820 !supertx_enabled &&
1821#endif // CONFIG_SUPERTX
1822 !(is_inter && skip) && !xd->lossless[segment_id]) {
1823#if CONFIG_VAR_TX
1824 if (is_inter) { // This implies skip flag is 0.
Sarah Parker106b3cb2017-04-21 12:13:37 -07001825 const TX_SIZE max_tx_size = get_vartx_max_txsize(mbmi, bsize);
Jingning Hanf64062f2016-11-02 16:22:18 -07001826 const int bh = tx_size_high_unit[max_tx_size];
1827 const int bw = tx_size_wide_unit[max_tx_size];
Jingning Han9ca05b72017-01-03 14:41:36 -08001828 const int width = block_size_wide[bsize] >> tx_size_wide_log2[0];
1829 const int height = block_size_high[bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001830 int idx, idy;
Jingning Hanfe45b212016-11-22 10:30:23 -08001831 for (idy = 0; idy < height; idy += bh)
1832 for (idx = 0; idx < width; idx += bw)
1833 write_tx_size_vartx(cm, xd, mbmi, max_tx_size, height != width, idy,
1834 idx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001835 } else {
Jingning Han1b1dc932016-11-09 10:55:30 -08001836 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001837 write_selected_tx_size(cm, xd, w);
1838 }
1839 } else {
Jingning Han1b1dc932016-11-09 10:55:30 -08001840 set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001841#else
1842 write_selected_tx_size(cm, xd, w);
1843#endif
1844 }
1845
1846 if (!is_inter) {
Jingning Han52261842016-12-14 12:17:49 -08001847 if (bsize >= BLOCK_8X8 || unify_bsize) {
Jingning Hanf04254f2017-03-08 10:51:35 -08001848 write_intra_mode(ec_ctx, bsize, mode, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001849 } else {
1850 int idx, idy;
1851 const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
1852 const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
1853 for (idy = 0; idy < 2; idy += num_4x4_h) {
1854 for (idx = 0; idx < 2; idx += num_4x4_w) {
1855 const PREDICTION_MODE b_mode = mi->bmi[idy * 2 + idx].as_mode;
Jingning Hanf04254f2017-03-08 10:51:35 -08001856 write_intra_mode(ec_ctx, bsize, b_mode, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001857 }
1858 }
1859 }
Jingning Han0b7cbe62017-03-08 10:22:47 -08001860#if CONFIG_CB4X4
Jingning Hand3a64432017-04-06 17:04:17 -07001861 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
1862 xd->plane[1].subsampling_y))
Jingning Hanf04254f2017-03-08 10:51:35 -08001863 write_intra_uv_mode(ec_ctx, mbmi->uv_mode, mode, w);
Jingning Han0b7cbe62017-03-08 10:22:47 -08001864#else // !CONFIG_CB4X4
Jingning Hanf04254f2017-03-08 10:51:35 -08001865 write_intra_uv_mode(ec_ctx, mbmi->uv_mode, mode, w);
Jingning Han36fe3202017-02-20 22:31:49 -08001866#endif // CONFIG_CB4X4
Jingning Han0b7cbe62017-03-08 10:22:47 -08001867
Yaowu Xuc27fc142016-08-22 16:08:15 -07001868#if CONFIG_EXT_INTRA
hui sub4e25d22017-03-09 15:32:30 -08001869 write_intra_angle_info(xd, ec_ctx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001870#endif // CONFIG_EXT_INTRA
Urvang Joshib100db72016-10-12 16:28:56 -07001871#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07001872 if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
1873 write_palette_mode_info(cm, xd, mi, w);
Urvang Joshib100db72016-10-12 16:28:56 -07001874#endif // CONFIG_PALETTE
hui su5db97432016-10-14 16:10:14 -07001875#if CONFIG_FILTER_INTRA
Jingning Han48b1cb32017-01-23 10:26:14 -08001876 if (bsize >= BLOCK_8X8 || unify_bsize)
1877 write_filter_intra_mode_info(cm, mbmi, w);
hui su5db97432016-10-14 16:10:14 -07001878#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07001879 } else {
Yaowu Xub0d0d002016-11-22 09:26:43 -08001880 int16_t mode_ctx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001881 write_ref_frames(cm, xd, w);
1882
1883#if CONFIG_REF_MV
1884#if CONFIG_EXT_INTER
1885 if (is_compound)
1886 mode_ctx = mbmi_ext->compound_mode_context[mbmi->ref_frame[0]];
1887 else
1888#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001889 mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
1890 mbmi->ref_frame, bsize, -1);
Yaowu Xub0d0d002016-11-22 09:26:43 -08001891#else // CONFIG_REF_MV
1892 mode_ctx = mbmi_ext->mode_context[mbmi->ref_frame[0]];
1893#endif // CONFIG_REF_MV
Yaowu Xuc27fc142016-08-22 16:08:15 -07001894
1895 // If segment skip is not enabled code the mode.
1896 if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
Jingning Han52261842016-12-14 12:17:49 -08001897 if (bsize >= BLOCK_8X8 || unify_bsize) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001898#if CONFIG_EXT_INTER
1899 if (is_inter_compound_mode(mode))
1900 write_inter_compound_mode(cm, w, mode, mode_ctx);
1901 else if (is_inter_singleref_mode(mode))
1902#endif // CONFIG_EXT_INTER
Zoe Liu7f24e1b2017-03-17 17:42:05 -07001903 write_inter_mode(w, mode, ec_ctx, mode_ctx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001904
1905#if CONFIG_REF_MV
David Barker9620bcd2017-03-22 14:46:42 +00001906#if CONFIG_EXT_INTER
David Barker3dfba992017-04-03 16:10:09 +01001907 if (mode == NEWMV || mode == NEW_NEWMV ||
1908 have_nearmv_in_inter_mode(mode))
David Barker404b2e82017-03-27 13:07:47 +01001909#else
1910 if (mode == NEARMV || mode == NEWMV)
1911#endif
1912 write_drl_idx(cm, mbmi, mbmi_ext, w);
David Barker9620bcd2017-03-22 14:46:42 +00001913 else
1914 assert(mbmi->ref_mv_idx == 0);
1915#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001916 }
1917 }
1918
Yue Chen19e7aa82016-11-30 14:05:39 -08001919#if !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION && !CONFIG_GLOBAL_MOTION
Angie Chiang5678ad92016-11-21 09:38:40 -08001920 write_mb_interp_filter(cpi, xd, w);
Angie Chiang1733f6b2017-01-05 09:52:20 -08001921#endif  // !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION && !CONFIG_GLOBAL_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07001922
Jingning Han52261842016-12-14 12:17:49 -08001923 if (bsize < BLOCK_8X8 && !unify_bsize) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001924 const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
1925 const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
1926 int idx, idy;
1927 for (idy = 0; idy < 2; idy += num_4x4_h) {
1928 for (idx = 0; idx < 2; idx += num_4x4_w) {
1929 const int j = idy * 2 + idx;
1930 const PREDICTION_MODE b_mode = mi->bmi[j].as_mode;
1931#if CONFIG_REF_MV
1932#if CONFIG_EXT_INTER
1933 if (!is_compound)
1934#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001935 mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context,
1936 mbmi->ref_frame, bsize, j);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001937#endif
1938#if CONFIG_EXT_INTER
1939 if (is_inter_compound_mode(b_mode))
1940 write_inter_compound_mode(cm, w, b_mode, mode_ctx);
1941 else if (is_inter_singleref_mode(b_mode))
1942#endif // CONFIG_EXT_INTER
Zoe Liu7f24e1b2017-03-17 17:42:05 -07001943 write_inter_mode(w, b_mode, ec_ctx, mode_ctx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001944
1945#if CONFIG_EXT_INTER
Zoe Liu7f24e1b2017-03-17 17:42:05 -07001946 if (b_mode == NEWMV || b_mode == NEW_NEWMV) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001947#else
1948 if (b_mode == NEWMV) {
1949#endif // CONFIG_EXT_INTER
1950 for (ref = 0; ref < 1 + is_compound; ++ref) {
1951#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001952 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1953 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1954 mbmi_ext->ref_mv_stack[rf_type], ref,
1955 mbmi->ref_mv_idx);
Thomas Davies24523292017-01-11 16:56:47 +00001956 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001957#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001958 av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[ref].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001959#if CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001960 &mi->bmi[j].ref_mv[ref].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001961#else
1962#if CONFIG_REF_MV
Yue Chen09c0a5b2017-01-20 12:48:52 -08001963 &mi->bmi[j].pred_mv[ref].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001964#else
Yaowu Xuf883b422016-08-30 14:01:10 -07001965 &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0].as_mv,
Yaowu Xuc27fc142016-08-22 16:08:15 -07001966#endif // CONFIG_REF_MV
1967#endif // CONFIG_EXT_INTER
Yaowu Xuf883b422016-08-30 14:01:10 -07001968 nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001969 }
1970 }
1971#if CONFIG_EXT_INTER
1972 else if (b_mode == NEAREST_NEWMV || b_mode == NEAR_NEWMV) {
1973#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001974 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1975 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1976 mbmi_ext->ref_mv_stack[rf_type], 1,
1977 mbmi->ref_mv_idx);
Thomas Davies24523292017-01-11 16:56:47 +00001978 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001979#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001980 av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[1].as_mv,
Yue Chen09c0a5b2017-01-20 12:48:52 -08001981 &mi->bmi[j].ref_mv[1].as_mv, nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001982 } else if (b_mode == NEW_NEARESTMV || b_mode == NEW_NEARMV) {
1983#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07001984 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
1985 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
1986 mbmi_ext->ref_mv_stack[rf_type], 0,
1987 mbmi->ref_mv_idx);
Thomas Davies24523292017-01-11 16:56:47 +00001988 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001989#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07001990 av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[0].as_mv,
Yue Chen09c0a5b2017-01-20 12:48:52 -08001991 &mi->bmi[j].ref_mv[0].as_mv, nmvc, allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001992 }
1993#endif // CONFIG_EXT_INTER
1994 }
1995 }
1996 } else {
1997#if CONFIG_EXT_INTER
Zoe Liu7f24e1b2017-03-17 17:42:05 -07001998 if (mode == NEWMV || mode == NEW_NEWMV) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001999#else
2000 if (mode == NEWMV) {
2001#endif // CONFIG_EXT_INTER
2002 int_mv ref_mv;
2003 for (ref = 0; ref < 1 + is_compound; ++ref) {
2004#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07002005 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
2006 int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
2007 mbmi_ext->ref_mv_stack[rf_type], ref,
2008 mbmi->ref_mv_idx);
Thomas Davies24523292017-01-11 16:56:47 +00002009 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002010#endif
2011 ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0];
Zoe Liu7f24e1b2017-03-17 17:42:05 -07002012 av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv, nmvc,
2013 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002014 }
2015#if CONFIG_EXT_INTER
2016 } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) {
2017#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07002018 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
2019 int nmv_ctx =
2020 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
2021 mbmi_ext->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx);
Thomas Davies24523292017-01-11 16:56:47 +00002022 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002023#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002024 av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv,
Yue Chen09c0a5b2017-01-20 12:48:52 -08002025 &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv, nmvc,
2026 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002027 } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) {
2028#if CONFIG_REF_MV
Yaowu Xu4306b6e2016-09-27 12:55:32 -07002029 int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame);
2030 int nmv_ctx =
2031 av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type],
2032 mbmi_ext->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx);
Thomas Davies24523292017-01-11 16:56:47 +00002033 nmv_context *nmvc = &ec_ctx->nmvc[nmv_ctx];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002034#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07002035 av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv,
Yue Chen09c0a5b2017-01-20 12:48:52 -08002036 &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv, nmvc,
2037 allow_hp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002038#endif // CONFIG_EXT_INTER
2039 }
2040 }
2041
2042#if CONFIG_EXT_INTER
2043 if (cpi->common.reference_mode != COMPOUND_REFERENCE &&
2044#if CONFIG_SUPERTX
2045 !supertx_enabled &&
2046#endif // CONFIG_SUPERTX
2047 is_interintra_allowed(mbmi)) {
2048 const int interintra = mbmi->ref_frame[1] == INTRA_FRAME;
2049 const int bsize_group = size_group_lookup[bsize];
Yaowu Xuf883b422016-08-30 14:01:10 -07002050 aom_write(w, interintra, cm->fc->interintra_prob[bsize_group]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002051 if (interintra) {
2052 write_interintra_mode(w, mbmi->interintra_mode,
2053 cm->fc->interintra_mode_prob[bsize_group]);
2054 if (is_interintra_wedge_used(bsize)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002055 aom_write(w, mbmi->use_wedge_interintra,
2056 cm->fc->wedge_interintra_prob[bsize]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002057 if (mbmi->use_wedge_interintra) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002058 aom_write_literal(w, mbmi->interintra_wedge_index,
2059 get_wedge_bits_lookup(bsize));
Yaowu Xuc27fc142016-08-22 16:08:15 -07002060 assert(mbmi->interintra_wedge_sign == 0);
2061 }
2062 }
2063 }
2064 }
2065#endif // CONFIG_EXT_INTER
2066
Yue Chencb60b182016-10-13 15:18:22 -07002067#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07002068#if CONFIG_SUPERTX
2069 if (!supertx_enabled)
2070#endif // CONFIG_SUPERTX
2071#if CONFIG_EXT_INTER
2072 if (mbmi->ref_frame[1] != INTRA_FRAME)
2073#endif // CONFIG_EXT_INTER
Sarah Parker19234cc2017-03-10 16:43:25 -08002074 write_motion_mode(cm, mi, w);
Yue Chencb60b182016-10-13 15:18:22 -07002075#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07002076
2077#if CONFIG_EXT_INTER
2078 if (cpi->common.reference_mode != SINGLE_REFERENCE &&
Sarah Parker6fdc8532016-11-16 17:47:13 -08002079 is_inter_compound_mode(mbmi->mode)
Yue Chencb60b182016-10-13 15:18:22 -07002080#if CONFIG_MOTION_VAR
Sarah Parker6fdc8532016-11-16 17:47:13 -08002081 && mbmi->motion_mode == SIMPLE_TRANSLATION
Yue Chencb60b182016-10-13 15:18:22 -07002082#endif // CONFIG_MOTION_VAR
Sarah Parker42d96102017-01-31 21:05:27 -08002083 && is_any_masked_compound_used(bsize)) {
Debargha Mukherjeec5f735f2017-04-26 03:25:28 +00002084#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
Sarah Parker2cf73eb2017-05-01 13:25:28 -07002085 av1_write_token(w, av1_compound_type_tree,
2086 cm->fc->compound_type_prob[bsize],
2087 &compound_type_encodings[mbmi->interinter_compound_type]);
Debargha Mukherjeec5f735f2017-04-26 03:25:28 +00002088#endif // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
2089#if CONFIG_WEDGE
Sarah Parker2cf73eb2017-05-01 13:25:28 -07002090 if (mbmi->interinter_compound_type == COMPOUND_WEDGE) {
2091 aom_write_literal(w, mbmi->wedge_index, get_wedge_bits_lookup(bsize));
2092 aom_write_bit(w, mbmi->wedge_sign);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002093 }
Debargha Mukherjeec5f735f2017-04-26 03:25:28 +00002094#endif // CONFIG_WEDGE
Sarah Parker569edda2016-12-14 14:57:38 -08002095#if CONFIG_COMPOUND_SEGMENT
Sarah Parker2cf73eb2017-05-01 13:25:28 -07002096 if (mbmi->interinter_compound_type == COMPOUND_SEG) {
2097 aom_write_literal(w, mbmi->mask_type, MAX_SEG_MASK_BITS);
Sarah Parker569edda2016-12-14 14:57:38 -08002098 }
2099#endif // CONFIG_COMPOUND_SEGMENT
Yaowu Xuc27fc142016-08-22 16:08:15 -07002100 }
2101#endif // CONFIG_EXT_INTER
2102
Yue Chen19e7aa82016-11-30 14:05:39 -08002103#if CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION
Debargha Mukherjee0df711f2017-05-02 16:00:20 -07002104 write_mb_interp_filter(cpi, xd, w);
Angie Chiang1733f6b2017-01-05 09:52:20 -08002105#endif  // CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07002106 }
2107
Angie Chiangcd9b03f2017-04-16 13:37:13 -07002108#if !CONFIG_TXK_SEL
Angie Chiangc31ea682017-04-13 16:20:54 -07002109 av1_write_tx_type(cm, xd,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002110#if CONFIG_SUPERTX
Angie Chiangc31ea682017-04-13 16:20:54 -07002111 supertx_enabled,
Nathan E. Egge93878c42016-05-03 10:01:32 -04002112#endif
Angie Chiangc31ea682017-04-13 16:20:54 -07002113 w);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07002114#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002115}
2116
Arild Fuldseth07441162016-08-15 15:07:52 +02002117#if CONFIG_DELTA_Q
Jingning Han36fe3202017-02-20 22:31:49 -08002118static void write_mb_modes_kf(AV1_COMMON *cm, MACROBLOCKD *xd, const int mi_row,
Angie Chiangc31ea682017-04-13 16:20:54 -07002119 const int mi_col, aom_writer *w) {
Arild Fuldseth07441162016-08-15 15:07:52 +02002120 int skip;
2121#else
Thomas9ac55082016-09-23 18:04:17 +01002122static void write_mb_modes_kf(AV1_COMMON *cm, const MACROBLOCKD *xd,
Jingning Han36fe3202017-02-20 22:31:49 -08002123 const int mi_row, const int mi_col,
Angie Chiangc31ea682017-04-13 16:20:54 -07002124 aom_writer *w) {
Arild Fuldseth07441162016-08-15 15:07:52 +02002125#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002126 const struct segmentation *const seg = &cm->seg;
Thomas9ac55082016-09-23 18:04:17 +01002127 struct segmentation_probs *const segp = &cm->fc->seg;
Angie Chiangc31ea682017-04-13 16:20:54 -07002128 const MODE_INFO *const mi = xd->mi[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002129 const MODE_INFO *const above_mi = xd->above_mi;
2130 const MODE_INFO *const left_mi = xd->left_mi;
2131 const MB_MODE_INFO *const mbmi = &mi->mbmi;
2132 const BLOCK_SIZE bsize = mbmi->sb_type;
Jingning Han52261842016-12-14 12:17:49 -08002133#if CONFIG_CB4X4
2134 const int unify_bsize = 1;
2135#else
2136 const int unify_bsize = 0;
2137#endif
David Barker45390c12017-02-20 14:44:40 +00002138 (void)mi_row;
2139 (void)mi_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002140
Thomas Davies1bfb5ed2017-01-11 15:28:11 +00002141#if CONFIG_EC_ADAPT
2142 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
Jingning Hanf04254f2017-03-08 10:51:35 -08002143#else
Thomas Davies1bfb5ed2017-01-11 15:28:11 +00002144 FRAME_CONTEXT *ec_ctx = cm->fc;
2145#endif
2146
Yaowu Xuc27fc142016-08-22 16:08:15 -07002147 if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id);
2148
Arild Fuldseth07441162016-08-15 15:07:52 +02002149#if CONFIG_DELTA_Q
2150 skip = write_skip(cm, xd, mbmi->segment_id, mi, w);
2151 if (cm->delta_q_present_flag) {
Alex Converse68abef82017-03-23 14:50:33 -07002152 int super_block_upper_left =
2153 ((mi_row & MAX_MIB_MASK) == 0) && ((mi_col & MAX_MIB_MASK) == 0);
2154 if ((bsize != BLOCK_LARGEST || skip == 0) && super_block_upper_left) {
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01002155 assert(mbmi->current_q_index > 0);
Thomas Daviesf6936102016-09-05 16:51:31 +01002156 int reduced_delta_qindex =
2157 (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res;
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00002158 write_delta_qindex(cm, xd, reduced_delta_qindex, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02002159 xd->prev_qindex = mbmi->current_q_index;
Fangwen Fu231fe422017-04-24 17:52:29 -07002160#if CONFIG_EXT_DELTA_Q
2161 if (cm->delta_lf_present_flag) {
2162 int reduced_delta_lflevel =
2163 (mbmi->current_delta_lf_from_base - xd->prev_delta_lf_from_base) /
2164 cm->delta_lf_res;
2165 write_delta_lflevel(cm, xd, reduced_delta_lflevel, w);
2166 xd->prev_delta_lf_from_base = mbmi->current_delta_lf_from_base;
2167 }
2168#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02002169 }
2170 }
2171#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002172 write_skip(cm, xd, mbmi->segment_id, mi, w);
Arild Fuldseth07441162016-08-15 15:07:52 +02002173#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002174
Jingning Han3daa4fd2017-01-20 10:33:50 -08002175 if (cm->tx_mode == TX_MODE_SELECT &&
2176#if CONFIG_CB4X4 && (CONFIG_VAR_TX || CONFIG_RECT_TX)
Jingning Hancb512282017-02-10 14:21:35 -08002177#if CONFIG_RECT_TX
Jingning Han3daa4fd2017-01-20 10:33:50 -08002178 bsize > BLOCK_4X4 &&
2179#else
2180 bsize >= BLOCK_8X8 &&
Jingning Hancb512282017-02-10 14:21:35 -08002181#endif // CONFIG_RECT_TX
2182#else
2183 bsize >= BLOCK_8X8 &&
Jingning Han3daa4fd2017-01-20 10:33:50 -08002184#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002185 !xd->lossless[mbmi->segment_id])
2186 write_selected_tx_size(cm, xd, w);
2187
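  // Intra block copy (IntraBC): for screen content a per-block flag is coded
  // below; when set, the block is predicted by copying from the already-coded
  // part of the same frame, the displacement vector is coded against a
  // reference DV, and no further intra mode information is written (early
  // return).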
Alex Converse28744302017-04-13 14:46:22 -07002188#if CONFIG_INTRABC
2189 if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools) {
2190 int use_intrabc = is_intrabc_block(mbmi);
2191 aom_write(w, use_intrabc, INTRABC_PROB);
2192 if (use_intrabc) {
2193 assert(mbmi->mode == DC_PRED);
2194 assert(mbmi->uv_mode == DC_PRED);
2195 int_mv dv_ref;
2196 av1_find_ref_dv(&dv_ref, mi_row, mi_col);
2197 av1_encode_dv(w, &mbmi->mv[0].as_mv, &dv_ref.as_mv, &ec_ctx->ndvc);
2198 return;
2199 }
2200 }
2201#endif // CONFIG_INTRABC
2202
Jingning Han52261842016-12-14 12:17:49 -08002203 if (bsize >= BLOCK_8X8 || unify_bsize) {
Jingning Hanf04254f2017-03-08 10:51:35 -08002204 write_intra_mode_kf(cm, ec_ctx, mi, above_mi, left_mi, 0, mbmi->mode, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002205 } else {
2206 const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
2207 const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
2208 int idx, idy;
2209
2210 for (idy = 0; idy < 2; idy += num_4x4_h) {
2211 for (idx = 0; idx < 2; idx += num_4x4_w) {
2212 const int block = idy * 2 + idx;
Jingning Hanf04254f2017-03-08 10:51:35 -08002213 write_intra_mode_kf(cm, ec_ctx, mi, above_mi, left_mi, block,
2214 mi->bmi[block].as_mode, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002215 }
2216 }
2217 }
Jingning Han0b7cbe62017-03-08 10:22:47 -08002218
2219#if CONFIG_CB4X4
Jingning Hand3a64432017-04-06 17:04:17 -07002220 if (is_chroma_reference(mi_row, mi_col, bsize, xd->plane[1].subsampling_x,
2221 xd->plane[1].subsampling_y))
Jingning Hanf04254f2017-03-08 10:51:35 -08002222 write_intra_uv_mode(ec_ctx, mbmi->uv_mode, mbmi->mode, w);
Jingning Han0b7cbe62017-03-08 10:22:47 -08002223#else // !CONFIG_CB4X4
Jingning Hanf04254f2017-03-08 10:51:35 -08002224 write_intra_uv_mode(ec_ctx, mbmi->uv_mode, mbmi->mode, w);
Jingning Han36fe3202017-02-20 22:31:49 -08002225#endif // CONFIG_CB4X4
Jingning Han0b7cbe62017-03-08 10:22:47 -08002226
Yaowu Xuc27fc142016-08-22 16:08:15 -07002227#if CONFIG_EXT_INTRA
hui sub4e25d22017-03-09 15:32:30 -08002228 write_intra_angle_info(xd, ec_ctx, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002229#endif // CONFIG_EXT_INTRA
Urvang Joshib100db72016-10-12 16:28:56 -07002230#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002231 if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools)
2232 write_palette_mode_info(cm, xd, mi, w);
Urvang Joshib100db72016-10-12 16:28:56 -07002233#endif // CONFIG_PALETTE
hui su5db97432016-10-14 16:10:14 -07002234#if CONFIG_FILTER_INTRA
Jingning Han48b1cb32017-01-23 10:26:14 -08002235 if (bsize >= BLOCK_8X8 || unify_bsize)
2236 write_filter_intra_mode_info(cm, mbmi, w);
hui su5db97432016-10-14 16:10:14 -07002237#endif // CONFIG_FILTER_INTRA
Yaowu Xuc27fc142016-08-22 16:08:15 -07002238
Angie Chiangcd9b03f2017-04-16 13:37:13 -07002239#if !CONFIG_TXK_SEL
Angie Chiangc31ea682017-04-13 16:20:54 -07002240 av1_write_tx_type(cm, xd,
Jingning Han2a4da942016-11-03 18:31:30 -07002241#if CONFIG_SUPERTX
Angie Chiangc31ea682017-04-13 16:20:54 -07002242 0,
Nathan E. Egge72762a22016-09-07 17:12:07 -04002243#endif
Angie Chiangc31ea682017-04-13 16:20:54 -07002244 w);
Angie Chiangcd9b03f2017-04-16 13:37:13 -07002245#endif // !CONFIG_TXK_SEL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002246}
2247
2248#if CONFIG_SUPERTX
2249#define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
2250 mi_row, mi_col) \
2251 write_modes_b(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col)
2252#else
2253#define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
2254 mi_row, mi_col) \
2255 write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col)
Alex Converseec6fb642016-10-19 11:31:48 -07002256#endif // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07002257
Angie Chiangd4022822016-11-02 18:30:25 -07002258#if CONFIG_RD_DEBUG
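// Debug helpers: dump_mode_info() prints a block's position, size and modes,
// and rd_token_stats_mismatch() reports blocks whose coefficient cost
// estimated during RD search does not match the cost measured while packing
// the tokens, printing both per-TX-block cost maps under CONFIG_VAR_TX.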
2259static void dump_mode_info(MODE_INFO *mi) {
2260 printf("\nmi->mbmi.mi_row == %d\n", mi->mbmi.mi_row);
2261 printf("&& mi->mbmi.mi_col == %d\n", mi->mbmi.mi_col);
2262 printf("&& mi->mbmi.sb_type == %d\n", mi->mbmi.sb_type);
2263 printf("&& mi->mbmi.tx_size == %d\n", mi->mbmi.tx_size);
2264 if (mi->mbmi.sb_type >= BLOCK_8X8) {
2265 printf("&& mi->mbmi.mode == %d\n", mi->mbmi.mode);
2266 } else {
2267 printf("&& mi->bmi[0].as_mode == %d\n", mi->bmi[0].as_mode);
2268 }
2269}
Angie Chiangd02001d2016-11-06 15:31:49 -08002270static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats,
2271 int plane) {
2272 if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) {
Angie Chiang3963d632016-11-10 18:41:40 -08002273#if CONFIG_VAR_TX
Angie Chiangd02001d2016-11-06 15:31:49 -08002274 int r, c;
Angie Chiang3963d632016-11-10 18:41:40 -08002275#endif
Angie Chiangd02001d2016-11-06 15:31:49 -08002276 printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n",
2277 plane, rd_stats->txb_coeff_cost[plane], token_stats->cost);
2278#if CONFIG_VAR_TX
2279 printf("rd txb_coeff_cost_map\n");
2280 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
2281 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
2282 printf("%d ", rd_stats->txb_coeff_cost_map[plane][r][c]);
2283 }
2284 printf("\n");
2285 }
2286
2287 printf("pack txb_coeff_cost_map\n");
2288 for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) {
2289 for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) {
2290 printf("%d ", token_stats->txb_coeff_cost_map[r][c]);
2291 }
2292 printf("\n");
2293 }
2294#endif
2295 return 1;
2296 }
2297 return 0;
2298}
Angie Chiangd4022822016-11-02 18:30:25 -07002299#endif
2300
Yue Chen64550b62017-01-12 12:18:22 -08002301static void write_mbmi_b(AV1_COMP *cpi, const TileInfo *const tile,
2302 aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002303#if CONFIG_SUPERTX
Yue Chen64550b62017-01-12 12:18:22 -08002304 int supertx_enabled,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002305#endif
Yue Chen64550b62017-01-12 12:18:22 -08002306 int mi_row, int mi_col) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002307 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002308 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
2309 MODE_INFO *m;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002310 int bh, bw;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002311 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
2312 m = xd->mi[0];
2313
2314 assert(m->mbmi.sb_type <= cm->sb_size);
2315
Jingning Hanc709e1f2016-12-06 14:48:09 -08002316 bh = mi_size_high[m->mbmi.sb_type];
2317 bw = mi_size_wide[m->mbmi.sb_type];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002318
2319 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
2320
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002321 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002322#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002323 cm->dependent_horz_tiles,
2324#endif // CONFIG_DEPENDENT_HORZTILES
2325 cm->mi_rows, cm->mi_cols);
Yushin Cho77bba8d2016-11-04 16:36:56 -07002326
Yaowu Xuc27fc142016-08-22 16:08:15 -07002327 if (frame_is_intra_only(cm)) {
Angie Chiangc31ea682017-04-13 16:20:54 -07002328 write_mb_modes_kf(cm, xd, mi_row, mi_col, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002329 } else {
2330#if CONFIG_VAR_TX
2331 xd->above_txfm_context = cm->above_txfm_context + mi_col;
2332 xd->left_txfm_context =
2333 xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
2334#endif
Angie Chiang1733f6b2017-01-05 09:52:20 -08002335#if CONFIG_DUAL_FILTER
Angie Chiang38edf682017-02-21 15:13:09 -08002336 // has_subpel_mv_component needs the ref frame buffers set up to look
2337 // up whether they are scaled. has_subpel_mv_component is in turn needed by
Yaowu Xuc27fc142016-08-22 16:08:15 -07002338 // write_switchable_interp_filter, which is called by pack_inter_mode_mvs.
2339 set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
Angie Chiang1733f6b2017-01-05 09:52:20 -08002340#endif // CONFIG_DUAL_FILTER
Yaowu Xuc27fc142016-08-22 16:08:15 -07002341#if 0
2342 // NOTE(zoeliu): For debug
2343 if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) {
2344 const PREDICTION_MODE mode = m->mbmi.mode;
2345 const int segment_id = m->mbmi.segment_id;
2346 const BLOCK_SIZE bsize = m->mbmi.sb_type;
2347
2348 // For sub8x8, simply dump out the first sub8x8 block info
2349 const PREDICTION_MODE b_mode =
2350 (bsize < BLOCK_8X8) ? m->bmi[0].as_mode : -1;
2351 const int mv_x = (bsize < BLOCK_8X8) ?
2352 m->bmi[0].as_mv[0].as_mv.row : m->mbmi.mv[0].as_mv.row;
2353 const int mv_y = (bsize < BLOCK_8X8) ?
2354 m->bmi[0].as_mv[0].as_mv.col : m->mbmi.mv[0].as_mv.col;
2355
2356 printf("Before pack_inter_mode_mvs(): "
2357 "Frame=%d, (mi_row,mi_col)=(%d,%d), "
2358 "mode=%d, segment_id=%d, bsize=%d, b_mode=%d, "
2359 "mv[0]=(%d, %d), ref[0]=%d, ref[1]=%d\n",
2360 cm->current_video_frame, mi_row, mi_col,
2361 mode, segment_id, bsize, b_mode, mv_x, mv_y,
2362 m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]);
2363 }
2364#endif // 0
Angie Chiangc31ea682017-04-13 16:20:54 -07002365 pack_inter_mode_mvs(cpi, mi_row, mi_col,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002366#if CONFIG_SUPERTX
2367 supertx_enabled,
2368#endif
2369 w);
2370 }
Yue Chen64550b62017-01-12 12:18:22 -08002371}
Yaowu Xuc27fc142016-08-22 16:08:15 -07002372
Yue Chen64550b62017-01-12 12:18:22 -08002373static void write_tokens_b(AV1_COMP *cpi, const TileInfo *const tile,
2374 aom_writer *w, const TOKENEXTRA **tok,
2375 const TOKENEXTRA *const tok_end, int mi_row,
2376 int mi_col) {
2377 AV1_COMMON *const cm = &cpi->common;
2378 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002379 MODE_INFO *const m = xd->mi[0];
2380 MB_MODE_INFO *const mbmi = &m->mbmi;
Yue Chen64550b62017-01-12 12:18:22 -08002381 int plane;
2382 int bh, bw;
Angie Chiangc8af6112017-03-16 16:11:22 -07002383#if CONFIG_PVQ || CONFIG_LV_MAP
Yushin Cho258a0242017-03-06 13:53:01 -08002384 MACROBLOCK *const x = &cpi->td.mb;
Yue Chen64550b62017-01-12 12:18:22 -08002385 (void)tok;
2386 (void)tok_end;
2387#endif
2388 xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col);
Yue Chen64550b62017-01-12 12:18:22 -08002389
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002390 assert(mbmi->sb_type <= cm->sb_size);
Yue Chen64550b62017-01-12 12:18:22 -08002391
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002392 bh = mi_size_high[mbmi->sb_type];
2393 bw = mi_size_wide[mbmi->sb_type];
Yue Chen64550b62017-01-12 12:18:22 -08002394 cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col);
2395
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002396 set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002397#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002398 cm->dependent_horz_tiles,
2399#endif // CONFIG_DEPENDENT_HORZTILES
2400 cm->mi_rows, cm->mi_cols);
Yue Chen64550b62017-01-12 12:18:22 -08002401
Urvang Joshib100db72016-10-12 16:28:56 -07002402#if CONFIG_PALETTE
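  // Palette color-index map: for each plane coded in palette mode, the
  // per-pixel color indices are packed here right after the mode info;
  // rows * cols - 1 indices are written, presumably because the first index
  // of the map is conveyed with the palette mode info itself.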
Fangwen Fub3be9262017-03-06 15:34:28 -08002403 for (plane = 0; plane <= 1; ++plane) {
2404 const uint8_t palette_size_plane =
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002405 mbmi->palette_mode_info.palette_size[plane];
Fangwen Fub3be9262017-03-06 15:34:28 -08002406 if (palette_size_plane > 0) {
Alex Converseed37d012017-04-24 11:15:24 -07002407#if CONFIG_INTRABC
2408 assert(mbmi->use_intrabc == 0);
2409#endif
Fangwen Fub3be9262017-03-06 15:34:28 -08002410 int rows, cols;
hui su9bc1d8d2017-03-24 12:36:03 -07002411 assert(mbmi->sb_type >= BLOCK_8X8);
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002412 av1_get_block_dimensions(mbmi->sb_type, plane, xd, NULL, NULL, &rows,
Fangwen Fub3be9262017-03-06 15:34:28 -08002413 &cols);
2414 assert(*tok < tok_end);
2415 pack_palette_tokens(w, tok, palette_size_plane, rows * cols - 1);
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002416 assert(*tok < tok_end + mbmi->skip);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002417 }
Fangwen Fub3be9262017-03-06 15:34:28 -08002418 }
Urvang Joshib100db72016-10-12 16:28:56 -07002419#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002420
iole moccagattaf25a4cf2016-11-11 23:57:57 -08002421#if CONFIG_COEF_INTERLEAVE
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002422 if (!mbmi->skip) {
iole moccagattaf25a4cf2016-11-11 23:57:57 -08002423 const struct macroblockd_plane *const pd_y = &xd->plane[0];
2424 const struct macroblockd_plane *const pd_c = &xd->plane[1];
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002425 const TX_SIZE tx_log2_y = mbmi->tx_size;
2426 const TX_SIZE tx_log2_c = get_uv_tx_size(mbmi, pd_c);
iole moccagattaf25a4cf2016-11-11 23:57:57 -08002427 const int tx_sz_y = (1 << tx_log2_y);
2428 const int tx_sz_c = (1 << tx_log2_c);
2429
2430 const BLOCK_SIZE plane_bsize_y =
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002431 get_plane_block_size(AOMMAX(mbmi->sb_type, 3), pd_y);
iole moccagattaf25a4cf2016-11-11 23:57:57 -08002432 const BLOCK_SIZE plane_bsize_c =
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002433 get_plane_block_size(AOMMAX(mbmi->sb_type, 3), pd_c);
iole moccagattaf25a4cf2016-11-11 23:57:57 -08002434
2435 const int num_4x4_w_y = num_4x4_blocks_wide_lookup[plane_bsize_y];
2436 const int num_4x4_w_c = num_4x4_blocks_wide_lookup[plane_bsize_c];
2437 const int num_4x4_h_y = num_4x4_blocks_high_lookup[plane_bsize_y];
2438 const int num_4x4_h_c = num_4x4_blocks_high_lookup[plane_bsize_c];
2439
2440 const int max_4x4_w_y = get_max_4x4_size(num_4x4_w_y, xd->mb_to_right_edge,
2441 pd_y->subsampling_x);
2442 const int max_4x4_h_y = get_max_4x4_size(num_4x4_h_y, xd->mb_to_bottom_edge,
2443 pd_y->subsampling_y);
2444 const int max_4x4_w_c = get_max_4x4_size(num_4x4_w_c, xd->mb_to_right_edge,
2445 pd_c->subsampling_x);
2446 const int max_4x4_h_c = get_max_4x4_size(num_4x4_h_c, xd->mb_to_bottom_edge,
2447 pd_c->subsampling_y);
2448
2449 // The max_4x4_w/h may be smaller than tx_sz in some corner cases,
2450 // i.e. when the SB is split by tile boundaries.
2451 const int tu_num_w_y = (max_4x4_w_y + tx_sz_y - 1) / tx_sz_y;
2452 const int tu_num_h_y = (max_4x4_h_y + tx_sz_y - 1) / tx_sz_y;
2453 const int tu_num_w_c = (max_4x4_w_c + tx_sz_c - 1) / tx_sz_c;
2454 const int tu_num_h_c = (max_4x4_h_c + tx_sz_c - 1) / tx_sz_c;
2455 const int tu_num_y = tu_num_w_y * tu_num_h_y;
2456 const int tu_num_c = tu_num_w_c * tu_num_h_c;
2457
2458 int tu_idx_y = 0, tu_idx_c = 0;
2459 TOKEN_STATS token_stats;
2460 init_token_stats(&token_stats);
2461
2462 assert(*tok < tok_end);
2463
2464 while (tu_idx_y < tu_num_y) {
2465 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_y, &token_stats);
2466 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
2467 (*tok)++;
2468 tu_idx_y++;
2469
2470 if (tu_idx_c < tu_num_c) {
2471 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
2472 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
2473 (*tok)++;
2474
2475 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
2476 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
2477 (*tok)++;
2478
2479 tu_idx_c++;
2480 }
2481 }
2482
2483 // In the 4:2:2 case, it's possible that Chroma has more TUs than Luma
2484 while (tu_idx_c < tu_num_c) {
2485 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
2486 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
2487 (*tok)++;
2488
2489 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats);
2490 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
2491 (*tok)++;
2492
2493 tu_idx_c++;
2494 }
2495 }
2496#else // CONFIG_COEF_INTERLEAVE
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002497 if (!mbmi->skip) {
Angie Chiang1dca1262017-03-17 16:11:52 -07002498#if !CONFIG_PVQ && !CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07002499 assert(*tok < tok_end);
Yushin Cho258a0242017-03-06 13:53:01 -08002500#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002501 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002502#if CONFIG_CB4X4
Jingning Hand3a64432017-04-06 17:04:17 -07002503 if (!is_chroma_reference(mi_row, mi_col, mbmi->sb_type,
2504 xd->plane[plane].subsampling_x,
2505 xd->plane[plane].subsampling_y)) {
Jingning Hanc20dc8e2017-02-17 15:37:28 -08002506 (*tok)++;
2507 continue;
2508 }
2509#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002510#if CONFIG_VAR_TX
2511 const struct macroblockd_plane *const pd = &xd->plane[plane];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002512 BLOCK_SIZE bsize = mbmi->sb_type;
Jingning Han9ca05b72017-01-03 14:41:36 -08002513#if CONFIG_CB4X4
Jingning Han29a41202017-04-05 11:53:32 -07002514#if CONFIG_CHROMA_2X2
Jingning Han9ca05b72017-01-03 14:41:36 -08002515 const BLOCK_SIZE plane_bsize = get_plane_block_size(bsize, pd);
2516#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07002517 const BLOCK_SIZE plane_bsize =
Jingning Han29a41202017-04-05 11:53:32 -07002518 AOMMAX(BLOCK_4X4, get_plane_block_size(bsize, pd));
2519#endif
2520#else
2521 const BLOCK_SIZE plane_bsize =
Yaowu Xuf883b422016-08-30 14:01:10 -07002522 get_plane_block_size(AOMMAX(bsize, BLOCK_8X8), pd);
Jingning Han9ca05b72017-01-03 14:41:36 -08002523#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002524
Jingning Han42a0fb32016-10-31 10:43:31 -07002525 const int num_4x4_w =
2526 block_size_wide[plane_bsize] >> tx_size_wide_log2[0];
2527 const int num_4x4_h =
2528 block_size_high[plane_bsize] >> tx_size_wide_log2[0];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002529 int row, col;
Angie Chiangd4022822016-11-02 18:30:25 -07002530 TOKEN_STATS token_stats;
Angie Chiangd02001d2016-11-06 15:31:49 -08002531 init_token_stats(&token_stats);
Angie Chiangd4022822016-11-02 18:30:25 -07002532
Jingning Hanfe45b212016-11-22 10:30:23 -08002533 if (is_inter_block(mbmi)) {
Sarah Parker106b3cb2017-04-21 12:13:37 -07002534 const TX_SIZE max_tx_size = get_vartx_max_txsize(mbmi, plane_bsize);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002535 int block = 0;
Jingning Han42a0fb32016-10-31 10:43:31 -07002536 const int step =
2537 tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size];
2538 const int bkw = tx_size_wide_unit[max_tx_size];
2539 const int bkh = tx_size_high_unit[max_tx_size];
2540 for (row = 0; row < num_4x4_h; row += bkh) {
2541 for (col = 0; col < num_4x4_w; col += bkw) {
Yushin Cho258a0242017-03-06 13:53:01 -08002542 pack_txb_tokens(w, tok, tok_end,
2543#if CONFIG_PVQ
2544 x,
2545#endif
2546 xd, mbmi, plane, plane_bsize, cm->bit_depth, block,
2547 row, col, max_tx_size, &token_stats);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002548 block += step;
2549 }
2550 }
Angie Chiangd02001d2016-11-06 15:31:49 -08002551#if CONFIG_RD_DEBUG
Angie Chiang3963d632016-11-10 18:41:40 -08002552 if (mbmi->sb_type >= BLOCK_8X8 &&
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002553 rd_token_stats_mismatch(&mbmi->rd_stats, &token_stats, plane)) {
Angie Chiangd02001d2016-11-06 15:31:49 -08002554 dump_mode_info(m);
2555 assert(0);
2556 }
Jingning Hanfe45b212016-11-22 10:30:23 -08002557#endif // CONFIG_RD_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07002558 } else {
Angie Chiang7fcfee42017-02-24 15:51:03 -08002559 TX_SIZE tx = get_tx_size(plane, xd);
Jingning Han29a41202017-04-05 11:53:32 -07002560#if CONFIG_CB4X4 && !CONFIG_CHROMA_2X2
2561 tx = AOMMAX(TX_4X4, tx);
2562#endif
Jingning Han42a0fb32016-10-31 10:43:31 -07002563 const int bkw = tx_size_wide_unit[tx];
2564 const int bkh = tx_size_high_unit[tx];
Fangwen Fu33bcd112017-02-07 16:42:41 -08002565 for (row = 0; row < num_4x4_h; row += bkh) {
2566 for (col = 0; col < num_4x4_w; col += bkw) {
Yushin Cho258a0242017-03-06 13:53:01 -08002567#if !CONFIG_PVQ
Angie Chiangd4022822016-11-02 18:30:25 -07002568 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats);
Yushin Cho258a0242017-03-06 13:53:01 -08002569#else
2570 pack_pvq_tokens(w, x, xd, plane, bsize, tx);
2571#endif
Fangwen Fu33bcd112017-02-07 16:42:41 -08002572 }
2573 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07002574 }
2575#else
Angie Chiang7fcfee42017-02-24 15:51:03 -08002576 TX_SIZE tx = get_tx_size(plane, xd);
Angie Chiangd4022822016-11-02 18:30:25 -07002577 TOKEN_STATS token_stats;
Yushin Cho258a0242017-03-06 13:53:01 -08002578#if !CONFIG_PVQ
Angie Chiang3963d632016-11-10 18:41:40 -08002579 init_token_stats(&token_stats);
Angie Chiangc8af6112017-03-16 16:11:22 -07002580#if CONFIG_LV_MAP
2581 (void)tx;
2582 av1_write_coeffs_mb(cm, x, w, plane);
2583#else // CONFIG_LV_MAP
Angie Chiangd4022822016-11-02 18:30:25 -07002584 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats);
Angie Chiangc8af6112017-03-16 16:11:22 -07002585#endif // CONFIG_LV_MAP
2586
Yushin Cho258a0242017-03-06 13:53:01 -08002587#else
2588 (void)token_stats;
2589 pack_pvq_tokens(w, x, xd, plane, mbmi->sb_type, tx);
2590#endif
Angie Chiang3963d632016-11-10 18:41:40 -08002591#if CONFIG_RD_DEBUG
2592 if (is_inter_block(mbmi) && mbmi->sb_type >= BLOCK_8X8 &&
Luc Trudeau3e32f1a2017-03-08 10:37:49 -05002593 rd_token_stats_mismatch(&mbmi->rd_stats, &token_stats, plane)) {
Angie Chiang3963d632016-11-10 18:41:40 -08002594 dump_mode_info(m);
2595 assert(0);
2596 }
Jingning Hanfe45b212016-11-22 10:30:23 -08002597#endif // CONFIG_RD_DEBUG
Yaowu Xuc27fc142016-08-22 16:08:15 -07002598#endif // CONFIG_VAR_TX
Angie Chiangd4022822016-11-02 18:30:25 -07002599
Angie Chiang1dca1262017-03-17 16:11:52 -07002600#if !CONFIG_PVQ && !CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07002601 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
2602 (*tok)++;
Yushin Cho258a0242017-03-06 13:53:01 -08002603#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002604 }
2605 }
iole moccagattaf25a4cf2016-11-11 23:57:57 -08002606#endif // CONFIG_COEF_INTERLEAVE
Yaowu Xuc27fc142016-08-22 16:08:15 -07002607}
2608
Yue Chen9ab6d712017-01-12 15:50:46 -08002609#if CONFIG_MOTION_VAR && CONFIG_NCOBMC
2610static void write_tokens_sb(AV1_COMP *cpi, const TileInfo *const tile,
2611 aom_writer *w, const TOKENEXTRA **tok,
2612 const TOKENEXTRA *const tok_end, int mi_row,
2613 int mi_col, BLOCK_SIZE bsize) {
2614 const AV1_COMMON *const cm = &cpi->common;
Yue Chenf27b1602017-01-13 11:11:43 -08002615 const int hbs = mi_size_wide[bsize] / 2;
Yue Chen9ab6d712017-01-12 15:50:46 -08002616 PARTITION_TYPE partition;
2617 BLOCK_SIZE subsize;
Yue Chenf27b1602017-01-13 11:11:43 -08002618#if CONFIG_CB4X4
2619 const int unify_bsize = 1;
2620#else
2621 const int unify_bsize = 0;
2622#endif
Yue Chen9ab6d712017-01-12 15:50:46 -08002623
2624 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2625
Yue Chenf27b1602017-01-13 11:11:43 -08002626 partition = get_partition(cm, mi_row, mi_col, bsize);
Yue Chen9ab6d712017-01-12 15:50:46 -08002627 subsize = get_subsize(bsize, partition);
2628
Yue Chenf27b1602017-01-13 11:11:43 -08002629 if (subsize < BLOCK_8X8 && !unify_bsize) {
Yue Chen9ab6d712017-01-12 15:50:46 -08002630 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2631 } else {
2632 switch (partition) {
2633 case PARTITION_NONE:
2634 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2635 break;
2636 case PARTITION_HORZ:
2637 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
Yue Chenf27b1602017-01-13 11:11:43 -08002638 if (mi_row + hbs < cm->mi_rows)
2639 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
Yue Chen9ab6d712017-01-12 15:50:46 -08002640 break;
2641 case PARTITION_VERT:
2642 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
Yue Chenf27b1602017-01-13 11:11:43 -08002643 if (mi_col + hbs < cm->mi_cols)
2644 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
Yue Chen9ab6d712017-01-12 15:50:46 -08002645 break;
2646 case PARTITION_SPLIT:
2647 write_tokens_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
Yue Chenf27b1602017-01-13 11:11:43 -08002648 write_tokens_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs,
Yue Chen9ab6d712017-01-12 15:50:46 -08002649 subsize);
Yue Chenf27b1602017-01-13 11:11:43 -08002650 write_tokens_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col,
Yue Chen9ab6d712017-01-12 15:50:46 -08002651 subsize);
Yue Chenf27b1602017-01-13 11:11:43 -08002652 write_tokens_sb(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs,
Yue Chen9ab6d712017-01-12 15:50:46 -08002653 subsize);
2654 break;
Yue Chenf27b1602017-01-13 11:11:43 -08002655#if CONFIG_EXT_PARTITION_TYPES
2656 case PARTITION_HORZ_A:
2657 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2658 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2659 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2660 break;
2661 case PARTITION_HORZ_B:
2662 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2663 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2664 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
2665 break;
2666 case PARTITION_VERT_A:
2667 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2668 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col);
2669 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2670 break;
2671 case PARTITION_VERT_B:
2672 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
2673 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + hbs);
2674 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row + hbs, mi_col + hbs);
2675 break;
2676#endif // CONFIG_EXT_PARTITION_TYPES
Yue Chen9ab6d712017-01-12 15:50:46 -08002677 default: assert(0);
2678 }
2679 }
2680}
2681#endif
2682
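// write_modes_b() writes the mode info for one block and, except under
// CONFIG_MOTION_VAR && CONFIG_NCOBMC (where the residual tokens are emitted
// in a separate write_tokens_sb() pass), the block's tokens as well.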
Yue Chen64550b62017-01-12 12:18:22 -08002683static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile,
2684 aom_writer *w, const TOKENEXTRA **tok,
2685 const TOKENEXTRA *const tok_end,
2686#if CONFIG_SUPERTX
2687 int supertx_enabled,
2688#endif
2689 int mi_row, int mi_col) {
2690 write_mbmi_b(cpi, tile, w,
2691#if CONFIG_SUPERTX
2692 supertx_enabled,
2693#endif
2694 mi_row, mi_col);
Yue Chen9ab6d712017-01-12 15:50:46 -08002695#if CONFIG_MOTION_VAR && CONFIG_NCOBMC
2696 (void)tok;
2697 (void)tok_end;
2698#else
Yue Chen64550b62017-01-12 12:18:22 -08002699#if !CONFIG_PVQ && CONFIG_SUPERTX
2700 if (!supertx_enabled)
2701#endif
2702 write_tokens_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
Yue Chen9ab6d712017-01-12 15:50:46 -08002703#endif
Yue Chen64550b62017-01-12 12:18:22 -08002704}
2705
Yaowu Xuf883b422016-08-30 14:01:10 -07002706static void write_partition(const AV1_COMMON *const cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002707 const MACROBLOCKD *const xd, int hbs, int mi_row,
2708 int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize,
Yaowu Xuf883b422016-08-30 14:01:10 -07002709 aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002710 const int has_rows = (mi_row + hbs) < cm->mi_rows;
2711 const int has_cols = (mi_col + hbs) < cm->mi_cols;
Alex Converse55c6bde2017-01-12 15:55:31 -08002712 const int is_partition_point = bsize >= BLOCK_8X8;
2713 const int ctx = is_partition_point
2714 ? partition_plane_context(xd, mi_row, mi_col,
2715#if CONFIG_UNPOISON_PARTITION_CTX
2716 has_rows, has_cols,
2717#endif
2718 bsize)
2719 : 0;
2720#if CONFIG_UNPOISON_PARTITION_CTX
Alex Converse2b9d19d2017-04-03 11:11:17 -07002721 const aom_prob *const probs =
2722 ctx < PARTITION_CONTEXTS ? cm->fc->partition_prob[ctx] : NULL;
Alex Converse55c6bde2017-01-12 15:55:31 -08002723#else
2724 const aom_prob *const probs = cm->fc->partition_prob[ctx];
2725#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002726
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00002727#if CONFIG_EC_ADAPT
2728 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
2729 (void)cm;
2730#elif CONFIG_EC_MULTISYMBOL
2731 FRAME_CONTEXT *ec_ctx = cm->fc;
2732#endif
2733
Jingning Hanbf9c6b72016-12-14 14:50:45 -08002734 if (!is_partition_point) return;
2735
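  // Partition signalling, as below: when both halves of the block lie inside
  // the frame the full partition type is coded; at the bottom or right frame
  // boundary only a binary split-versus-forced-direction decision is coded,
  // and in the bottom-right corner the split is implied and nothing is
  // written.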
Yaowu Xuc27fc142016-08-22 16:08:15 -07002736 if (has_rows && has_cols) {
2737#if CONFIG_EXT_PARTITION_TYPES
2738 if (bsize <= BLOCK_8X8)
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00002739#if CONFIG_EC_MULTISYMBOL
2740 aom_write_symbol(w, p, ec_ctx->partition_cdf[ctx], PARTITION_TYPES);
2741#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002742 av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]);
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00002743#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002744 else
Alex Converse57795a42017-03-14 12:18:25 -07002745#if CONFIG_EC_MULTISYMBOL
2746 aom_write_symbol(w, p, ec_ctx->partition_cdf[ctx], EXT_PARTITION_TYPES);
2747#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002748 av1_write_token(w, av1_ext_partition_tree, probs,
2749 &ext_partition_encodings[p]);
Alex Converse57795a42017-03-14 12:18:25 -07002750#endif // CONFIG_EC_MULTISYMBOL
Yaowu Xuc27fc142016-08-22 16:08:15 -07002751#else
Nathan E. Egge9d9eb6c2016-11-16 09:44:26 -05002752#if CONFIG_EC_MULTISYMBOL
Thomas Daviesc2ec0e42017-01-11 16:27:27 +00002753 aom_write_symbol(w, p, ec_ctx->partition_cdf[ctx], PARTITION_TYPES);
Nathan E. Eggefba2be62016-05-03 09:48:54 -04002754#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002755 av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]);
Nathan E. Eggefba2be62016-05-03 09:48:54 -04002756#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002757#endif // CONFIG_EXT_PARTITION_TYPES
2758 } else if (!has_rows && has_cols) {
2759 assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
Yaowu Xuf883b422016-08-30 14:01:10 -07002760 aom_write(w, p == PARTITION_SPLIT, probs[1]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002761 } else if (has_rows && !has_cols) {
2762 assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
Yaowu Xuf883b422016-08-30 14:01:10 -07002763 aom_write(w, p == PARTITION_SPLIT, probs[2]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002764 } else {
2765 assert(p == PARTITION_SPLIT);
2766 }
2767}
2768
2769#if CONFIG_SUPERTX
2770#define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
2771 mi_row, mi_col, bsize) \
2772 write_modes_sb(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col, \
2773 bsize)
2774#else
2775#define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \
2776 mi_row, mi_col, bsize) \
2777 write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, bsize)
Alex Converseec6fb642016-10-19 11:31:48 -07002778#endif // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07002779
Yaowu Xuf883b422016-08-30 14:01:10 -07002780static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile,
2781 aom_writer *const w, const TOKENEXTRA **tok,
Yaowu Xuc27fc142016-08-22 16:08:15 -07002782 const TOKENEXTRA *const tok_end,
2783#if CONFIG_SUPERTX
2784 int supertx_enabled,
2785#endif
2786 int mi_row, int mi_col, BLOCK_SIZE bsize) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002787 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002788 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
Jingning Hanc709e1f2016-12-06 14:48:09 -08002789 const int hbs = mi_size_wide[bsize] / 2;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002790 const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize);
2791 const BLOCK_SIZE subsize = get_subsize(bsize, partition);
Jingning Han52261842016-12-14 12:17:49 -08002792#if CONFIG_CB4X4
2793 const int unify_bsize = 1;
2794#else
2795 const int unify_bsize = 0;
2796#endif
2797
Yaowu Xuc27fc142016-08-22 16:08:15 -07002798#if CONFIG_SUPERTX
2799 const int mi_offset = mi_row * cm->mi_stride + mi_col;
2800 MB_MODE_INFO *mbmi;
2801 const int pack_token = !supertx_enabled;
2802 TX_SIZE supertx_size;
2803 int plane;
2804#endif
2805
2806 if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return;
2807
2808 write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w);
2809#if CONFIG_SUPERTX
2810 mbmi = &cm->mi_grid_visible[mi_offset]->mbmi;
2811 xd->mi = cm->mi_grid_visible + mi_offset;
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002812 set_mi_row_col(xd, tile, mi_row, mi_size_high[bsize], mi_col,
2813 mi_size_wide[bsize],
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002814#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002815 cm->dependent_horz_tiles,
2816#endif // CONFIG_DEPENDENT_HORZTILES
2817 cm->mi_rows, cm->mi_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002818 if (!supertx_enabled && !frame_is_intra_only(cm) &&
2819 partition != PARTITION_NONE && bsize <= MAX_SUPERTX_BLOCK_SIZE &&
2820 !xd->lossless[0]) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002821 aom_prob prob;
Yaowu Xuc27fc142016-08-22 16:08:15 -07002822 supertx_size = max_txsize_lookup[bsize];
2823 prob = cm->fc->supertx_prob[partition_supertx_context_lookup[partition]]
2824 [supertx_size];
2825 supertx_enabled = (xd->mi[0]->mbmi.tx_size == supertx_size);
Yaowu Xuf883b422016-08-30 14:01:10 -07002826 aom_write(w, supertx_enabled, prob);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002827 }
2828#endif // CONFIG_SUPERTX
Jingning Han52261842016-12-14 12:17:49 -08002829 if (subsize < BLOCK_8X8 && !unify_bsize) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07002830 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row,
2831 mi_col);
2832 } else {
2833 switch (partition) {
2834 case PARTITION_NONE:
2835 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2836 mi_row, mi_col);
2837 break;
2838 case PARTITION_HORZ:
2839 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2840 mi_row, mi_col);
2841 if (mi_row + hbs < cm->mi_rows)
2842 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2843 mi_row + hbs, mi_col);
2844 break;
2845 case PARTITION_VERT:
2846 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2847 mi_row, mi_col);
2848 if (mi_col + hbs < cm->mi_cols)
2849 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2850 mi_row, mi_col + hbs);
2851 break;
2852 case PARTITION_SPLIT:
2853 write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2854 mi_row, mi_col, subsize);
2855 write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2856 mi_row, mi_col + hbs, subsize);
2857 write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2858 mi_row + hbs, mi_col, subsize);
2859 write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2860 mi_row + hbs, mi_col + hbs, subsize);
2861 break;
2862#if CONFIG_EXT_PARTITION_TYPES
2863 case PARTITION_HORZ_A:
2864 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2865 mi_row, mi_col);
2866 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2867 mi_row, mi_col + hbs);
2868 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2869 mi_row + hbs, mi_col);
2870 break;
2871 case PARTITION_HORZ_B:
2872 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2873 mi_row, mi_col);
2874 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2875 mi_row + hbs, mi_col);
2876 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2877 mi_row + hbs, mi_col + hbs);
2878 break;
2879 case PARTITION_VERT_A:
2880 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2881 mi_row, mi_col);
2882 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2883 mi_row + hbs, mi_col);
2884 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2885 mi_row, mi_col + hbs);
2886 break;
2887 case PARTITION_VERT_B:
2888 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2889 mi_row, mi_col);
2890 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2891 mi_row, mi_col + hbs);
2892 write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled,
2893 mi_row + hbs, mi_col + hbs);
2894 break;
2895#endif // CONFIG_EXT_PARTITION_TYPES
2896 default: assert(0);
2897 }
2898 }
2899#if CONFIG_SUPERTX
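  // SuperTX: when enabled for this partition, a single transform covers the
  // whole partitioned block, so the skip flag, the inter transform type and
  // the coefficient tokens for all planes are written once here at the
  // partition level instead of per sub-block.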
2900 if (partition != PARTITION_NONE && supertx_enabled && pack_token) {
2901 int skip;
Jingning Han5b7706a2016-12-21 09:55:10 -08002902 const int bsw = mi_size_wide[bsize];
2903 const int bsh = mi_size_high[bsize];
Yaowu Xu41a36de2017-03-23 15:55:03 -07002904
Yaowu Xuc27fc142016-08-22 16:08:15 -07002905 xd->mi = cm->mi_grid_visible + mi_offset;
2906 supertx_size = mbmi->tx_size;
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002907 set_mi_row_col(xd, tile, mi_row, bsh, mi_col, bsw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08002908#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -07002909 cm->dependent_horz_tiles,
2910#endif // CONFIG_DEPENDENT_HORZTILES
2911 cm->mi_rows, cm->mi_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002912
2913 assert(IMPLIES(!cm->seg.enabled, mbmi->segment_id_supertx == 0));
2914 assert(mbmi->segment_id_supertx < MAX_SEGMENTS);
2915
2916 skip = write_skip(cm, xd, mbmi->segment_id_supertx, xd->mi[0], w);
2917#if CONFIG_EXT_TX
Sarah Parkere68a3e42017-02-16 14:03:24 -08002918 if (get_ext_tx_types(supertx_size, bsize, 1, cm->reduced_tx_set_used) > 1 &&
2919 !skip) {
2920 const int eset =
2921 get_ext_tx_set(supertx_size, bsize, 1, cm->reduced_tx_set_used);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002922 if (eset > 0) {
Thomas Daviesb1bedf52017-03-17 14:03:28 +00002923#if CONFIG_EC_MULTISYMBOL
Yaowu Xu41a36de2017-03-23 15:55:03 -07002924#if CONFIG_EC_ADAPT
2925 FRAME_CONTEXT *ec_ctx = xd->tile_ctx;
2926#else
2927 FRAME_CONTEXT *ec_ctx = cm->fc;
2928#endif
Thomas Daviesb1bedf52017-03-17 14:03:28 +00002929 aom_write_symbol(w, av1_ext_tx_inter_ind[eset][mbmi->tx_type],
2930 ec_ctx->inter_ext_tx_cdf[eset][supertx_size],
2931 ext_tx_cnt_inter[eset]);
2932#else
Yaowu Xuf883b422016-08-30 14:01:10 -07002933 av1_write_token(w, av1_ext_tx_inter_tree[eset],
2934 cm->fc->inter_ext_tx_prob[eset][supertx_size],
2935 &ext_tx_inter_encodings[eset][mbmi->tx_type]);
Thomas Daviesb1bedf52017-03-17 14:03:28 +00002936#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002937 }
2938 }
2939#else
2940 if (supertx_size < TX_32X32 && !skip) {
Yaowu Xuf883b422016-08-30 14:01:10 -07002941 av1_write_token(w, av1_ext_tx_tree,
2942 cm->fc->inter_ext_tx_prob[supertx_size],
2943 &ext_tx_encodings[mbmi->tx_type]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002944 }
2945#endif // CONFIG_EXT_TX
2946
2947 if (!skip) {
2948 assert(*tok < tok_end);
2949 for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
Jingning Han5b7706a2016-12-21 09:55:10 -08002950 const struct macroblockd_plane *const pd = &xd->plane[plane];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002951 const int mbmi_txb_size = txsize_to_bsize[mbmi->tx_size];
Jingning Han5b7706a2016-12-21 09:55:10 -08002952 const BLOCK_SIZE plane_bsize = get_plane_block_size(mbmi_txb_size, pd);
2953
2954 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane);
2955 const int max_blocks_high = max_block_high(xd, plane_bsize, plane);
2956
Yaowu Xuc27fc142016-08-22 16:08:15 -07002957 int row, col;
Angie Chiang7fcfee42017-02-24 15:51:03 -08002958 TX_SIZE tx = get_tx_size(plane, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002959 BLOCK_SIZE txb_size = txsize_to_bsize[tx];
Jingning Han5b7706a2016-12-21 09:55:10 -08002960
2961 const int stepr = tx_size_high_unit[txb_size];
2962 const int stepc = tx_size_wide_unit[txb_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -07002963
Angie Chiangd4022822016-11-02 18:30:25 -07002964 TOKEN_STATS token_stats;
2965 token_stats.cost = 0;
Jingning Han5b7706a2016-12-21 09:55:10 -08002966 for (row = 0; row < max_blocks_high; row += stepr)
2967 for (col = 0; col < max_blocks_wide; col += stepc)
Angie Chiangd4022822016-11-02 18:30:25 -07002968 pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats);
Yaowu Xuc27fc142016-08-22 16:08:15 -07002969 assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN);
2970 (*tok)++;
2971 }
2972 }
Jingning Hane92bf1c2016-11-21 10:41:56 -08002973#if CONFIG_VAR_TX
2974 xd->above_txfm_context = cm->above_txfm_context + mi_col;
2975 xd->left_txfm_context =
2976 xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK);
2977 set_txfm_ctxs(xd->mi[0]->mbmi.tx_size, bsw, bsh, skip, xd);
2978#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07002979 }
2980#endif // CONFIG_SUPERTX
2981
2982// update partition context
2983#if CONFIG_EXT_PARTITION_TYPES
2984 update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition);
2985#else
2986 if (bsize >= BLOCK_8X8 &&
2987 (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
2988 update_partition_context(xd, mi_row, mi_col, subsize, bsize);
David Barkerf8935c92016-10-26 14:54:06 +01002989#endif // CONFIG_EXT_PARTITION_TYPES
Yaowu Xuc27fc142016-08-22 16:08:15 -07002990
Jean-Marc Valin01435132017-02-18 14:12:53 -05002991#if CONFIG_CDEF
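  // CDEF strength: one cdef_strength index of cm->cdef_bits literal bits is
  // written per filter superblock (64x64, or 128x128 with
  // CONFIG_EXT_PARTITION), and only when at least one block inside it is not
  // skipped.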
David Barker9739f362016-11-10 09:29:32 +00002992#if CONFIG_EXT_PARTITION
2993 if (cm->sb_size == BLOCK_128X128 && bsize == BLOCK_128X128 &&
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01002994 !sb_all_skip(cm, mi_row, mi_col)) {
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01002995 aom_write_literal(w, cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]
Jean-Marc Valin1df427e2017-03-23 18:32:57 -04002996 ->mbmi.cdef_strength,
2997 cm->cdef_bits);
David Barker9739f362016-11-10 09:29:32 +00002998 } else if (cm->sb_size == BLOCK_64X64 && bsize == BLOCK_64X64 &&
2999#else
3000 if (bsize == BLOCK_64X64 &&
3001#endif // CONFIG_EXT_PARTITION
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01003002 !sb_all_skip(cm, mi_row, mi_col)) {
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04003003 if (cm->cdef_bits != 0)
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01003004 aom_write_literal(w, cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04003005 ->mbmi.cdef_strength,
3006 cm->cdef_bits);
Steinar Midtskogen5d56f4d2016-09-25 09:23:16 +02003007 }
3008#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003009}
3010
Yaowu Xuf883b422016-08-30 14:01:10 -07003011static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile,
3012 aom_writer *const w, const TOKENEXTRA **tok,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003013 const TOKENEXTRA *const tok_end) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003014 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003015 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
3016 const int mi_row_start = tile->mi_row_start;
3017 const int mi_row_end = tile->mi_row_end;
3018 const int mi_col_start = tile->mi_col_start;
3019 const int mi_col_end = tile->mi_col_end;
3020 int mi_row, mi_col;
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003021
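  // Per-tile state: the above context is reset at the top of the tile (unless
  // dependent horizontal tiles carry it over from the tile above), the left
  // context is reset at the start of every superblock row, and the delta-Q
  // reference q index restarts from the frame's base q index.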
3022#if CONFIG_DEPENDENT_HORZTILES
Fangwen Fu73126c02017-02-08 22:37:47 -08003023#if CONFIG_TILE_GROUPS
3024 if (!cm->dependent_horz_tiles || mi_row_start == 0 ||
3025 tile->tg_horz_boundary) {
3026#else
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003027 if (!cm->dependent_horz_tiles || mi_row_start == 0) {
Fangwen Fu73126c02017-02-08 22:37:47 -08003028#endif
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003029 av1_zero_above_context(cm, mi_col_start, mi_col_end);
3030 }
3031#else
Yaowu Xuf883b422016-08-30 14:01:10 -07003032 av1_zero_above_context(cm, mi_col_start, mi_col_end);
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003033#endif
Yushin Cho77bba8d2016-11-04 16:36:56 -07003034#if CONFIG_PVQ
3035 assert(cpi->td.mb.pvq_q->curr_pos == 0);
3036#endif
Arild Fuldseth07441162016-08-15 15:07:52 +02003037#if CONFIG_DELTA_Q
3038 if (cpi->common.delta_q_present_flag) {
3039 xd->prev_qindex = cpi->common.base_qindex;
Fangwen Fu231fe422017-04-24 17:52:29 -07003040#if CONFIG_EXT_DELTA_Q
3041 if (cpi->common.delta_lf_present_flag) {
3042 xd->prev_delta_lf_from_base = 0;
3043 }
3044#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02003045 }
3046#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003047
3048 for (mi_row = mi_row_start; mi_row < mi_row_end; mi_row += cm->mib_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003049 av1_zero_left_context(xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003050
3051 for (mi_col = mi_col_start; mi_col < mi_col_end; mi_col += cm->mib_size) {
3052 write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, 0, mi_row, mi_col,
3053 cm->sb_size);
Yue Chen9ab6d712017-01-12 15:50:46 -08003054#if CONFIG_MOTION_VAR && CONFIG_NCOBMC
3055 write_tokens_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, cm->sb_size);
3056#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003057 }
3058 }
Yushin Cho77bba8d2016-11-04 16:36:56 -07003059#if CONFIG_PVQ
3060 // Check that the number of PVQ blocks encoded and the number written to
3061 // the bitstream are the same
3062 assert(cpi->td.mb.pvq_q->curr_pos == cpi->td.mb.pvq_q->last_pos);
3063 // Reset curr_pos in case we repack the bitstream
3064 cpi->td.mb.pvq_q->curr_pos = 0;
3065#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003066}
3067
Angie Chiang800df032017-03-22 11:14:12 -07003068#if !CONFIG_LV_MAP
Alex Conversea9598cd2017-02-03 14:18:05 -08003069#if !CONFIG_PVQ && !(CONFIG_EC_ADAPT && CONFIG_NEW_TOKENSET)
Yaowu Xuf883b422016-08-30 14:01:10 -07003070static void build_tree_distribution(AV1_COMP *cpi, TX_SIZE tx_size,
3071 av1_coeff_stats *coef_branch_ct,
3072 av1_coeff_probs_model *coef_probs) {
3073 av1_coeff_count *coef_counts = cpi->td.rd_counts.coef_counts[tx_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003074 unsigned int(*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
3075 cpi->common.counts.eob_branch[tx_size];
3076 int i, j, k, l, m;
Urvang Joshifeb925f2016-12-05 10:37:29 -08003077#if CONFIG_RECT_TX
3078 assert(!is_rect_tx(tx_size));
3079#endif // CONFIG_RECT_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07003080
3081 for (i = 0; i < PLANE_TYPES; ++i) {
3082 for (j = 0; j < REF_TYPES; ++j) {
3083 for (k = 0; k < COEF_BANDS; ++k) {
3084 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003085 av1_tree_probs_from_distribution(av1_coef_tree,
3086 coef_branch_ct[i][j][k][l],
3087 coef_counts[i][j][k][l]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003088 coef_branch_ct[i][j][k][l][0][1] =
3089 eob_branch_ct[i][j][k][l] - coef_branch_ct[i][j][k][l][0][0];
3090 for (m = 0; m < UNCONSTRAINED_NODES; ++m)
3091 coef_probs[i][j][k][l][m] =
3092 get_binary_prob(coef_branch_ct[i][j][k][l][m][0],
3093 coef_branch_ct[i][j][k][l][m][1]);
3094 }
3095 }
3096 }
3097 }
3098}
3099
Alex Conversea9598cd2017-02-03 14:18:05 -08003100#if !(CONFIG_EC_ADAPT && CONFIG_NEW_TOKENSET)
Yaowu Xuf883b422016-08-30 14:01:10 -07003101static void update_coef_probs_common(aom_writer *const bc, AV1_COMP *cpi,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003102 TX_SIZE tx_size,
Yaowu Xuf883b422016-08-30 14:01:10 -07003103 av1_coeff_stats *frame_branch_ct,
3104 av1_coeff_probs_model *new_coef_probs) {
3105 av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
3106 const aom_prob upd = DIFF_UPDATE_PROB;
Alex Conversea9598cd2017-02-03 14:18:05 -08003107#if CONFIG_EC_ADAPT
3108 const int entropy_nodes_update = UNCONSTRAINED_NODES - 1;
3109#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07003110 const int entropy_nodes_update = UNCONSTRAINED_NODES;
Alex Conversea9598cd2017-02-03 14:18:05 -08003111#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003112 int i, j, k, l, t;
3113 int stepsize = cpi->sf.coeff_prob_appx_step;
Thomas Davies80188d12016-10-26 16:08:35 -07003114#if CONFIG_TILE_GROUPS
3115 const int probwt = cpi->common.num_tg;
3116#else
3117 const int probwt = 1;
3118#endif
Urvang Joshifeb925f2016-12-05 10:37:29 -08003119#if CONFIG_RECT_TX
3120 assert(!is_rect_tx(tx_size));
3121#endif // CONFIG_RECT_TX
Yaowu Xuc27fc142016-08-22 16:08:15 -07003122
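  // Two update strategies follow. TWO_LOOP first does a dry run to measure
  // the bit savings of each candidate probability update and only signals
  // updates when the net saving is positive; ONE_LOOP_REDUCED decides in a
  // single pass, deferring the leading update bit until the first actual
  // update is encountered.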
3123 switch (cpi->sf.use_fast_coef_updates) {
3124 case TWO_LOOP: {
3125 /* dry run to see if any update at all is needed */
3126 int savings = 0;
3127 int update[2] = { 0, 0 };
3128 for (i = 0; i < PLANE_TYPES; ++i) {
3129 for (j = 0; j < REF_TYPES; ++j) {
3130 for (k = 0; k < COEF_BANDS; ++k) {
3131 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
3132 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003133 aom_prob newp = new_coef_probs[i][j][k][l][t];
3134 const aom_prob oldp = old_coef_probs[i][j][k][l][t];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003135 int s;
3136 int u = 0;
3137 if (t == PIVOT_NODE)
Yaowu Xuf883b422016-08-30 14:01:10 -07003138 s = av1_prob_diff_update_savings_search_model(
Alex Conversea9ce4b72016-05-25 10:28:03 -07003139 frame_branch_ct[i][j][k][l][0], oldp, &newp, upd,
3140 stepsize, probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003141 else
Yaowu Xuf883b422016-08-30 14:01:10 -07003142 s = av1_prob_diff_update_savings_search(
Thomas Davies80188d12016-10-26 16:08:35 -07003143 frame_branch_ct[i][j][k][l][t], oldp, &newp, upd, probwt);
3144
Yaowu Xuc27fc142016-08-22 16:08:15 -07003145 if (s > 0 && newp != oldp) u = 1;
3146 if (u)
Yaowu Xuf883b422016-08-30 14:01:10 -07003147 savings += s - (int)(av1_cost_zero(upd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003148 else
Yaowu Xuf883b422016-08-30 14:01:10 -07003149 savings -= (int)(av1_cost_zero(upd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003150 update[u]++;
3151 }
3152 }
3153 }
3154 }
3155 }
3156
3157 /* Is any coef updated at all? */
3158 if (update[1] == 0 || savings < 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003159 aom_write_bit(bc, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003160 return;
3161 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003162 aom_write_bit(bc, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003163 for (i = 0; i < PLANE_TYPES; ++i) {
3164 for (j = 0; j < REF_TYPES; ++j) {
3165 for (k = 0; k < COEF_BANDS; ++k) {
3166 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
3167 // calc probs and branch cts for this frame only
3168 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003169 aom_prob newp = new_coef_probs[i][j][k][l][t];
3170 aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003171 int s;
3172 int u = 0;
3173 if (t == PIVOT_NODE)
Yaowu Xuf883b422016-08-30 14:01:10 -07003174 s = av1_prob_diff_update_savings_search_model(
Alex Conversea9ce4b72016-05-25 10:28:03 -07003175 frame_branch_ct[i][j][k][l][0], *oldp, &newp, upd,
3176 stepsize, probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003177 else
Yaowu Xuf883b422016-08-30 14:01:10 -07003178 s = av1_prob_diff_update_savings_search(
Thomas Davies80188d12016-10-26 16:08:35 -07003179 frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd,
3180 probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003181 if (s > 0 && newp != *oldp) u = 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07003182 aom_write(bc, u, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003183 if (u) {
3184 /* send/use new probability */
Yaowu Xuf883b422016-08-30 14:01:10 -07003185 av1_write_prob_diff_update(bc, newp, *oldp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003186 *oldp = newp;
3187 }
3188 }
3189 }
3190 }
3191 }
3192 }
3193 return;
3194 }
3195
3196 case ONE_LOOP_REDUCED: {
3197 int updates = 0;
3198 int noupdates_before_first = 0;
3199 for (i = 0; i < PLANE_TYPES; ++i) {
3200 for (j = 0; j < REF_TYPES; ++j) {
3201 for (k = 0; k < COEF_BANDS; ++k) {
3202 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
3203 // calc probs and branch cts for this frame only
3204 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003205 aom_prob newp = new_coef_probs[i][j][k][l][t];
3206 aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003207 int s;
3208 int u = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003209 if (t == PIVOT_NODE) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003210 s = av1_prob_diff_update_savings_search_model(
Alex Conversea9ce4b72016-05-25 10:28:03 -07003211 frame_branch_ct[i][j][k][l][0], *oldp, &newp, upd,
3212 stepsize, probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003213 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003214 s = av1_prob_diff_update_savings_search(
Thomas Davies80188d12016-10-26 16:08:35 -07003215 frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd,
3216 probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003217 }
3218
3219 if (s > 0 && newp != *oldp) u = 1;
3220 updates += u;
3221 if (u == 0 && updates == 0) {
3222 noupdates_before_first++;
3223 continue;
3224 }
3225 if (u == 1 && updates == 1) {
3226 int v;
3227 // first update
Yaowu Xuf883b422016-08-30 14:01:10 -07003228 aom_write_bit(bc, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003229 for (v = 0; v < noupdates_before_first; ++v)
Yaowu Xuf883b422016-08-30 14:01:10 -07003230 aom_write(bc, 0, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003231 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003232 aom_write(bc, u, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003233 if (u) {
3234 /* send/use new probability */
Yaowu Xuf883b422016-08-30 14:01:10 -07003235 av1_write_prob_diff_update(bc, newp, *oldp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003236 *oldp = newp;
3237 }
3238 }
3239 }
3240 }
3241 }
3242 }
3243 if (updates == 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003244 aom_write_bit(bc, 0); // no updates
Yaowu Xuc27fc142016-08-22 16:08:15 -07003245 }
3246 return;
3247 }
3248 default: assert(0);
3249 }
3250}
Thomas Daviesfc1598a2017-01-13 17:07:25 +00003251#endif
hui su0d103572017-03-01 17:58:01 -08003252#if CONFIG_SUBFRAME_PROB_UPDATE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003253// Calculate the token counts between subsequent subframe updates.
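// The counts are obtained by differencing the running totals against the
// snapshots buffered in cpi->subframe_stats at each update point; for the
// final interval the current frame totals themselves act as the upper
// snapshot.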
clang-format55ce9e02017-02-15 22:27:12 -08003254static void get_coef_counts_diff(
3255 AV1_COMP *cpi, int index,
3256 av1_coeff_count coef_counts[TX_SIZES][PLANE_TYPES],
3257 unsigned int eob_counts[TX_SIZES][PLANE_TYPES][REF_TYPES][COEF_BANDS]
3258 [COEFF_CONTEXTS]) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003259 int i, j, k, l, m, tx_size, val;
3260 const int max_idx = cpi->common.coef_probs_update_idx;
3261 const TX_MODE tx_mode = cpi->common.tx_mode;
Urvang Joshicb586f32016-09-20 11:36:33 -07003262 const int max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003263 const SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats;
3264
3265 assert(max_idx < COEF_PROBS_BUFS);
3266
Yaowu Xu6b763c92017-01-23 12:13:37 -08003267 for (tx_size = 0; tx_size <= max_tx_size; ++tx_size)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003268 for (i = 0; i < PLANE_TYPES; ++i)
3269 for (j = 0; j < REF_TYPES; ++j)
3270 for (k = 0; k < COEF_BANDS; ++k)
3271 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
3272 if (index == max_idx) {
3273 val =
3274 cpi->common.counts.eob_branch[tx_size][i][j][k][l] -
3275 subframe_stats->eob_counts_buf[max_idx][tx_size][i][j][k][l];
3276 } else {
clang-format55ce9e02017-02-15 22:27:12 -08003277 val = subframe_stats
3278 ->eob_counts_buf[index + 1][tx_size][i][j][k][l] -
Yaowu Xuc27fc142016-08-22 16:08:15 -07003279 subframe_stats->eob_counts_buf[index][tx_size][i][j][k][l];
3280 }
3281 assert(val >= 0);
3282 eob_counts[tx_size][i][j][k][l] = val;
3283
3284 for (m = 0; m < ENTROPY_TOKENS; ++m) {
3285 if (index == max_idx) {
3286 val = cpi->td.rd_counts.coef_counts[tx_size][i][j][k][l][m] -
clang-format55ce9e02017-02-15 22:27:12 -08003287 subframe_stats
3288 ->coef_counts_buf[max_idx][tx_size][i][j][k][l][m];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003289 } else {
clang-format55ce9e02017-02-15 22:27:12 -08003290 val = subframe_stats
3291 ->coef_counts_buf[index + 1][tx_size][i][j][k][l][m] -
3292 subframe_stats
3293 ->coef_counts_buf[index][tx_size][i][j][k][l][m];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003294 }
3295 assert(val >= 0);
3296 coef_counts[tx_size][i][j][k][l][m] = val;
3297 }
3298 }
3299}
3300
3301static void update_coef_probs_subframe(
Yaowu Xuf883b422016-08-30 14:01:10 -07003302 aom_writer *const bc, AV1_COMP *cpi, TX_SIZE tx_size,
3303 av1_coeff_stats branch_ct[COEF_PROBS_BUFS][TX_SIZES][PLANE_TYPES],
3304 av1_coeff_probs_model *new_coef_probs) {
3305 av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size];
3306 const aom_prob upd = DIFF_UPDATE_PROB;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003307 const int entropy_nodes_update = UNCONSTRAINED_NODES;
3308 int i, j, k, l, t;
3309 int stepsize = cpi->sf.coeff_prob_appx_step;
3310 const int max_idx = cpi->common.coef_probs_update_idx;
3311 int idx;
3312 unsigned int this_branch_ct[ENTROPY_NODES][COEF_PROBS_BUFS][2];
3313
3314 switch (cpi->sf.use_fast_coef_updates) {
3315 case TWO_LOOP: {
3316 /* dry run to see if there is any update at all needed */
3317 int savings = 0;
3318 int update[2] = { 0, 0 };
3319 for (i = 0; i < PLANE_TYPES; ++i) {
3320 for (j = 0; j < REF_TYPES; ++j) {
3321 for (k = 0; k < COEF_BANDS; ++k) {
3322 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
3323 for (t = 0; t < ENTROPY_NODES; ++t) {
3324 for (idx = 0; idx <= max_idx; ++idx) {
3325 memcpy(this_branch_ct[t][idx],
3326 branch_ct[idx][tx_size][i][j][k][l][t],
3327 2 * sizeof(this_branch_ct[t][idx][0]));
3328 }
3329 }
3330 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003331 aom_prob newp = new_coef_probs[i][j][k][l][t];
3332 const aom_prob oldp = old_coef_probs[i][j][k][l][t];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003333 int s, u = 0;
3334
3335 if (t == PIVOT_NODE)
Yaowu Xuf883b422016-08-30 14:01:10 -07003336 s = av1_prob_update_search_model_subframe(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003337 this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
3338 stepsize, max_idx);
3339 else
Yaowu Xuf883b422016-08-30 14:01:10 -07003340 s = av1_prob_update_search_subframe(this_branch_ct[t], oldp,
3341 &newp, upd, max_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003342 if (s > 0 && newp != oldp) u = 1;
3343 if (u)
Yaowu Xuf883b422016-08-30 14:01:10 -07003344 savings += s - (int)(av1_cost_zero(upd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003345 else
Yaowu Xuf883b422016-08-30 14:01:10 -07003346 savings -= (int)(av1_cost_zero(upd));
Yaowu Xuc27fc142016-08-22 16:08:15 -07003347 update[u]++;
3348 }
3349 }
3350 }
3351 }
3352 }
3353
3354 /* Is any coefficient probability updated at all? */
3355 if (update[1] == 0 || savings < 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003356 aom_write_bit(bc, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003357 return;
3358 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003359 aom_write_bit(bc, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003360 for (i = 0; i < PLANE_TYPES; ++i) {
3361 for (j = 0; j < REF_TYPES; ++j) {
3362 for (k = 0; k < COEF_BANDS; ++k) {
3363 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
3364 for (t = 0; t < ENTROPY_NODES; ++t) {
3365 for (idx = 0; idx <= max_idx; ++idx) {
3366 memcpy(this_branch_ct[t][idx],
3367 branch_ct[idx][tx_size][i][j][k][l][t],
3368 2 * sizeof(this_branch_ct[t][idx][0]));
3369 }
3370 }
3371 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003372 aom_prob newp = new_coef_probs[i][j][k][l][t];
3373 aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003374 int s;
3375 int u = 0;
3376
3377 if (t == PIVOT_NODE)
Yaowu Xuf883b422016-08-30 14:01:10 -07003378 s = av1_prob_update_search_model_subframe(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003379 this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
3380 stepsize, max_idx);
3381 else
Yaowu Xuf883b422016-08-30 14:01:10 -07003382 s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp,
3383 &newp, upd, max_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003384 if (s > 0 && newp != *oldp) u = 1;
Yaowu Xuf883b422016-08-30 14:01:10 -07003385 aom_write(bc, u, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003386 if (u) {
3387 /* send/use new probability */
Yaowu Xuf883b422016-08-30 14:01:10 -07003388 av1_write_prob_diff_update(bc, newp, *oldp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003389 *oldp = newp;
3390 }
3391 }
3392 }
3393 }
3394 }
3395 }
3396 return;
3397 }
3398
3399 case ONE_LOOP_REDUCED: {
3400 int updates = 0;
3401 int noupdates_before_first = 0;
3402 for (i = 0; i < PLANE_TYPES; ++i) {
3403 for (j = 0; j < REF_TYPES; ++j) {
3404 for (k = 0; k < COEF_BANDS; ++k) {
3405 for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
3406 for (t = 0; t < ENTROPY_NODES; ++t) {
3407 for (idx = 0; idx <= max_idx; ++idx) {
3408 memcpy(this_branch_ct[t][idx],
3409 branch_ct[idx][tx_size][i][j][k][l][t],
3410 2 * sizeof(this_branch_ct[t][idx][0]));
3411 }
3412 }
3413 for (t = 0; t < entropy_nodes_update; ++t) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003414 aom_prob newp = new_coef_probs[i][j][k][l][t];
3415 aom_prob *oldp = old_coef_probs[i][j][k][l] + t;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003416 int s;
3417 int u = 0;
3418
3419 if (t == PIVOT_NODE)
Yaowu Xuf883b422016-08-30 14:01:10 -07003420 s = av1_prob_update_search_model_subframe(
Yaowu Xuc27fc142016-08-22 16:08:15 -07003421 this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd,
3422 stepsize, max_idx);
3423 else
Yaowu Xuf883b422016-08-30 14:01:10 -07003424 s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp,
3425 &newp, upd, max_idx);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003426 if (s > 0 && newp != *oldp) u = 1;
3427 updates += u;
3428 if (u == 0 && updates == 0) {
3429 noupdates_before_first++;
3430 continue;
3431 }
3432 if (u == 1 && updates == 1) {
3433 int v;
3434 // first update
Yaowu Xuf883b422016-08-30 14:01:10 -07003435 aom_write_bit(bc, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003436 for (v = 0; v < noupdates_before_first; ++v)
Yaowu Xuf883b422016-08-30 14:01:10 -07003437 aom_write(bc, 0, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003438 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003439 aom_write(bc, u, upd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003440 if (u) {
3441 /* send/use new probability */
Yaowu Xuf883b422016-08-30 14:01:10 -07003442 av1_write_prob_diff_update(bc, newp, *oldp);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003443 *oldp = newp;
3444 }
3445 }
3446 }
3447 }
3448 }
3449 }
3450 if (updates == 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003451 aom_write_bit(bc, 0); // no updates
Yaowu Xuc27fc142016-08-22 16:08:15 -07003452 }
3453 return;
3454 }
3455 default: assert(0);
3456 }
3457}
hui su0d103572017-03-01 17:58:01 -08003458#endif // CONFIG_SUBFRAME_PROB_UPDATE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003459
Alex Conversea9598cd2017-02-03 14:18:05 -08003460#if !(CONFIG_EC_ADAPT && CONFIG_NEW_TOKENSET)
Yaowu Xuf883b422016-08-30 14:01:10 -07003461static void update_coef_probs(AV1_COMP *cpi, aom_writer *w) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003462 const TX_MODE tx_mode = cpi->common.tx_mode;
3463 const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
3464 TX_SIZE tx_size;
hui su0d103572017-03-01 17:58:01 -08003465#if CONFIG_SUBFRAME_PROB_UPDATE
Yaowu Xuf883b422016-08-30 14:01:10 -07003466 AV1_COMMON *cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003467 SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003468 int i;
Yaowu Xuf883b422016-08-30 14:01:10 -07003469 av1_coeff_probs_model dummy_frame_coef_probs[PLANE_TYPES];
Yaowu Xuc27fc142016-08-22 16:08:15 -07003470
3471 if (cm->do_subframe_update &&
3472 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003473 av1_copy(cpi->common.fc->coef_probs,
3474 subframe_stats->enc_starting_coef_probs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003475 for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) {
3476 get_coef_counts_diff(cpi, i, cpi->wholeframe_stats.coef_counts_buf[i],
3477 cpi->wholeframe_stats.eob_counts_buf[i]);
3478 }
3479 }
hui su0d103572017-03-01 17:58:01 -08003480#endif // CONFIG_SUBFRAME_PROB_UPDATE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003481
Jingning Han83630632016-12-16 11:27:25 -08003482 for (tx_size = 0; tx_size <= max_tx_size; ++tx_size) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003483 av1_coeff_stats frame_branch_ct[PLANE_TYPES];
3484 av1_coeff_probs_model frame_coef_probs[PLANE_TYPES];
Jingning Hanc7ea7612017-01-11 15:01:30 -08003485 if (cpi->td.counts->tx_size_totals[tx_size] <= 20 || CONFIG_RD_DEBUG ||
Yaowu Xuc27fc142016-08-22 16:08:15 -07003486 (tx_size >= TX_16X16 && cpi->sf.tx_size_search_method == USE_TX_8X8)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003487 aom_write_bit(w, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003488 } else {
hui su0d103572017-03-01 17:58:01 -08003489#if CONFIG_SUBFRAME_PROB_UPDATE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003490 if (cm->do_subframe_update &&
3491 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
Urvang Joshi43e62812016-10-20 14:51:01 -07003492 unsigned int this_eob_counts_copy[PLANE_TYPES][REF_TYPES][COEF_BANDS]
3493 [COEFF_CONTEXTS];
Yaowu Xuf883b422016-08-30 14:01:10 -07003494 av1_coeff_count coef_counts_copy[PLANE_TYPES];
Urvang Joshi43e62812016-10-20 14:51:01 -07003495 av1_copy(this_eob_counts_copy, cpi->common.counts.eob_branch[tx_size]);
Yaowu Xuf883b422016-08-30 14:01:10 -07003496 av1_copy(coef_counts_copy, cpi->td.rd_counts.coef_counts[tx_size]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003497 build_tree_distribution(cpi, tx_size, frame_branch_ct,
3498 frame_coef_probs);
3499 for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003500 av1_copy(cpi->common.counts.eob_branch[tx_size],
3501 cpi->wholeframe_stats.eob_counts_buf[i][tx_size]);
3502 av1_copy(cpi->td.rd_counts.coef_counts[tx_size],
3503 cpi->wholeframe_stats.coef_counts_buf[i][tx_size]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003504 build_tree_distribution(cpi, tx_size, cpi->branch_ct_buf[i][tx_size],
3505 dummy_frame_coef_probs);
3506 }
Urvang Joshi43e62812016-10-20 14:51:01 -07003507 av1_copy(cpi->common.counts.eob_branch[tx_size], this_eob_counts_copy);
Yaowu Xuf883b422016-08-30 14:01:10 -07003508 av1_copy(cpi->td.rd_counts.coef_counts[tx_size], coef_counts_copy);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003509
3510 update_coef_probs_subframe(w, cpi, tx_size, cpi->branch_ct_buf,
3511 frame_coef_probs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003512 } else {
hui su0d103572017-03-01 17:58:01 -08003513#endif // CONFIG_SUBFRAME_PROB_UPDATE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003514 build_tree_distribution(cpi, tx_size, frame_branch_ct,
3515 frame_coef_probs);
3516 update_coef_probs_common(w, cpi, tx_size, frame_branch_ct,
3517 frame_coef_probs);
hui su0d103572017-03-01 17:58:01 -08003518#if CONFIG_SUBFRAME_PROB_UPDATE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003519 }
hui su0d103572017-03-01 17:58:01 -08003520#endif // CONFIG_SUBFRAME_PROB_UPDATE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003521 }
3522 }
3523
hui su0d103572017-03-01 17:58:01 -08003524#if CONFIG_SUBFRAME_PROB_UPDATE
Yaowu Xuf883b422016-08-30 14:01:10 -07003525 av1_copy(cm->starting_coef_probs, cm->fc->coef_probs);
3526 av1_copy(subframe_stats->coef_probs_buf[0], cm->fc->coef_probs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003527 if (cm->do_subframe_update &&
3528 cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) {
Urvang Joshi43e62812016-10-20 14:51:01 -07003529 unsigned int eob_counts_copy[TX_SIZES][PLANE_TYPES][REF_TYPES][COEF_BANDS]
3530 [COEFF_CONTEXTS];
Yaowu Xuf883b422016-08-30 14:01:10 -07003531 av1_copy(eob_counts_copy, cm->counts.eob_branch);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003532 for (i = 1; i <= cpi->common.coef_probs_update_idx; ++i) {
Yaowu Xu6b763c92017-01-23 12:13:37 -08003533 for (tx_size = 0; tx_size <= max_tx_size; ++tx_size)
Yaowu Xuf883b422016-08-30 14:01:10 -07003534 av1_full_to_model_counts(cm->counts.coef[tx_size],
3535 subframe_stats->coef_counts_buf[i][tx_size]);
3536 av1_copy(cm->counts.eob_branch, subframe_stats->eob_counts_buf[i]);
3537 av1_partial_adapt_probs(cm, 0, 0);
3538 av1_copy(subframe_stats->coef_probs_buf[i], cm->fc->coef_probs);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003539 }
Yaowu Xuf883b422016-08-30 14:01:10 -07003540 av1_copy(cm->fc->coef_probs, subframe_stats->coef_probs_buf[0]);
3541 av1_copy(cm->counts.eob_branch, eob_counts_copy);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003542 }
hui su0d103572017-03-01 17:58:01 -08003543#endif // CONFIG_SUBFRAME_PROB_UPDATE
Yaowu Xuc27fc142016-08-22 16:08:15 -07003544}
Angie Chiang800df032017-03-22 11:14:12 -07003545#endif // !(CONFIG_EC_ADAPT && CONFIG_NEW_TOKENSET)
Thomas Daviesfc1598a2017-01-13 17:07:25 +00003546#endif // !CONFIG_EC_ADAPT
Angie Chiang800df032017-03-22 11:14:12 -07003547#endif // !CONFIG_LV_MAP
Yaowu Xuc27fc142016-08-22 16:08:15 -07003548
3549#if CONFIG_LOOP_RESTORATION
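// Signals the frame-level loop restoration configuration: two bits select the
// luma restoration type (NONE / WIENER / SGRPROJ / SWITCHABLE), one bit per
// chroma plane selects NONE vs. WIENER, and if any plane uses restoration the
// restoration tile size is coded relative to RESTORATION_TILESIZE_MAX.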
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003550static void encode_restoration_mode(AV1_COMMON *cm,
3551 struct aom_write_bit_buffer *wb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003552 int p;
3553 RestorationInfo *rsi = &cm->rst_info[0];
3554 switch (rsi->frame_restoration_type) {
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003555 case RESTORE_NONE:
3556 aom_wb_write_bit(wb, 0);
3557 aom_wb_write_bit(wb, 0);
3558 break;
Debargha Mukherjee0e67b252016-12-08 09:22:44 -08003559 case RESTORE_WIENER:
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003560 aom_wb_write_bit(wb, 1);
Debargha Mukherjee0e67b252016-12-08 09:22:44 -08003561 aom_wb_write_bit(wb, 0);
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003562 break;
Debargha Mukherjeeb3c43bc2017-02-01 13:09:03 -08003563 case RESTORE_SGRPROJ:
3564 aom_wb_write_bit(wb, 1);
3565 aom_wb_write_bit(wb, 1);
3566 break;
Debargha Mukherjee0e67b252016-12-08 09:22:44 -08003567 case RESTORE_SWITCHABLE:
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003568 aom_wb_write_bit(wb, 0);
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003569 aom_wb_write_bit(wb, 1);
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003570 break;
3571 default: assert(0);
3572 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003573 for (p = 1; p < MAX_MB_PLANE; ++p) {
3574 rsi = &cm->rst_info[p];
3575 switch (rsi->frame_restoration_type) {
3576 case RESTORE_NONE: aom_wb_write_bit(wb, 0); break;
3577 case RESTORE_WIENER: aom_wb_write_bit(wb, 1); break;
3578 default: assert(0);
3579 }
3580 }
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08003581 if (cm->rst_info[0].frame_restoration_type != RESTORE_NONE ||
3582 cm->rst_info[1].frame_restoration_type != RESTORE_NONE ||
3583 cm->rst_info[2].frame_restoration_type != RESTORE_NONE) {
3584 rsi = &cm->rst_info[0];
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08003585 aom_wb_write_bit(wb, rsi->restoration_tilesize != RESTORATION_TILESIZE_MAX);
3586 if (rsi->restoration_tilesize != RESTORATION_TILESIZE_MAX) {
3587 aom_wb_write_bit(
3588 wb, rsi->restoration_tilesize != (RESTORATION_TILESIZE_MAX >> 1));
3589 }
3590 }
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003591}
3592
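// Writes the three distinct vertical and horizontal Wiener filter taps as
// differences from the previously signalled filter using finite
// subexponential codes; the remaining taps are implied by the filter's
// symmetry and normalization, so they are not coded.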
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07003593static void write_wiener_filter(WienerInfo *wiener_info,
3594 WienerInfo *ref_wiener_info, aom_writer *wb) {
3595 aom_write_primitive_refsubexpfin(
3596 wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
3597 WIENER_FILT_TAP0_SUBEXP_K,
3598 ref_wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV,
3599 wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV);
3600 aom_write_primitive_refsubexpfin(
3601 wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
3602 WIENER_FILT_TAP1_SUBEXP_K,
3603 ref_wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV,
3604 wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV);
3605 aom_write_primitive_refsubexpfin(
3606 wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
3607 WIENER_FILT_TAP2_SUBEXP_K,
3608 ref_wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV,
3609 wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV);
3610 aom_write_primitive_refsubexpfin(
3611 wb, WIENER_FILT_TAP0_MAXV - WIENER_FILT_TAP0_MINV + 1,
3612 WIENER_FILT_TAP0_SUBEXP_K,
3613 ref_wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV,
3614 wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV);
3615 aom_write_primitive_refsubexpfin(
3616 wb, WIENER_FILT_TAP1_MAXV - WIENER_FILT_TAP1_MINV + 1,
3617 WIENER_FILT_TAP1_SUBEXP_K,
3618 ref_wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV,
3619 wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV);
3620 aom_write_primitive_refsubexpfin(
3621 wb, WIENER_FILT_TAP2_MAXV - WIENER_FILT_TAP2_MINV + 1,
3622 WIENER_FILT_TAP2_SUBEXP_K,
3623 ref_wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV,
3624 wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV);
3625 memcpy(ref_wiener_info, wiener_info, sizeof(*wiener_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003626}
3627
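// Writes the self-guided projection filter parameters: the parameter-set
// index (ep) as a literal, then the two projection coefficients (xqd) as
// subexponentially coded differences from the previously signalled filter.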
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07003628static void write_sgrproj_filter(SgrprojInfo *sgrproj_info,
3629 SgrprojInfo *ref_sgrproj_info,
3630 aom_writer *wb) {
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003631 aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07003632 aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX0 - SGRPROJ_PRJ_MIN0 + 1,
3633 SGRPROJ_PRJ_SUBEXP_K,
3634 ref_sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0,
3635 sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0);
3636 aom_write_primitive_refsubexpfin(wb, SGRPROJ_PRJ_MAX1 - SGRPROJ_PRJ_MIN1 + 1,
3637 SGRPROJ_PRJ_SUBEXP_K,
3638 ref_sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1,
3639 sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1);
3640 memcpy(ref_sgrproj_info, sgrproj_info, sizeof(*sgrproj_info));
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003641}
3642
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003643static void encode_restoration(AV1_COMMON *cm, aom_writer *wb) {
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003644 int i, p;
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08003645 const int ntiles = av1_get_rest_ntiles(cm->width, cm->height,
3646 cm->rst_info[0].restoration_tilesize,
3647 NULL, NULL, NULL, NULL);
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07003648 WienerInfo ref_wiener_info;
3649 SgrprojInfo ref_sgrproj_info;
3650 set_default_wiener(&ref_wiener_info);
3651 set_default_sgrproj(&ref_sgrproj_info);
Debargha Mukherjee1008c1e2017-03-06 19:18:43 -08003652 const int ntiles_uv = av1_get_rest_ntiles(
3653 ROUND_POWER_OF_TWO(cm->width, cm->subsampling_x),
3654 ROUND_POWER_OF_TWO(cm->height, cm->subsampling_y),
3655 cm->rst_info[1].restoration_tilesize, NULL, NULL, NULL, NULL);
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003656 RestorationInfo *rsi = &cm->rst_info[0];
Debargha Mukherjee5d89a632016-09-17 13:16:58 -07003657 if (rsi->frame_restoration_type != RESTORE_NONE) {
3658 if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003659 // RESTORE_SWITCHABLE
Debargha Mukherjeed7489142017-01-05 13:58:16 -08003660 for (i = 0; i < ntiles; ++i) {
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07003661 av1_write_token(
clang-formatbda8d612016-09-19 15:55:46 -07003662 wb, av1_switchable_restore_tree, cm->fc->switchable_restore_prob,
Debargha Mukherjee5d89a632016-09-17 13:16:58 -07003663 &switchable_restore_encodings[rsi->restoration_type[i]]);
Debargha Mukherjee0e67b252016-12-08 09:22:44 -08003664 if (rsi->restoration_type[i] == RESTORE_WIENER) {
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07003665 write_wiener_filter(&rsi->wiener_info[i], &ref_wiener_info, wb);
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003666 } else if (rsi->restoration_type[i] == RESTORE_SGRPROJ) {
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07003667 write_sgrproj_filter(&rsi->sgrproj_info[i], &ref_sgrproj_info, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003668 }
3669 }
Debargha Mukherjee5d89a632016-09-17 13:16:58 -07003670 } else if (rsi->frame_restoration_type == RESTORE_WIENER) {
Debargha Mukherjeed7489142017-01-05 13:58:16 -08003671 for (i = 0; i < ntiles; ++i) {
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003672 aom_write(wb, rsi->restoration_type[i] != RESTORE_NONE,
3673 RESTORE_NONE_WIENER_PROB);
3674 if (rsi->restoration_type[i] != RESTORE_NONE) {
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07003675 write_wiener_filter(&rsi->wiener_info[i], &ref_wiener_info, wb);
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003676 }
3677 }
3678 } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) {
Debargha Mukherjeed7489142017-01-05 13:58:16 -08003679 for (i = 0; i < ntiles; ++i) {
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003680 aom_write(wb, rsi->restoration_type[i] != RESTORE_NONE,
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07003681 RESTORE_NONE_SGRPROJ_PROB);
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003682 if (rsi->restoration_type[i] != RESTORE_NONE) {
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07003683 write_sgrproj_filter(&rsi->sgrproj_info[i], &ref_sgrproj_info, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003684 }
3685 }
3686 }
3687 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003688 for (p = 1; p < MAX_MB_PLANE; ++p) {
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07003689 set_default_wiener(&ref_wiener_info);
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003690 rsi = &cm->rst_info[p];
3691 if (rsi->frame_restoration_type == RESTORE_WIENER) {
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003692 for (i = 0; i < ntiles_uv; ++i) {
3693 if (ntiles_uv > 1)
3694 aom_write(wb, rsi->restoration_type[i] != RESTORE_NONE,
3695 RESTORE_NONE_WIENER_PROB);
3696 if (rsi->restoration_type[i] != RESTORE_NONE) {
Debargha Mukherjeecfc12f32017-04-18 07:03:32 -07003697 write_wiener_filter(&rsi->wiener_info[i], &ref_wiener_info, wb);
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08003698 }
3699 }
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08003700 } else if (rsi->frame_restoration_type != RESTORE_NONE) {
3701 assert(0);
3702 }
3703 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003704}
3705#endif // CONFIG_LOOP_RESTORATION
3706
Yaowu Xuf883b422016-08-30 14:01:10 -07003707static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003708 int i;
3709 struct loopfilter *lf = &cm->lf;
3710
3711 // Encode the loop filter level and sharpness
Yaowu Xuf883b422016-08-30 14:01:10 -07003712 aom_wb_write_literal(wb, lf->filter_level, 6);
3713 aom_wb_write_literal(wb, lf->sharpness_level, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003714
3715 // Write out loop filter deltas applied at the MB level based on mode or
3716 // ref frame (if they are enabled).
Yaowu Xuf883b422016-08-30 14:01:10 -07003717 aom_wb_write_bit(wb, lf->mode_ref_delta_enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003718
3719 if (lf->mode_ref_delta_enabled) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003720 aom_wb_write_bit(wb, lf->mode_ref_delta_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003721 if (lf->mode_ref_delta_update) {
3722 for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) {
3723 const int delta = lf->ref_deltas[i];
3724 const int changed = delta != lf->last_ref_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07003725 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003726 if (changed) {
3727 lf->last_ref_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07003728 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003729 }
3730 }
3731
3732 for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
3733 const int delta = lf->mode_deltas[i];
3734 const int changed = delta != lf->last_mode_deltas[i];
Yaowu Xuf883b422016-08-30 14:01:10 -07003735 aom_wb_write_bit(wb, changed);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003736 if (changed) {
3737 lf->last_mode_deltas[i] = delta;
Yaowu Xuf883b422016-08-30 14:01:10 -07003738 aom_wb_write_inv_signed_literal(wb, delta, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003739 }
3740 }
3741 }
3742 }
3743}
3744
Jean-Marc Valin01435132017-02-18 14:12:53 -05003745#if CONFIG_CDEF
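// Writes the CDEF parameters: the dering and CLPF damping values (offset by
// their fixed minimums), the number of bits used to index per-unit strengths
// (cdef_bits), and the luma/chroma strength pair for each of the
// nb_cdef_strengths presets.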
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01003746static void encode_cdef(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) {
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04003747 int i;
Steinar Midtskogen0c966a52017-04-18 14:38:13 +02003748 aom_wb_write_literal(wb, cm->cdef_dering_damping - 5, 1);
3749 aom_wb_write_literal(wb, cm->cdef_clpf_damping - 3, 2);
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04003750 aom_wb_write_literal(wb, cm->cdef_bits, 2);
3751 for (i = 0; i < cm->nb_cdef_strengths; i++) {
3752 aom_wb_write_literal(wb, cm->cdef_strengths[i], CDEF_STRENGTH_BITS);
Jean-Marc Valine9f77422017-03-22 17:09:51 -04003753 aom_wb_write_literal(wb, cm->cdef_uv_strengths[i], CDEF_STRENGTH_BITS);
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -04003754 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07003755}
3756#endif
3757
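// Writes a quantizer delta: a flag bit, followed by the delta as a 6-bit
// inverse signed literal only when it is nonzero.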
Yaowu Xuf883b422016-08-30 14:01:10 -07003758static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003759 if (delta_q != 0) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003760 aom_wb_write_bit(wb, 1);
3761 aom_wb_write_inv_signed_literal(wb, delta_q, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003762 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003763 aom_wb_write_bit(wb, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003764 }
3765}
3766
Yaowu Xuf883b422016-08-30 14:01:10 -07003767static void encode_quantization(const AV1_COMMON *const cm,
3768 struct aom_write_bit_buffer *wb) {
3769 aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003770 write_delta_q(wb, cm->y_dc_delta_q);
3771 write_delta_q(wb, cm->uv_dc_delta_q);
3772 write_delta_q(wb, cm->uv_ac_delta_q);
3773#if CONFIG_AOM_QM
Yaowu Xuf883b422016-08-30 14:01:10 -07003774 aom_wb_write_bit(wb, cm->using_qmatrix);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003775 if (cm->using_qmatrix) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003776 aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS);
3777 aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003778 }
3779#endif
3780}
3781
Yaowu Xuf883b422016-08-30 14:01:10 -07003782static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd,
3783 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003784 int i, j;
3785 const struct segmentation *seg = &cm->seg;
3786
Yaowu Xuf883b422016-08-30 14:01:10 -07003787 aom_wb_write_bit(wb, seg->enabled);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003788 if (!seg->enabled) return;
3789
3790 // Segmentation map
3791 if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003792 aom_wb_write_bit(wb, seg->update_map);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003793 } else {
3794 assert(seg->update_map == 1);
3795 }
3796 if (seg->update_map) {
3797 // Select the coding strategy (temporal or spatial)
Yaowu Xuf883b422016-08-30 14:01:10 -07003798 av1_choose_segmap_coding_method(cm, xd);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003799
3800 // Write out the chosen coding method.
3801 if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003802 aom_wb_write_bit(wb, seg->temporal_update);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003803 } else {
3804 assert(seg->temporal_update == 0);
3805 }
3806 }
3807
3808 // Segmentation data
Yaowu Xuf883b422016-08-30 14:01:10 -07003809 aom_wb_write_bit(wb, seg->update_data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003810 if (seg->update_data) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003811 aom_wb_write_bit(wb, seg->abs_delta);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003812
3813 for (i = 0; i < MAX_SEGMENTS; i++) {
3814 for (j = 0; j < SEG_LVL_MAX; j++) {
3815 const int active = segfeature_active(seg, i, j);
Yaowu Xuf883b422016-08-30 14:01:10 -07003816 aom_wb_write_bit(wb, active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003817 if (active) {
3818 const int data = get_segdata(seg, i, j);
Yaowu Xuf883b422016-08-30 14:01:10 -07003819 const int data_max = av1_seg_feature_data_max(j);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003820
Yaowu Xuf883b422016-08-30 14:01:10 -07003821 if (av1_is_segfeature_signed(j)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003822 encode_unsigned_max(wb, abs(data), data_max);
Yaowu Xuf883b422016-08-30 14:01:10 -07003823 aom_wb_write_bit(wb, data < 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003824 } else {
3825 encode_unsigned_max(wb, data, data_max);
3826 }
3827 }
3828 }
3829 }
3830 }
3831}
3832
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04003833#if !CONFIG_EC_ADAPT
Yaowu Xuf883b422016-08-30 14:01:10 -07003834static void update_seg_probs(AV1_COMP *cpi, aom_writer *w) {
3835 AV1_COMMON *cm = &cpi->common;
Thomas Davies80188d12016-10-26 16:08:35 -07003836#if CONFIG_TILE_GROUPS
3837 const int probwt = cm->num_tg;
3838#else
3839 const int probwt = 1;
3840#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003841
3842 if (!cm->seg.enabled || !cm->seg.update_map) return;
3843
3844 if (cm->seg.temporal_update) {
3845 int i;
3846
3847 for (i = 0; i < PREDICTION_PROBS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07003848 av1_cond_prob_diff_update(w, &cm->fc->seg.pred_probs[i],
Thomas Davies80188d12016-10-26 16:08:35 -07003849 cm->counts.seg.pred[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003850
Yaowu Xuf883b422016-08-30 14:01:10 -07003851 prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs,
Thomas Davies80188d12016-10-26 16:08:35 -07003852 cm->counts.seg.tree_mispred, MAX_SEGMENTS, probwt, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003853 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07003854 prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs,
Thomas Davies80188d12016-10-26 16:08:35 -07003855 cm->counts.seg.tree_total, MAX_SEGMENTS, probwt, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003856 }
3857}
Thomas Davies6519beb2016-10-19 14:46:07 +01003858#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003859
Yue Cheneeacc4c2017-01-17 17:29:17 -08003860static void write_tx_mode(AV1_COMMON *cm, MACROBLOCKD *xd, TX_MODE *mode,
3861 struct aom_write_bit_buffer *wb) {
3862 int i, all_lossless = 1;
3863
3864 if (cm->seg.enabled) {
3865 for (i = 0; i < MAX_SEGMENTS; ++i) {
3866 if (!xd->lossless[i]) {
3867 all_lossless = 0;
3868 break;
3869 }
3870 }
3871 } else {
3872 all_lossless = xd->lossless[0];
3873 }
3874 if (all_lossless) {
3875 *mode = ONLY_4X4;
3876 return;
3877 }
Debargha Mukherjee18d38f62016-11-17 20:30:16 -08003878#if CONFIG_TX64X64
Yue Cheneeacc4c2017-01-17 17:29:17 -08003879 aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
3880 if (*mode != TX_MODE_SELECT) {
3881 aom_wb_write_literal(wb, AOMMIN(*mode, ALLOW_32X32), 2);
3882 if (*mode >= ALLOW_32X32) aom_wb_write_bit(wb, *mode == ALLOW_64X64);
Debargha Mukherjee18d38f62016-11-17 20:30:16 -08003883 }
3884#else
Yue Cheneeacc4c2017-01-17 17:29:17 -08003885 aom_wb_write_bit(wb, *mode == TX_MODE_SELECT);
3886 if (*mode != TX_MODE_SELECT) aom_wb_write_literal(wb, *mode, 2);
Debargha Mukherjee18d38f62016-11-17 20:30:16 -08003887#endif // CONFIG_TX64X64
Yaowu Xuc27fc142016-08-22 16:08:15 -07003888}
3889
Nathan E. Eggeb353a8e2017-02-17 10:27:37 -05003890#if !CONFIG_EC_ADAPT
Yaowu Xuf883b422016-08-30 14:01:10 -07003891static void update_txfm_probs(AV1_COMMON *cm, aom_writer *w,
Yaowu Xuc27fc142016-08-22 16:08:15 -07003892 FRAME_COUNTS *counts) {
Thomas Davies80188d12016-10-26 16:08:35 -07003893#if CONFIG_TILE_GROUPS
3894 const int probwt = cm->num_tg;
3895#else
3896 const int probwt = 1;
3897#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003898 if (cm->tx_mode == TX_MODE_SELECT) {
3899 int i, j;
Jingning Hanaae72a62016-10-25 15:35:29 -07003900 for (i = 0; i < MAX_TX_DEPTH; ++i)
Yaowu Xuc27fc142016-08-22 16:08:15 -07003901 for (j = 0; j < TX_SIZE_CONTEXTS; ++j)
Yaowu Xuf883b422016-08-30 14:01:10 -07003902 prob_diff_update(av1_tx_size_tree[i], cm->fc->tx_size_probs[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07003903 counts->tx_size[i][j], i + 2, probwt, w);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003904 }
3905}
Nathan E. Eggeb353a8e2017-02-17 10:27:37 -05003906#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07003907
Angie Chiang5678ad92016-11-21 09:38:40 -08003908static void write_frame_interp_filter(InterpFilter filter,
3909 struct aom_write_bit_buffer *wb) {
Yaowu Xuf883b422016-08-30 14:01:10 -07003910 aom_wb_write_bit(wb, filter == SWITCHABLE);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003911 if (filter != SWITCHABLE)
Angie Chiang6305abe2016-10-24 12:24:44 -07003912 aom_wb_write_literal(wb, filter, LOG_SWITCHABLE_FILTERS);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003913}
3914
Yaowu Xuf883b422016-08-30 14:01:10 -07003915static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003916 if (cm->interp_filter == SWITCHABLE) {
3917 // Check to see if only one of the filters is actually used
3918 int count[SWITCHABLE_FILTERS];
3919 int i, j, c = 0;
3920 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
3921 count[i] = 0;
3922 for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
3923 count[i] += counts->switchable_interp[j][i];
3924 c += (count[i] > 0);
3925 }
3926 if (c == 1) {
3927 // Only one filter is used. So set the filter at frame level
3928 for (i = 0; i < SWITCHABLE_FILTERS; ++i) {
3929 if (count[i]) {
Sarah Parkera036d862017-04-11 17:53:37 -07003930#if CONFIG_MOTION_VAR && (CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION)
3931#if CONFIG_WARPED_MOTION
Sarah Parker4c10a3c2017-04-10 19:37:59 -07003932 if (i == EIGHTTAP_REGULAR || WARP_WM_NEIGHBORS_WITH_OBMC)
Sarah Parkera036d862017-04-11 17:53:37 -07003933#else
Sarah Parker4c10a3c2017-04-10 19:37:59 -07003934 if (i == EIGHTTAP_REGULAR || WARP_GM_NEIGHBORS_WITH_OBMC)
Sarah Parkera036d862017-04-11 17:53:37 -07003935#endif // CONFIG_WARPED_MOTION
3936#endif // CONFIG_MOTION_VAR && (CONFIG_WARPED_MOTION || CONFIG_GLOBAL_MOTION)
Debargha Mukherjee604d8462017-04-06 15:27:00 -07003937 cm->interp_filter = i;
Yaowu Xuc27fc142016-08-22 16:08:15 -07003938 break;
3939 }
3940 }
3941 }
3942 }
3943}
3944
Yaowu Xuf883b422016-08-30 14:01:10 -07003945static void write_tile_info(const AV1_COMMON *const cm,
3946 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07003947#if CONFIG_EXT_TILE
3948 const int tile_width =
3949 ALIGN_POWER_OF_TWO(cm->tile_width, cm->mib_size_log2) >>
3950 cm->mib_size_log2;
3951 const int tile_height =
3952 ALIGN_POWER_OF_TWO(cm->tile_height, cm->mib_size_log2) >>
3953 cm->mib_size_log2;
3954
3955 assert(tile_width > 0);
3956 assert(tile_height > 0);
3957
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08003958 aom_wb_write_literal(wb, cm->tile_encoding_mode, 1);
3959
Yaowu Xuc27fc142016-08-22 16:08:15 -07003960// Write the tile sizes
3961#if CONFIG_EXT_PARTITION
3962 if (cm->sb_size == BLOCK_128X128) {
3963 assert(tile_width <= 32);
3964 assert(tile_height <= 32);
Yaowu Xuf883b422016-08-30 14:01:10 -07003965 aom_wb_write_literal(wb, tile_width - 1, 5);
3966 aom_wb_write_literal(wb, tile_height - 1, 5);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003967 } else
3968#endif // CONFIG_EXT_PARTITION
3969 {
3970 assert(tile_width <= 64);
3971 assert(tile_height <= 64);
Yaowu Xuf883b422016-08-30 14:01:10 -07003972 aom_wb_write_literal(wb, tile_width - 1, 6);
3973 aom_wb_write_literal(wb, tile_height - 1, 6);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003974 }
3975#else
3976 int min_log2_tile_cols, max_log2_tile_cols, ones;
Yaowu Xuf883b422016-08-30 14:01:10 -07003977 av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003978
3979 // columns
3980 ones = cm->log2_tile_cols - min_log2_tile_cols;
Yaowu Xuf883b422016-08-30 14:01:10 -07003981 while (ones--) aom_wb_write_bit(wb, 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003982
Yaowu Xuf883b422016-08-30 14:01:10 -07003983 if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003984
3985 // rows
Yaowu Xuf883b422016-08-30 14:01:10 -07003986 aom_wb_write_bit(wb, cm->log2_tile_rows != 0);
3987 if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07003988#endif // CONFIG_EXT_TILE
Ryan Lei7386eda2016-12-08 21:08:31 -08003989
Fangwen Fu7b9f2b32017-01-17 14:01:52 -08003990#if CONFIG_DEPENDENT_HORZTILES
3991 if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->dependent_horz_tiles);
3992#endif
3993
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003994#if CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -08003995 aom_wb_write_bit(wb, cm->loop_filter_across_tiles_enabled);
Ryan Lei9b02b0e2017-01-30 15:52:20 -08003996#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Yaowu Xuc27fc142016-08-22 16:08:15 -07003997}
3998
Yaowu Xuf883b422016-08-30 14:01:10 -07003999static int get_refresh_mask(AV1_COMP *cpi) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004000 int refresh_mask = 0;
4001
4002#if CONFIG_EXT_REFS
4003 // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be
4004 // notified to get LAST3_FRAME refreshed and then the virtual indexes for all
4005 // the 3 LAST reference frames will be updated accordingly, i.e.:
4006 // (1) The original virtual index for LAST3_FRAME will become the new virtual
4007 // index for LAST_FRAME; and
4008 // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be
4009 // shifted and become the new virtual indexes for LAST2_FRAME and
4010 // LAST3_FRAME.
4011 refresh_mask |=
4012 (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]);
4013 if (cpi->rc.is_bwd_ref_frame && cpi->num_extra_arfs) {
4014 // We have swapped the virtual indices
4015 refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->arf_map[0]);
4016 } else {
4017 refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx);
4018 }
4019#else
4020 refresh_mask |= (cpi->refresh_last_frame << cpi->lst_fb_idx);
4021#endif // CONFIG_EXT_REFS
4022
Yaowu Xuf883b422016-08-30 14:01:10 -07004023 if (av1_preserve_existing_gf(cpi)) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004024 // We have decided to preserve the previously existing golden frame as our
4025 // new ARF frame. However, in the short term we leave it in the GF slot and,
4026 // if we're updating the GF with the current decoded frame, we save it
4027 // instead to the ARF slot.
Yaowu Xuf883b422016-08-30 14:01:10 -07004028 // Later, in the function av1_encoder.c:av1_update_reference_frames() we
Yaowu Xuc27fc142016-08-22 16:08:15 -07004029 // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it
4030 // there so that it can be done outside of the recode loop.
4031 // Note: This is highly specific to the use of ARF as a forward reference,
4032 // and this needs to be generalized as other uses are implemented
4033 // (like RTC/temporal scalability).
4034 return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx);
4035 } else {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004036#if CONFIG_EXT_REFS
4037 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
Zoe Liu6eeea0e2017-04-19 08:49:49 -07004038 int arf_idx = cpi->arf_map[gf_group->arf_update_idx[gf_group->index]];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004039#else
Zoe Liu6eeea0e2017-04-19 08:49:49 -07004040 int arf_idx = cpi->alt_fb_idx;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004041 if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) {
4042 const GF_GROUP *const gf_group = &cpi->twopass.gf_group;
4043 arf_idx = gf_group->arf_update_idx[gf_group->index];
4044 }
4045#endif // CONFIG_EXT_REFS
4046 return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) |
4047 (cpi->refresh_alt_ref_frame << arf_idx);
4048 }
4049}
4050
4051#if CONFIG_EXT_TILE
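// Searches for a previously coded tile whose bitstream is byte-identical to
// the current tile. Only the tile directly above is considered for now, and
// copy chains are followed through that candidate's header. Returns the row
// offset to the identical tile so the current tile can be coded as a copy,
// or 0 if no match is found.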
4052static INLINE int find_identical_tile(
4053 const int tile_row, const int tile_col,
4054 TileBufferEnc (*const tile_buffers)[1024]) {
4055 const MV32 candidate_offset[1] = { { 1, 0 } };
4056 const uint8_t *const cur_tile_data =
4057 tile_buffers[tile_row][tile_col].data + 4;
Jingning Han99ffce62017-04-25 15:48:41 -07004058 const size_t cur_tile_size = tile_buffers[tile_row][tile_col].size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004059
4060 int i;
4061
4062 if (tile_row == 0) return 0;
4063
4064 // TODO(yunqingwang): For now, only the above tile is checked and used.
4065 // More candidates, such as the left tile, can be added later.
4066 for (i = 0; i < 1; i++) {
4067 int row_offset = candidate_offset[0].row;
4068 int col_offset = candidate_offset[0].col;
4069 int row = tile_row - row_offset;
4070 int col = tile_col - col_offset;
4071 uint8_t tile_hdr;
4072 const uint8_t *tile_data;
4073 TileBufferEnc *candidate;
4074
4075 if (row < 0 || col < 0) continue;
4076
4077 tile_hdr = *(tile_buffers[row][col].data);
4078
4079 // Read out tcm bit
4080 if ((tile_hdr >> 7) == 1) {
4081 // The candidate is a copy tile itself
4082 row_offset += tile_hdr & 0x7f;
4083 row = tile_row - row_offset;
4084 }
4085
4086 candidate = &tile_buffers[row][col];
4087
4088 if (row_offset >= 128 || candidate->size != cur_tile_size) continue;
4089
4090 tile_data = candidate->data + 4;
4091
4092 if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue;
4093
4094 // Identical tile found
4095 assert(row_offset > 0);
4096 return row_offset;
4097 }
4098
4099 // No identical tile found
4100 return 0;
4101}
4102#endif // CONFIG_EXT_TILE
4103
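// Packs the coded data of every tile into the output buffer. With
// CONFIG_TILE_GROUPS the frame headers are emitted here as well and the
// tile-group parameters are patched in place once the grouping is known; with
// CONFIG_EXT_TILE per-column size fields and tile copies are written. Returns
// the total number of bytes written and reports the largest tile (and tile
// column) size seen so the size fields can be compacted afterwards.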
Thomas Davies80188d12016-10-26 16:08:35 -07004104#if CONFIG_TILE_GROUPS
4105static uint32_t write_tiles(AV1_COMP *const cpi,
4106 struct aom_write_bit_buffer *wb,
4107 unsigned int *max_tile_size,
4108 unsigned int *max_tile_col_size) {
4109#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004110static uint32_t write_tiles(AV1_COMP *const cpi, uint8_t *const dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004111 unsigned int *max_tile_size,
4112 unsigned int *max_tile_col_size) {
Thomas Davies80188d12016-10-26 16:08:35 -07004113#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07004114 const AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004115#if CONFIG_ANS
Alex Converse2a1b3af2016-10-26 13:11:26 -07004116 struct BufAnsCoder *buf_ans = &cpi->buf_ans;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004117#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004118 aom_writer mode_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004119#endif // CONFIG_ANS
4120 int tile_row, tile_col;
4121 TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok;
clang-format67948d32016-09-07 22:40:40 -07004122 TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers;
James Zern71a37de2017-04-20 16:03:13 -07004123 uint32_t total_size = 0;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004124 const int tile_cols = cm->tile_cols;
4125 const int tile_rows = cm->tile_rows;
Thomas Daviesaf6df172016-11-09 14:04:18 +00004126 unsigned int tile_size = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07004127#if CONFIG_TILE_GROUPS
4128 const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols;
4129 const int have_tiles = n_log2_tiles > 0;
James Zern71a37de2017-04-20 16:03:13 -07004130 uint32_t comp_hdr_size;
Thomas Davies80188d12016-10-26 16:08:35 -07004131 // Fixed size tile groups for the moment
4132 const int num_tg_hdrs = cm->num_tg;
4133 const int tg_size = (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs;
4134 int tile_count = 0;
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00004135 int tg_count = 1;
4136 int tile_size_bytes = 4;
4137 int tile_col_size_bytes;
James Zern71a37de2017-04-20 16:03:13 -07004138 uint32_t uncompressed_hdr_size = 0;
Thomas Davies80188d12016-10-26 16:08:35 -07004139 uint8_t *dst = NULL;
4140 struct aom_write_bit_buffer comp_hdr_len_wb;
4141 struct aom_write_bit_buffer tg_params_wb;
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00004142 struct aom_write_bit_buffer tile_size_bytes_wb;
James Zern71a37de2017-04-20 16:03:13 -07004143 uint32_t saved_offset;
Thomas Daviesaf6df172016-11-09 14:04:18 +00004144 int mtu_size = cpi->oxcf.mtu;
4145 int curr_tg_data_size = 0;
4146 int hdr_size;
Thomas Davies80188d12016-10-26 16:08:35 -07004147#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004148#if CONFIG_EXT_TILE
4149 const int have_tiles = tile_cols * tile_rows > 1;
4150#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004151
4152 *max_tile_size = 0;
4153 *max_tile_col_size = 0;
4154
4155// All tile size fields are written as 4 bytes. A call to remux_tiles will
4156// later compact the data if smaller headers are adequate.
4157
4158#if CONFIG_EXT_TILE
4159 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
4160 TileInfo tile_info;
4161 const int is_last_col = (tile_col == tile_cols - 1);
Jingning Han99ffce62017-04-25 15:48:41 -07004162 const uint32_t col_offset = total_size;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004163
Yaowu Xuf883b422016-08-30 14:01:10 -07004164 av1_tile_set_col(&tile_info, cm, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004165
4166 // The last column does not have a column header
4167 if (!is_last_col) total_size += 4;
4168
4169 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
4170 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
Yaowu Xuc27fc142016-08-22 16:08:15 -07004171 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
4172 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
4173 const int data_offset = have_tiles ? 4 : 0;
Thomas Daviesb2f32d82017-04-05 14:46:20 +01004174#if CONFIG_EC_ADAPT
4175 const int tile_idx = tile_row * tile_cols + tile_col;
4176 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
4177#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07004178 av1_tile_set_row(&tile_info, cm, tile_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004179
4180 buf->data = dst + total_size;
4181
4182 // If CONFIG_EXT_TILE = 1, every tile in the row has a header,
4183 // even the last one, unless no tiling is used at all.
4184 total_size += data_offset;
Thomas Daviesb2f32d82017-04-05 14:46:20 +01004185#if CONFIG_EC_ADAPT
4186 // Initialise tile context from the frame context
4187 this_tile->tctx = *cm->fc;
4188 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
4189#endif
Yushin Cho749c0572017-04-07 10:36:47 -07004190#if CONFIG_PVQ
4191 cpi->td.mb.pvq_q = &this_tile->pvq_q;
4192 cpi->td.mb.daala_enc.state.adapt = &this_tile->tctx.pvq_context;
4193#endif // CONFIG_PVQ
Yaowu Xuc27fc142016-08-22 16:08:15 -07004194#if !CONFIG_ANS
Yaowu Xuf883b422016-08-30 14:01:10 -07004195 aom_start_encode(&mode_bc, buf->data + data_offset);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004196 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
4197 assert(tok == tok_end);
Yaowu Xuf883b422016-08-30 14:01:10 -07004198 aom_stop_encode(&mode_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004199 tile_size = mode_bc.pos;
4200#else
Alex Converse2a1b3af2016-10-26 13:11:26 -07004201 buf_ans_write_init(buf_ans, buf->data + data_offset);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004202 write_modes(cpi, &tile_info, buf_ans, &tok, tok_end);
4203 assert(tok == tok_end);
Alex Converse1ecdf2b2016-11-30 15:51:12 -08004204 aom_buf_ans_flush(buf_ans);
Alex Converse2a1b3af2016-10-26 13:11:26 -07004205 tile_size = buf_ans_write_end(buf_ans);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004206#endif // !CONFIG_ANS
Yushin Cho749c0572017-04-07 10:36:47 -07004207#if CONFIG_PVQ
4208 cpi->td.mb.pvq_q = NULL;
4209#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004210 buf->size = tile_size;
4211
4212 // Record the maximum tile size we see, so we can compact headers later.
Yaowu Xuf883b422016-08-30 14:01:10 -07004213 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004214
4215 if (have_tiles) {
4216 // tile header: size of this tile, or copy offset
4217 uint32_t tile_header = tile_size;
4218
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08004219 // If the tile_encoding_mode is 1 (i.e. TILE_VR), check if this tile is
4220 // a copy tile.
Yaowu Xuc27fc142016-08-22 16:08:15 -07004221 // Very low chances to have copy tiles on the key frames, so don't
4222 // search on key frames to reduce unnecessary search.
Yunqing Wangd8cd55f2017-02-27 12:16:00 -08004223 if (cm->frame_type != KEY_FRAME && cm->tile_encoding_mode) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004224 const int identical_tile_offset =
4225 find_identical_tile(tile_row, tile_col, tile_buffers);
4226
4227 if (identical_tile_offset > 0) {
4228 tile_size = 0;
4229 tile_header = identical_tile_offset | 0x80;
4230 tile_header <<= 24;
4231 }
4232 }
4233
4234 mem_put_le32(buf->data, tile_header);
4235 }
4236
4237 total_size += tile_size;
4238 }
4239
4240 if (!is_last_col) {
Jingning Han99ffce62017-04-25 15:48:41 -07004241 uint32_t col_size = total_size - col_offset - 4;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004242 mem_put_le32(dst + col_offset, col_size);
4243
4244 // If it is not final packing, record the maximum tile column size we see,
4245 // otherwise, check if the tile size is out of the range.
Yaowu Xuf883b422016-08-30 14:01:10 -07004246 *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004247 }
4248 }
4249#else
Thomas Davies80188d12016-10-26 16:08:35 -07004250#if CONFIG_TILE_GROUPS
4251 write_uncompressed_header(cpi, wb);
4252
Jingning Hand3f441c2017-03-06 09:12:54 -08004253#if CONFIG_EXT_REFS
4254 if (cm->show_existing_frame) {
4255 total_size = aom_wb_bytes_written(wb);
4256 return (uint32_t)total_size;
4257 }
4258#endif // CONFIG_EXT_REFS
4259
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00004260 // Write the tile length code
4261 tile_size_bytes_wb = *wb;
Thomas Davies4974e522016-11-07 17:44:05 +00004262 aom_wb_write_literal(wb, 3, 2);
Thomas Davies80188d12016-10-26 16:08:35 -07004263
4264 /* Write a placeholder for the number of tiles in each tile group */
4265 tg_params_wb = *wb;
4266 saved_offset = wb->bit_offset;
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00004267 if (have_tiles) {
4268 aom_wb_overwrite_literal(wb, 3, n_log2_tiles);
4269 aom_wb_overwrite_literal(wb, (1 << n_log2_tiles) - 1, n_log2_tiles);
4270 }
Thomas Davies80188d12016-10-26 16:08:35 -07004271
4272 /* Write a placeholder for the compressed header length */
4273 comp_hdr_len_wb = *wb;
4274 aom_wb_write_literal(wb, 0, 16);
4275
4276 uncompressed_hdr_size = aom_wb_bytes_written(wb);
4277 dst = wb->bit_buffer;
4278 comp_hdr_size = write_compressed_header(cpi, dst + uncompressed_hdr_size);
Thomas Daviesfaa7fcf2016-11-14 11:59:43 +00004279 aom_wb_overwrite_literal(&comp_hdr_len_wb, (int)(comp_hdr_size), 16);
Thomas Daviesaf6df172016-11-09 14:04:18 +00004280 hdr_size = uncompressed_hdr_size + comp_hdr_size;
4281 total_size += hdr_size;
Thomas Davies80188d12016-10-26 16:08:35 -07004282#endif
4283
Yaowu Xuc27fc142016-08-22 16:08:15 -07004284 for (tile_row = 0; tile_row < tile_rows; tile_row++) {
4285 TileInfo tile_info;
4286 const int is_last_row = (tile_row == tile_rows - 1);
Yaowu Xuf883b422016-08-30 14:01:10 -07004287 av1_tile_set_row(&tile_info, cm, tile_row);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004288
4289 for (tile_col = 0; tile_col < tile_cols; tile_col++) {
Yushin Cho77bba8d2016-11-04 16:36:56 -07004290 const int tile_idx = tile_row * tile_cols + tile_col;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004291 TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col];
Thomas Daviesf77d4ad2017-01-10 18:55:42 +00004292#if CONFIG_PVQ || CONFIG_EC_ADAPT
Yushin Cho77bba8d2016-11-04 16:36:56 -07004293 TileDataEnc *this_tile = &cpi->tile_data[tile_idx];
4294#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004295 const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col];
4296 const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col];
Yaowu Xu8acaa552016-11-21 09:50:22 -08004297 const int is_last_col = (tile_col == tile_cols - 1);
Thomas Davies8fe64a32016-10-04 13:19:31 +01004298 const int is_last_tile = is_last_col && is_last_row;
Thomas Daviesa0de6d52017-01-20 14:45:25 +00004299#if !CONFIG_TILE_GROUPS
Yushin Cho77bba8d2016-11-04 16:36:56 -07004300 (void)tile_idx;
Thomas Davies8fe64a32016-10-04 13:19:31 +01004301#else
Thomas Daviesaf6df172016-11-09 14:04:18 +00004302
4303 if ((!mtu_size && tile_count > tg_size) ||
4304 (mtu_size && tile_count && curr_tg_data_size >= mtu_size)) {
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00004305 // New tile group
4306 tg_count++;
Thomas Daviesaf6df172016-11-09 14:04:18 +00004307 // We've exceeded the packet size
4308 if (tile_count > 1) {
4309 /* The last tile exceeded the packet size. The tile group size
4310 should therefore be tile_count-1.
4311 Move the last tile and insert headers before it
4312 */
James Zern71a37de2017-04-20 16:03:13 -07004313 uint32_t old_total_size = total_size - tile_size - 4;
Thomas Daviesaf6df172016-11-09 14:04:18 +00004314 memmove(dst + old_total_size + hdr_size, dst + old_total_size,
4315 (tile_size + 4) * sizeof(uint8_t));
4316 // Copy uncompressed header
4317 memmove(dst + old_total_size, dst,
4318 uncompressed_hdr_size * sizeof(uint8_t));
4319 // Write the number of tiles in the group into the last uncompressed
4320 // header before the one we've just inserted
Thomas Daviesfaa7fcf2016-11-14 11:59:43 +00004321 aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
4322 n_log2_tiles);
4323 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 2, n_log2_tiles);
Thomas Daviesaf6df172016-11-09 14:04:18 +00004324 // Update the pointer to the last TG params
4325 tg_params_wb.bit_offset = saved_offset + 8 * old_total_size;
4326 // Copy compressed header
4327 memmove(dst + old_total_size + uncompressed_hdr_size,
4328 dst + uncompressed_hdr_size, comp_hdr_size * sizeof(uint8_t));
4329 total_size += hdr_size;
4330 tile_count = 1;
4331 curr_tg_data_size = hdr_size + tile_size + 4;
4332
4333 } else {
4334 // We exceeded the packet size in just one tile
4335 // Copy uncompressed header
4336 memmove(dst + total_size, dst,
4337 uncompressed_hdr_size * sizeof(uint8_t));
4338 // Write the number of tiles in the group into the last uncompressed
4339 // header
Thomas Daviesfaa7fcf2016-11-14 11:59:43 +00004340 aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count,
4341 n_log2_tiles);
4342 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1, n_log2_tiles);
Thomas Daviesaf6df172016-11-09 14:04:18 +00004343 tg_params_wb.bit_offset = saved_offset + 8 * total_size;
4344 // Copy compressed header
4345 memmove(dst + total_size + uncompressed_hdr_size,
4346 dst + uncompressed_hdr_size, comp_hdr_size * sizeof(uint8_t));
4347 total_size += hdr_size;
4348 tile_count = 0;
4349 curr_tg_data_size = hdr_size;
4350 }
Thomas Davies80188d12016-10-26 16:08:35 -07004351 }
4352 tile_count++;
4353#endif
Yaowu Xuf883b422016-08-30 14:01:10 -07004354 av1_tile_set_col(&tile_info, cm, tile_col);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004355
Fangwen Fu73126c02017-02-08 22:37:47 -08004356#if CONFIG_DEPENDENT_HORZTILES && CONFIG_TILE_GROUPS
4357 av1_tile_set_tg_boundary(&tile_info, cm, tile_row, tile_col);
4358#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004359 buf->data = dst + total_size;
4360
4361 // The last tile does not have a header.
4362 if (!is_last_tile) total_size += 4;
4363
Thomas Davies493623e2017-03-31 16:12:25 +01004364#if CONFIG_EC_ADAPT
4365 // Initialise tile context from the frame context
4366 this_tile->tctx = *cm->fc;
4367 cpi->td.mb.e_mbd.tile_ctx = &this_tile->tctx;
4368#endif
Yushin Choc49ef3a2017-03-13 17:27:25 -07004369#if CONFIG_PVQ
4370 cpi->td.mb.pvq_q = &this_tile->pvq_q;
4371 cpi->td.mb.daala_enc.state.adapt = &this_tile->tctx.pvq_context;
4372#endif // CONFIG_PVQ
Thomas Davies80188d12016-10-26 16:08:35 -07004373#if CONFIG_ANS
Alex Converse2a1b3af2016-10-26 13:11:26 -07004374 buf_ans_write_init(buf_ans, dst + total_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004375 write_modes(cpi, &tile_info, buf_ans, &tok, tok_end);
4376 assert(tok == tok_end);
Alex Converse1ecdf2b2016-11-30 15:51:12 -08004377 aom_buf_ans_flush(buf_ans);
Alex Converse2a1b3af2016-10-26 13:11:26 -07004378 tile_size = buf_ans_write_end(buf_ans);
Thomas Davies80188d12016-10-26 16:08:35 -07004379#else
4380 aom_start_encode(&mode_bc, dst + total_size);
4381 write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end);
Jingning Han223b90e2017-04-04 09:48:37 -07004382#if !CONFIG_LV_MAP
Thomas Davies80188d12016-10-26 16:08:35 -07004383 assert(tok == tok_end);
Jingning Han223b90e2017-04-04 09:48:37 -07004384#endif // !CONFIG_LV_MAP
Thomas Davies80188d12016-10-26 16:08:35 -07004385 aom_stop_encode(&mode_bc);
4386 tile_size = mode_bc.pos;
Alex Converse2a1b3af2016-10-26 13:11:26 -07004387#endif // CONFIG_ANS
Yushin Cho77bba8d2016-11-04 16:36:56 -07004388#if CONFIG_PVQ
4389 cpi->td.mb.pvq_q = NULL;
Alex Converse2a1b3af2016-10-26 13:11:26 -07004390#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004391
4392 assert(tile_size > 0);
4393
Thomas Daviesaf6df172016-11-09 14:04:18 +00004394#if CONFIG_TILE_GROUPS
4395 curr_tg_data_size += tile_size + 4;
4396#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004397 buf->size = tile_size;
4398
4399 if (!is_last_tile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004400 *max_tile_size = AOMMAX(*max_tile_size, tile_size);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004401 // size of this tile
4402 mem_put_le32(buf->data, tile_size);
4403 }
4404
4405 total_size += tile_size;
4406 }
4407 }
Thomas Davies80188d12016-10-26 16:08:35 -07004408#if CONFIG_TILE_GROUPS
4409 // Write the final tile group size
4410 if (n_log2_tiles) {
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00004411 aom_wb_overwrite_literal(&tg_params_wb, (1 << n_log2_tiles) - tile_count,
4412 n_log2_tiles);
4413 aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1, n_log2_tiles);
Thomas Davies80188d12016-10-26 16:08:35 -07004414 }
Thomas Daviesdbfc4f92017-01-18 16:46:09 +00004415 // Remux if possible. TODO (Thomas Davies): do this for more than one tile
4416 // group
4417 if (have_tiles && tg_count == 1) {
4418 int data_size = total_size - (uncompressed_hdr_size + comp_hdr_size);
4419 data_size = remux_tiles(cm, dst + uncompressed_hdr_size + comp_hdr_size,
4420 data_size, *max_tile_size, *max_tile_col_size,
4421 &tile_size_bytes, &tile_col_size_bytes);
4422 total_size = data_size + uncompressed_hdr_size + comp_hdr_size;
4423 aom_wb_overwrite_literal(&tile_size_bytes_wb, tile_size_bytes - 1, 2);
4424 }
4425
Thomas Davies80188d12016-10-26 16:08:35 -07004426#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004427#endif // CONFIG_EXT_TILE
4428 return (uint32_t)total_size;
4429}
4430
Yaowu Xuf883b422016-08-30 14:01:10 -07004431static void write_render_size(const AV1_COMMON *cm,
4432 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004433 const int scaling_active =
4434 cm->width != cm->render_width || cm->height != cm->render_height;
Yaowu Xuf883b422016-08-30 14:01:10 -07004435 aom_wb_write_bit(wb, scaling_active);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004436 if (scaling_active) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004437 aom_wb_write_literal(wb, cm->render_width - 1, 16);
4438 aom_wb_write_literal(wb, cm->render_height - 1, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004439 }
4440}
4441
Fergus Simpsond91c8c92017-04-07 12:12:00 -07004442#if CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07004443static void write_superres_scale(const AV1_COMMON *const cm,
4444 struct aom_write_bit_buffer *wb) {
4445 // This scaling and frame superres are probably incompatible
4446 assert(cm->width == cm->render_width && cm->height == cm->render_height);
4447
4448 // First bit is whether to scale or not
4449 if (cm->superres_scale_numerator == SUPERRES_SCALE_DENOMINATOR) {
4450 aom_wb_write_bit(wb, 0); // no scaling
4451 } else {
4452 aom_wb_write_bit(wb, 1); // scaling, write scale factor
4453 // TODO(afergs): write factor to the compressed header instead
4454 aom_wb_write_literal(
4455 wb, cm->superres_scale_numerator - SUPERRES_SCALE_NUMERATOR_MIN,
4456 SUPERRES_SCALE_BITS);
4457 }
4458}
Fergus Simpsond91c8c92017-04-07 12:12:00 -07004459#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07004460
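// Writes the coded frame size as width - 1 / height - 1 in 16-bit literals,
// or the full pre-downscaling resolution when superres is active, then the
// render size and (if enabled) the superres scale.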
Yaowu Xuf883b422016-08-30 14:01:10 -07004461static void write_frame_size(const AV1_COMMON *cm,
4462 struct aom_write_bit_buffer *wb) {
Fergus Simpsond91c8c92017-04-07 12:12:00 -07004463#if CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07004464 // If SUPERRES scaling is happening, write the full resolution instead of the
4465 // downscaled resolution. The decoder will reduce this resolution itself.
4466 if (cm->superres_scale_numerator != SUPERRES_SCALE_DENOMINATOR) {
4467 aom_wb_write_literal(wb, cm->superres_width - 1, 16);
4468 aom_wb_write_literal(wb, cm->superres_height - 1, 16);
4469 } else {
Fergus Simpsond91c8c92017-04-07 12:12:00 -07004470#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07004471 aom_wb_write_literal(wb, cm->width - 1, 16);
4472 aom_wb_write_literal(wb, cm->height - 1, 16);
Fergus Simpsond91c8c92017-04-07 12:12:00 -07004473#if CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07004474 }
Fergus Simpsond91c8c92017-04-07 12:12:00 -07004475#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuc27fc142016-08-22 16:08:15 -07004476
Fergus Simpsone7508412017-03-14 18:14:09 -07004477 // TODO(afergs): Also write something different to render_size?
4478 // When superres scales, they'll be almost guaranteed to be
4479 // different on the other side.
Yaowu Xuc27fc142016-08-22 16:08:15 -07004480 write_render_size(cm, wb);
Fergus Simpsond91c8c92017-04-07 12:12:00 -07004481#if CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07004482 write_superres_scale(cm, wb);
Fergus Simpsond91c8c92017-04-07 12:12:00 -07004483#endif // CONFIG_FRAME_SUPERRES
Yaowu Xuc27fc142016-08-22 16:08:15 -07004484}
4485
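// For inter frames the size may be inherited from a reference: one bit is
// written per reference (LAST_FRAME..ALTREF_FRAME) until a buffer whose
// coded and render dimensions both match is found; if none matches, the
// explicit frame size is written instead.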
Yaowu Xuf883b422016-08-30 14:01:10 -07004486static void write_frame_size_with_refs(AV1_COMP *cpi,
4487 struct aom_write_bit_buffer *wb) {
4488 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004489 int found = 0;
4490
4491 MV_REFERENCE_FRAME ref_frame;
4492 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4493 YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame);
4494
4495 if (cfg != NULL) {
4496 found =
4497 cm->width == cfg->y_crop_width && cm->height == cfg->y_crop_height;
4498 found &= cm->render_width == cfg->render_width &&
4499 cm->render_height == cfg->render_height;
4500 }
Yaowu Xuf883b422016-08-30 14:01:10 -07004501 aom_wb_write_bit(wb, found);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004502 if (found) {
4503 break;
4504 }
4505 }
4506
4507 if (!found) {
Fergus Simpsone7508412017-03-14 18:14:09 -07004508 write_frame_size(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004509 }
4510}
4511
Yaowu Xuf883b422016-08-30 14:01:10 -07004512static void write_sync_code(struct aom_write_bit_buffer *wb) {
4513 aom_wb_write_literal(wb, AV1_SYNC_CODE_0, 8);
4514 aom_wb_write_literal(wb, AV1_SYNC_CODE_1, 8);
4515 aom_wb_write_literal(wb, AV1_SYNC_CODE_2, 8);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004516}
4517
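// The profile index is emitted low bit first, two bits for
// PROFILE_0..PROFILE_2; PROFILE_3 adds a third bit, written as zero here.
// Hence the literal values below differ from the enum values themselves.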
4518static void write_profile(BITSTREAM_PROFILE profile,
Yaowu Xuf883b422016-08-30 14:01:10 -07004519 struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004520 switch (profile) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004521 case PROFILE_0: aom_wb_write_literal(wb, 0, 2); break;
4522 case PROFILE_1: aom_wb_write_literal(wb, 2, 2); break;
4523 case PROFILE_2: aom_wb_write_literal(wb, 1, 2); break;
4524 case PROFILE_3: aom_wb_write_literal(wb, 6, 3); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004525 default: assert(0);
4526 }
4527}
4528
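// For PROFILE_2 and above, one bit selects 10-bit (0) or 12-bit (1) depth.
// The color space is a 3-bit literal; non-sRGB streams then add the
// color-range bit and, in PROFILE_1/PROFILE_3, explicit subsampling bits
// (PROFILE_0/PROFILE_2 imply 4:2:0), while sRGB is only written for
// PROFILE_1/PROFILE_3.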
4529static void write_bitdepth_colorspace_sampling(
Yaowu Xuf883b422016-08-30 14:01:10 -07004530 AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004531 if (cm->profile >= PROFILE_2) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004532 assert(cm->bit_depth > AOM_BITS_8);
4533 aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004534 }
Yaowu Xuf883b422016-08-30 14:01:10 -07004535 aom_wb_write_literal(wb, cm->color_space, 3);
4536 if (cm->color_space != AOM_CS_SRGB) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07004537 // 0: [16, 235] (i.e. xvYCC), 1: [0, 255]
Yaowu Xuf883b422016-08-30 14:01:10 -07004538 aom_wb_write_bit(wb, cm->color_range);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004539 if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) {
4540 assert(cm->subsampling_x != 1 || cm->subsampling_y != 1);
Yaowu Xuf883b422016-08-30 14:01:10 -07004541 aom_wb_write_bit(wb, cm->subsampling_x);
4542 aom_wb_write_bit(wb, cm->subsampling_y);
4543 aom_wb_write_bit(wb, 0); // unused
Yaowu Xuc27fc142016-08-22 16:08:15 -07004544 } else {
4545 assert(cm->subsampling_x == 1 && cm->subsampling_y == 1);
4546 }
4547 } else {
4548 assert(cm->profile == PROFILE_1 || cm->profile == PROFILE_3);
Yaowu Xuf883b422016-08-30 14:01:10 -07004549 aom_wb_write_bit(wb, 0); // unused
Yaowu Xuc27fc142016-08-22 16:08:15 -07004550 }
4551}
4552
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004553#if CONFIG_REFERENCE_BUFFER
4554void write_sequence_header(SequenceHeader *seq_params) {
4555 /* Placeholder for actually writing to the bitstream */
4556 seq_params->frame_id_numbers_present_flag = FRAME_ID_NUMBERS_PRESENT_FLAG;
4557 seq_params->frame_id_length_minus7 = FRAME_ID_LENGTH_MINUS7;
4558 seq_params->delta_frame_id_length_minus2 = DELTA_FRAME_ID_LENGTH_MINUS2;
4559}
4560#endif
4561
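// Top-level writer for the uncompressed frame header: frame marker and
// profile, the show_existing_frame short path, frame type / show-frame /
// error-resilience flags, optional frame ids, sync code plus bit depth,
// color space and frame size for intra frames, reference signaling for
// inter frames, and finally loop filter, CDEF/restoration, quantization,
// segmentation, delta-q, tx mode, reference mode and tile info.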
Yaowu Xuf883b422016-08-30 14:01:10 -07004562static void write_uncompressed_header(AV1_COMP *cpi,
4563 struct aom_write_bit_buffer *wb) {
4564 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004565 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
4566
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004567#if CONFIG_REFERENCE_BUFFER
4568 /* TODO: Move outside frame loop or inside key-frame branch */
4569 write_sequence_header(&cpi->seq_params);
4570#endif
4571
Yaowu Xuf883b422016-08-30 14:01:10 -07004572 aom_wb_write_literal(wb, AOM_FRAME_MARKER, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004573
4574 write_profile(cm->profile, wb);
4575
4576#if CONFIG_EXT_REFS
4577 // NOTE: By default all coded frames to be used as a reference
4578 cm->is_reference_frame = 1;
4579
4580 if (cm->show_existing_frame) {
4581 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
4582 const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show];
4583
4584 if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004585 aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004586 "Buffer %d does not contain a reconstructed frame",
4587 frame_to_show);
4588 }
4589 ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show);
4590
Yaowu Xuf883b422016-08-30 14:01:10 -07004591 aom_wb_write_bit(wb, 1); // show_existing_frame
4592 aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004593
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004594#if CONFIG_REFERENCE_BUFFER
4595 if (cpi->seq_params.frame_id_numbers_present_flag) {
4596 int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7;
4597 int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show];
4598 aom_wb_write_literal(wb, display_frame_id, frame_id_len);
4599 /* Add a zero byte to prevent emulation of the superframe marker */
4600 /* Same logic as when terminating the entropy coder */
4601 /* Consider having this logic in only one place */
4602 aom_wb_write_literal(wb, 0, 8);
4603 }
4604#endif
4605
Yaowu Xuc27fc142016-08-22 16:08:15 -07004606 return;
4607 } else {
4608#endif // CONFIG_EXT_REFS
Yaowu Xuf883b422016-08-30 14:01:10 -07004609 aom_wb_write_bit(wb, 0); // show_existing_frame
Yaowu Xuc27fc142016-08-22 16:08:15 -07004610#if CONFIG_EXT_REFS
4611 }
4612#endif // CONFIG_EXT_REFS
4613
Yaowu Xuf883b422016-08-30 14:01:10 -07004614 aom_wb_write_bit(wb, cm->frame_type);
4615 aom_wb_write_bit(wb, cm->show_frame);
4616 aom_wb_write_bit(wb, cm->error_resilient_mode);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004617
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004618#if CONFIG_REFERENCE_BUFFER
4619 cm->invalid_delta_frame_id_minus1 = 0;
4620 if (cpi->seq_params.frame_id_numbers_present_flag) {
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004621 int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7;
4622 aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004623 }
4624#endif
4625
Fergus Simpsond91c8c92017-04-07 12:12:00 -07004626#if CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07004627 // TODO(afergs): Remove - this is just to stop superres from breaking
4628 cm->superres_scale_numerator = SUPERRES_SCALE_DENOMINATOR;
Fergus Simpsond91c8c92017-04-07 12:12:00 -07004629#endif // CONFIG_FRAME_SUPERRES
Fergus Simpsone7508412017-03-14 18:14:09 -07004630
Yaowu Xuc27fc142016-08-22 16:08:15 -07004631 if (cm->frame_type == KEY_FRAME) {
4632 write_sync_code(wb);
4633 write_bitdepth_colorspace_sampling(cm, wb);
4634 write_frame_size(cm, wb);
Alex Converseeb780e72016-12-13 12:46:41 -08004635#if CONFIG_ANS && ANS_MAX_SYMBOLS
4636 assert(cpi->common.ans_window_size_log2 >= 8);
4637 assert(cpi->common.ans_window_size_log2 < 24);
4638 aom_wb_write_literal(wb, cpi->common.ans_window_size_log2 - 8, 4);
4639#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
Urvang Joshib100db72016-10-12 16:28:56 -07004640#if CONFIG_PALETTE
hui su24f7b072016-10-12 11:36:24 -07004641 aom_wb_write_bit(wb, cm->allow_screen_content_tools);
Urvang Joshib100db72016-10-12 16:28:56 -07004642#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004643 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004644 if (!cm->show_frame) aom_wb_write_bit(wb, cm->intra_only);
Urvang Joshib100db72016-10-12 16:28:56 -07004645#if CONFIG_PALETTE
hui su24f7b072016-10-12 11:36:24 -07004646 if (cm->intra_only) aom_wb_write_bit(wb, cm->allow_screen_content_tools);
Urvang Joshib100db72016-10-12 16:28:56 -07004647#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -07004648 if (!cm->error_resilient_mode) {
4649 if (cm->intra_only) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004650 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004651 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4652 } else {
Yaowu Xuf883b422016-08-30 14:01:10 -07004653 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004654 cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE);
4655 if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE)
Yaowu Xuf883b422016-08-30 14:01:10 -07004656 aom_wb_write_bit(wb,
Yaowu Xuc27fc142016-08-22 16:08:15 -07004657 cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL);
4658 }
4659 }
4660
4661#if CONFIG_EXT_REFS
4662 cpi->refresh_frame_mask = get_refresh_mask(cpi);
4663#endif // CONFIG_EXT_REFS
4664
4665 if (cm->intra_only) {
4666 write_sync_code(wb);
4667 write_bitdepth_colorspace_sampling(cm, wb);
4668
4669#if CONFIG_EXT_REFS
Yaowu Xuf883b422016-08-30 14:01:10 -07004670 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004671#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004672 aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004673#endif // CONFIG_EXT_REFS
4674 write_frame_size(cm, wb);
Alex Converseeb780e72016-12-13 12:46:41 -08004675
4676#if CONFIG_ANS && ANS_MAX_SYMBOLS
4677 assert(cpi->common.ans_window_size_log2 >= 8);
4678 assert(cpi->common.ans_window_size_log2 < 24);
4679 aom_wb_write_literal(wb, cpi->common.ans_window_size_log2 - 8, 4);
4680#endif // CONFIG_ANS && ANS_MAX_SYMBOLS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004681 } else {
4682 MV_REFERENCE_FRAME ref_frame;
4683
4684#if CONFIG_EXT_REFS
Yaowu Xuf883b422016-08-30 14:01:10 -07004685 aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004686#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004687 aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004688#endif // CONFIG_EXT_REFS
4689
4690#if CONFIG_EXT_REFS
4691 if (!cpi->refresh_frame_mask) {
4692 // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame
4693 // will not be used as a reference
4694 cm->is_reference_frame = 0;
4695 }
4696#endif // CONFIG_EXT_REFS
4697
4698 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
4699 assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX);
Yaowu Xuf883b422016-08-30 14:01:10 -07004700 aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame),
Yaowu Xuc27fc142016-08-22 16:08:15 -07004701 REF_FRAMES_LOG2);
Yaowu Xuf883b422016-08-30 14:01:10 -07004702 aom_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004703#if CONFIG_REFERENCE_BUFFER
4704 if (cpi->seq_params.frame_id_numbers_present_flag) {
4705 int i = get_ref_frame_map_idx(cpi, ref_frame);
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004706 int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7;
4707 int diff_len = cpi->seq_params.delta_frame_id_length_minus2 + 2;
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004708 int delta_frame_id_minus1 =
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004709 ((cm->current_frame_id - cm->ref_frame_id[i] +
4710 (1 << frame_id_len)) %
4711 (1 << frame_id_len)) -
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004712 1;
4713 if (delta_frame_id_minus1 < 0 ||
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004714 delta_frame_id_minus1 >= (1 << diff_len))
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004715 cm->invalid_delta_frame_id_minus1 = 1;
Arild Fuldseth (arilfuld)788dc232016-12-20 17:55:52 +01004716 aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len);
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004717 }
4718#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004719 }
4720
Arild Fuldseth842e9b02016-09-02 13:00:05 +02004721#if CONFIG_FRAME_SIZE
4722 if (cm->error_resilient_mode == 0) {
4723 write_frame_size_with_refs(cpi, wb);
4724 } else {
4725 write_frame_size(cm, wb);
4726 }
4727#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07004728 write_frame_size_with_refs(cpi, wb);
Arild Fuldseth842e9b02016-09-02 13:00:05 +02004729#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004730
Yaowu Xuf883b422016-08-30 14:01:10 -07004731 aom_wb_write_bit(wb, cm->allow_high_precision_mv);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004732
4733 fix_interp_filter(cm, cpi->td.counts);
Angie Chiang5678ad92016-11-21 09:38:40 -08004734 write_frame_interp_filter(cm->interp_filter, wb);
Fangwen Fu8d164de2016-12-14 13:40:54 -08004735#if CONFIG_TEMPMV_SIGNALING
4736 if (!cm->error_resilient_mode) {
4737 aom_wb_write_bit(wb, cm->use_prev_frame_mvs);
4738 }
4739#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004740 }
4741 }
4742
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +01004743#if CONFIG_REFERENCE_BUFFER
4744 cm->refresh_mask = cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi);
4745#endif
4746
Yaowu Xuc27fc142016-08-22 16:08:15 -07004747 if (!cm->error_resilient_mode) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004748 aom_wb_write_bit(
Yaowu Xuc27fc142016-08-22 16:08:15 -07004749 wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD);
4750 }
4751
Yaowu Xuf883b422016-08-30 14:01:10 -07004752 aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004753
Jingning Hanc709e1f2016-12-06 14:48:09 -08004754 assert(cm->mib_size == mi_size_wide[cm->sb_size]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004755 assert(cm->mib_size == 1 << cm->mib_size_log2);
4756#if CONFIG_EXT_PARTITION
4757 assert(cm->sb_size == BLOCK_128X128 || cm->sb_size == BLOCK_64X64);
Yaowu Xuf883b422016-08-30 14:01:10 -07004758 aom_wb_write_bit(wb, cm->sb_size == BLOCK_128X128 ? 1 : 0);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004759#else
4760 assert(cm->sb_size == BLOCK_64X64);
4761#endif // CONFIG_EXT_PARTITION
4762
4763 encode_loopfilter(cm, wb);
Jean-Marc Valin01435132017-02-18 14:12:53 -05004764#if CONFIG_CDEF
Steinar Midtskogena9d41e82017-03-17 12:48:15 +01004765 encode_cdef(cm, wb);
Steinar Midtskogen5d56f4d2016-09-25 09:23:16 +02004766#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004767#if CONFIG_LOOP_RESTORATION
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07004768 encode_restoration_mode(cm, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004769#endif // CONFIG_LOOP_RESTORATION
4770 encode_quantization(cm, wb);
4771 encode_segmentation(cm, xd, wb);
Arild Fuldseth07441162016-08-15 15:07:52 +02004772#if CONFIG_DELTA_Q
4773 {
4774 int i;
4775 struct segmentation *const seg = &cm->seg;
4776 int segment_quantizer_active = 0;
4777 for (i = 0; i < MAX_SEGMENTS; i++) {
4778 if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) {
4779 segment_quantizer_active = 1;
4780 }
4781 }
Arild Fuldseth (arilfuld)54de7d62017-03-20 13:07:11 +01004782
4783 if (cm->delta_q_present_flag)
4784 assert(segment_quantizer_active == 0 && cm->base_qindex > 0);
4785 if (segment_quantizer_active == 0 && cm->base_qindex > 0) {
Arild Fuldseth07441162016-08-15 15:07:52 +02004786 aom_wb_write_bit(wb, cm->delta_q_present_flag);
4787 if (cm->delta_q_present_flag) {
Thomas Daviesf6936102016-09-05 16:51:31 +01004788 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2);
Arild Fuldseth07441162016-08-15 15:07:52 +02004789 xd->prev_qindex = cm->base_qindex;
Fangwen Fu231fe422017-04-24 17:52:29 -07004790#if CONFIG_EXT_DELTA_Q
4791 assert(seg->abs_delta == SEGMENT_DELTADATA);
4792 aom_wb_write_bit(wb, cm->delta_lf_present_flag);
4793 if (cm->delta_lf_present_flag) {
4794 aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_lf_res) - 1, 2);
4795 xd->prev_delta_lf_from_base = 0;
4796 }
4797#endif // CONFIG_EXT_DELTA_Q
Arild Fuldseth07441162016-08-15 15:07:52 +02004798 }
4799 }
4800 }
4801#endif
4802
Yue Cheneeacc4c2017-01-17 17:29:17 -08004803 write_tx_mode(cm, xd, &cm->tx_mode, wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004804
4805 if (cpi->allow_comp_inter_inter) {
4806 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
Zoe Liub05e5d12017-02-07 14:32:53 -08004807#if !CONFIG_REF_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004808 const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE;
Zoe Liub05e5d12017-02-07 14:32:53 -08004809#endif // !CONFIG_REF_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004810
Yaowu Xuf883b422016-08-30 14:01:10 -07004811 aom_wb_write_bit(wb, use_hybrid_pred);
Zoe Liub05e5d12017-02-07 14:32:53 -08004812#if !CONFIG_REF_ADAPT
Yaowu Xuf883b422016-08-30 14:01:10 -07004813 if (!use_hybrid_pred) aom_wb_write_bit(wb, use_compound_pred);
Zoe Liub05e5d12017-02-07 14:32:53 -08004814#endif // !CONFIG_REF_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004815 }
4816
Sarah Parkere68a3e42017-02-16 14:03:24 -08004817#if CONFIG_EXT_TX
4818 aom_wb_write_bit(wb, cm->reduced_tx_set_used);
4819#endif // CONFIG_EXT_TX
4820
Yaowu Xuc27fc142016-08-22 16:08:15 -07004821 write_tile_info(cm, wb);
4822}
4823
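// Each global motion model is coded as a type from
// av1_global_motion_types_tree followed by only the warp parameters that
// type uses. Parameters are coded relative to the reference frame's
// parameters with a signed subexponential code, at reduced precision (the
// GM_*_PREC_DIFF shifts); translation-only models use a coarser precision
// when high-precision MVs are disabled.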
4824#if CONFIG_GLOBAL_MOTION
David Barkercf3d0b02016-11-10 10:14:49 +00004825static void write_global_motion_params(WarpedMotionParams *params,
Sarah Parkerf1783292017-04-05 11:55:27 -07004826 WarpedMotionParams *ref_params,
Sarah Parker13d06622017-03-10 17:03:28 -08004827 aom_prob *probs, aom_writer *w,
4828 int allow_hp) {
David Barkercf3d0b02016-11-10 10:14:49 +00004829 TransformationType type = params->wmtype;
Sarah Parker13d06622017-03-10 17:03:28 -08004830 int trans_bits;
4831 int trans_prec_diff;
Yaowu Xuf883b422016-08-30 14:01:10 -07004832 av1_write_token(w, av1_global_motion_types_tree, probs,
David Barkercf3d0b02016-11-10 10:14:49 +00004833 &global_motion_types_encodings[type]);
4834 switch (type) {
Debargha Mukherjee3fb33f02016-11-12 10:43:50 -08004835 case HOMOGRAPHY:
Debargha Mukherjee5dfa9302017-02-10 05:00:08 -08004836 case HORTRAPEZOID:
4837 case VERTRAPEZOID:
4838 if (type != HORTRAPEZOID)
Sarah Parkerf1783292017-04-05 11:55:27 -07004839 aom_write_signed_primitive_refsubexpfin(
4840 w, GM_ROW3HOMO_MAX + 1, SUBEXPFIN_K,
4841 (ref_params->wmmat[6] >> GM_ROW3HOMO_PREC_DIFF),
4842 (params->wmmat[6] >> GM_ROW3HOMO_PREC_DIFF));
Debargha Mukherjee5dfa9302017-02-10 05:00:08 -08004843 if (type != VERTRAPEZOID)
Sarah Parkerf1783292017-04-05 11:55:27 -07004844 aom_write_signed_primitive_refsubexpfin(
4845 w, GM_ROW3HOMO_MAX + 1, SUBEXPFIN_K,
4846 (ref_params->wmmat[7] >> GM_ROW3HOMO_PREC_DIFF),
4847 (params->wmmat[7] >> GM_ROW3HOMO_PREC_DIFF));
Debargha Mukherjee3fb33f02016-11-12 10:43:50 -08004848 // fallthrough intended
David Barkercf3d0b02016-11-10 10:14:49 +00004849 case AFFINE:
4850 case ROTZOOM:
Sarah Parkerf1783292017-04-05 11:55:27 -07004851 aom_write_signed_primitive_refsubexpfin(
4852 w, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4853 (ref_params->wmmat[2] >> GM_ALPHA_PREC_DIFF) -
4854 (1 << GM_ALPHA_PREC_BITS),
4855 (params->wmmat[2] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS));
Debargha Mukherjee5dfa9302017-02-10 05:00:08 -08004856 if (type != VERTRAPEZOID)
Sarah Parkerf1783292017-04-05 11:55:27 -07004857 aom_write_signed_primitive_refsubexpfin(
4858 w, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4859 (ref_params->wmmat[3] >> GM_ALPHA_PREC_DIFF),
4860 (params->wmmat[3] >> GM_ALPHA_PREC_DIFF));
Debargha Mukherjee5dfa9302017-02-10 05:00:08 -08004861 if (type >= AFFINE) {
4862 if (type != HORTRAPEZOID)
Sarah Parkerf1783292017-04-05 11:55:27 -07004863 aom_write_signed_primitive_refsubexpfin(
4864 w, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4865 (ref_params->wmmat[4] >> GM_ALPHA_PREC_DIFF),
4866 (params->wmmat[4] >> GM_ALPHA_PREC_DIFF));
4867 aom_write_signed_primitive_refsubexpfin(
4868 w, GM_ALPHA_MAX + 1, SUBEXPFIN_K,
4869 (ref_params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
4870 (1 << GM_ALPHA_PREC_BITS),
4871 (params->wmmat[5] >> GM_ALPHA_PREC_DIFF) -
4872 (1 << GM_ALPHA_PREC_BITS));
Debargha Mukherjee8db4c772016-11-07 12:54:21 -08004873 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004874 // fallthrough intended
David Barkercf3d0b02016-11-10 10:14:49 +00004875 case TRANSLATION:
Sarah Parker13d06622017-03-10 17:03:28 -08004876 trans_bits = (type == TRANSLATION) ? GM_ABS_TRANS_ONLY_BITS - !allow_hp
4877 : GM_ABS_TRANS_BITS;
4878 trans_prec_diff = (type == TRANSLATION)
4879 ? GM_TRANS_ONLY_PREC_DIFF + !allow_hp
4880 : GM_TRANS_PREC_DIFF;
Sarah Parkerf1783292017-04-05 11:55:27 -07004881 aom_write_signed_primitive_refsubexpfin(
4882 w, (1 << trans_bits) + 1, SUBEXPFIN_K,
4883 (ref_params->wmmat[0] >> trans_prec_diff),
4884 (params->wmmat[0] >> trans_prec_diff));
4885 aom_write_signed_primitive_refsubexpfin(
4886 w, (1 << trans_bits) + 1, SUBEXPFIN_K,
4887 (ref_params->wmmat[1] >> trans_prec_diff),
4888 (params->wmmat[1] >> trans_prec_diff));
Yaowu Xuc27fc142016-08-22 16:08:15 -07004889 break;
Debargha Mukherjee3fb33f02016-11-12 10:43:50 -08004890 case IDENTITY: break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004891 default: assert(0);
4892 }
4893}
4894
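// Writes one global motion model per reference frame (LAST_FRAME to
// ALTREF_FRAME), each predicted from the corresponding model of the
// previous frame.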
Yaowu Xuf883b422016-08-30 14:01:10 -07004895static void write_global_motion(AV1_COMP *cpi, aom_writer *w) {
4896 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004897 int frame;
4898 for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) {
David Barker43479c62016-11-30 10:34:20 +00004899#if !CONFIG_REF_MV
4900 // With ref-mv, clearing unused global motion models here is
4901 // unsafe, and we need to rely on the recode loop to do it
4902 // instead. See av1_find_mv_refs for details.
Debargha Mukherjee265db6d2017-03-28 11:15:27 -07004903 if (!cpi->global_motion_used[frame]) {
Sarah Parker4c10a3c2017-04-10 19:37:59 -07004904 set_default_warp_params(&cm->global_motion[frame]);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004905 }
David Barker43479c62016-11-30 10:34:20 +00004906#endif
Sarah Parkerf1783292017-04-05 11:55:27 -07004907 write_global_motion_params(
4908 &cm->global_motion[frame], &cm->prev_frame->global_motion[frame],
4909 cm->fc->global_motion_types_prob, w, cm->allow_high_precision_mv);
Sarah Parkere5299862016-08-16 14:57:37 -07004910 /*
Debargha Mukherjee265db6d2017-03-28 11:15:27 -07004911 printf("Frame %d/%d: Enc Ref %d (used %d): %d %d %d %d\n",
Debargha Mukherjeeb98a7022016-11-15 16:07:12 -08004912 cm->current_video_frame, cm->show_frame, frame,
Debargha Mukherjee265db6d2017-03-28 11:15:27 -07004913 cpi->global_motion_used[frame], cm->global_motion[frame].wmmat[0],
4914 cm->global_motion[frame].wmmat[1], cm->global_motion[frame].wmmat[2],
4915 cm->global_motion[frame].wmmat[3]);
Debargha Mukherjee8db4c772016-11-07 12:54:21 -08004916 */
Yaowu Xuc27fc142016-08-22 16:08:15 -07004917 }
4918}
4919#endif
4920
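// Emits the entropy-coded (compressed) header: restoration filter data,
// transform/coefficient probability updates, skip and delta-q updates,
// mode, partition and reference probability updates, NMV probabilities and,
// when enabled, supertx and global motion data. The returned size must fit
// in the 16-bit length field of the uncompressed header (see the asserts
// below).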
Yaowu Xuf883b422016-08-30 14:01:10 -07004921static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data) {
4922 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004923#if CONFIG_SUPERTX
4924 MACROBLOCKD *const xd = &cpi->td.mb.e_mbd;
4925#endif // CONFIG_SUPERTX
4926 FRAME_CONTEXT *const fc = cm->fc;
4927 FRAME_COUNTS *counts = cpi->td.counts;
Yaowu Xuf883b422016-08-30 14:01:10 -07004928 aom_writer *header_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004929 int i, j;
Thomas9ac55082016-09-23 18:04:17 +01004930
Thomas Davies80188d12016-10-26 16:08:35 -07004931#if CONFIG_TILE_GROUPS
4932 const int probwt = cm->num_tg;
4933#else
4934 const int probwt = 1;
4935#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07004936
4937#if CONFIG_ANS
Yaowu Xuc27fc142016-08-22 16:08:15 -07004938 int header_size;
4939 header_bc = &cpi->buf_ans;
Alex Converse2a1b3af2016-10-26 13:11:26 -07004940 buf_ans_write_init(header_bc, data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004941#else
Yaowu Xuf883b422016-08-30 14:01:10 -07004942 aom_writer real_header_bc;
Yaowu Xuc27fc142016-08-22 16:08:15 -07004943 header_bc = &real_header_bc;
Yaowu Xuf883b422016-08-30 14:01:10 -07004944 aom_start_encode(header_bc, data);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004945#endif
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07004946
4947#if CONFIG_LOOP_RESTORATION
4948 encode_restoration(cm, header_bc);
4949#endif // CONFIG_LOOP_RESTORATION
Nathan E. Eggeb353a8e2017-02-17 10:27:37 -05004950#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004951 update_txfm_probs(cm, header_bc, counts);
Nathan E. Eggeb353a8e2017-02-17 10:27:37 -05004952#endif
Angie Chiang800df032017-03-22 11:14:12 -07004953#if CONFIG_LV_MAP
4954 av1_write_txb_probs(cpi, header_bc);
4955#else
Yushin Cho77bba8d2016-11-04 16:36:56 -07004956#if !CONFIG_PVQ
Alex Conversea9598cd2017-02-03 14:18:05 -08004957#if !(CONFIG_EC_ADAPT && CONFIG_NEW_TOKENSET)
Yaowu Xuc27fc142016-08-22 16:08:15 -07004958 update_coef_probs(cpi, header_bc);
Alex Conversea9598cd2017-02-03 14:18:05 -08004959#endif // !(CONFIG_EC_ADAPT && CONFIG_NEW_TOKENSET)
Thomas Daviesfc1598a2017-01-13 17:07:25 +00004960#endif // CONFIG_PVQ
Angie Chiang800df032017-03-22 11:14:12 -07004961#endif // CONFIG_LV_MAP
4962
Yaowu Xuc27fc142016-08-22 16:08:15 -07004963#if CONFIG_VAR_TX
Thomas Davies80188d12016-10-26 16:08:35 -07004964 update_txfm_partition_probs(cm, header_bc, counts, probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004965#endif
4966
4967 update_skip_probs(cm, header_bc, counts);
Thomas Daviesd6ee8a82017-03-02 14:42:50 +00004968#if !CONFIG_EC_ADAPT && CONFIG_DELTA_Q
Thomas Daviesf6936102016-09-05 16:51:31 +01004969 update_delta_q_probs(cm, header_bc, counts);
Fangwen Fu231fe422017-04-24 17:52:29 -07004970#if CONFIG_EXT_DELTA_Q
4971 update_delta_lf_probs(cm, header_bc, counts);
4972#endif
Thomas Daviesf6936102016-09-05 16:51:31 +01004973#endif
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04004974#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07004975 update_seg_probs(cpi, header_bc);
4976
Nathan E. Egge380cb1a2016-09-08 10:13:42 -04004977 for (i = 0; i < INTRA_MODES; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07004978 prob_diff_update(av1_intra_mode_tree, fc->uv_mode_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004979 counts->uv_mode[i], INTRA_MODES, probwt, header_bc);
Nathan E. Egge380cb1a2016-09-08 10:13:42 -04004980 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07004981
4982#if CONFIG_EXT_PARTITION_TYPES
Alex Converse4e18d402017-03-14 15:36:38 -07004983 for (i = 0; i < PARTITION_PLOFFSET; ++i)
4984 prob_diff_update(av1_partition_tree, fc->partition_prob[i],
4985 counts->partition[i], PARTITION_TYPES, probwt, header_bc);
4986 for (; i < PARTITION_CONTEXTS_PRIMARY; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07004987 prob_diff_update(av1_ext_partition_tree, fc->partition_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004988 counts->partition[i], EXT_PARTITION_TYPES, probwt,
4989 header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07004990#else
Alex Converse4e18d402017-03-14 15:36:38 -07004991 for (i = 0; i < PARTITION_CONTEXTS_PRIMARY; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07004992 prob_diff_update(av1_partition_tree, fc->partition_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07004993 counts->partition[i], PARTITION_TYPES, probwt, header_bc);
Alex Converse55c6bde2017-01-12 15:55:31 -08004994#endif // CONFIG_EXT_PARTITION_TYPES
4995#if CONFIG_UNPOISON_PARTITION_CTX
4996 for (; i < PARTITION_CONTEXTS_PRIMARY + PARTITION_BLOCK_SIZES; ++i) {
4997 unsigned int ct[2] = { counts->partition[i][PARTITION_VERT],
4998 counts->partition[i][PARTITION_SPLIT] };
4999 assert(counts->partition[i][PARTITION_NONE] == 0);
5000 assert(counts->partition[i][PARTITION_HORZ] == 0);
5001 assert(fc->partition_prob[i][PARTITION_NONE] == 0);
5002 assert(fc->partition_prob[i][PARTITION_HORZ] == 0);
5003 av1_cond_prob_diff_update(header_bc, &fc->partition_prob[i][PARTITION_VERT],
5004 ct, probwt);
5005 }
5006 for (; i < PARTITION_CONTEXTS_PRIMARY + 2 * PARTITION_BLOCK_SIZES; ++i) {
5007 unsigned int ct[2] = { counts->partition[i][PARTITION_HORZ],
5008 counts->partition[i][PARTITION_SPLIT] };
5009 assert(counts->partition[i][PARTITION_NONE] == 0);
5010 assert(counts->partition[i][PARTITION_VERT] == 0);
5011 assert(fc->partition_prob[i][PARTITION_NONE] == 0);
5012 assert(fc->partition_prob[i][PARTITION_VERT] == 0);
5013 av1_cond_prob_diff_update(header_bc, &fc->partition_prob[i][PARTITION_HORZ],
5014 ct, probwt);
5015 }
5016#endif
hui su9aa97492017-01-26 16:46:01 -08005017#if CONFIG_EXT_INTRA && CONFIG_INTRA_INTERP
Yaowu Xuc27fc142016-08-22 16:08:15 -07005018 for (i = 0; i < INTRA_FILTERS + 1; ++i)
Yaowu Xuf883b422016-08-30 14:01:10 -07005019 prob_diff_update(av1_intra_filter_tree, fc->intra_filter_probs[i],
Thomas Davies80188d12016-10-26 16:08:35 -07005020 counts->intra_filter[i], INTRA_FILTERS, probwt, header_bc);
hui su9aa97492017-01-26 16:46:01 -08005021#endif // CONFIG_EXT_INTRA && CONFIG_INTRA_INTERP
hui sub4e25d22017-03-09 15:32:30 -08005022#endif // !CONFIG_EC_ADAPT
hui su9aa97492017-01-26 16:46:01 -08005023
Yaowu Xuc27fc142016-08-22 16:08:15 -07005024 if (frame_is_intra_only(cm)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005025 av1_copy(cm->kf_y_prob, av1_kf_y_mode_prob);
Nathan E. Egge10ba2be2016-11-16 09:44:26 -05005026#if CONFIG_EC_MULTISYMBOL
Thomas Davies1bfb5ed2017-01-11 15:28:11 +00005027 av1_copy(cm->fc->kf_y_cdf, av1_kf_y_mode_cdf);
Nathan E. Egge3ef926e2016-09-07 18:20:41 -04005028#endif
Thomas9ac55082016-09-23 18:04:17 +01005029
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005030#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07005031 for (i = 0; i < INTRA_MODES; ++i)
Thomas Davies6519beb2016-10-19 14:46:07 +01005032 for (j = 0; j < INTRA_MODES; ++j)
Yaowu Xuf883b422016-08-30 14:01:10 -07005033 prob_diff_update(av1_intra_mode_tree, cm->kf_y_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07005034 counts->kf_y_mode[i][j], INTRA_MODES, probwt,
5035 header_bc);
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005036#endif // !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07005037 } else {
5038#if CONFIG_REF_MV
5039 update_inter_mode_probs(cm, header_bc, counts);
5040#else
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005041#if !CONFIG_EC_ADAPT
Nathan E. Egge6ec4d102016-09-08 10:41:20 -04005042 for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005043 prob_diff_update(av1_inter_mode_tree, cm->fc->inter_mode_probs[i],
Thomas Davies80188d12016-10-26 16:08:35 -07005044 counts->inter_mode[i], INTER_MODES, probwt, header_bc);
Nathan E. Egge6ec4d102016-09-08 10:41:20 -04005045 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005046#endif
Thomas Davies6519beb2016-10-19 14:46:07 +01005047#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005048#if CONFIG_EXT_INTER
Thomas Davies80188d12016-10-26 16:08:35 -07005049 update_inter_compound_mode_probs(cm, probwt, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005050
5051 if (cm->reference_mode != COMPOUND_REFERENCE) {
5052 for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
5053 if (is_interintra_allowed_bsize_group(i)) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005054 av1_cond_prob_diff_update(header_bc, &fc->interintra_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07005055 cm->counts.interintra[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005056 }
5057 }
5058 for (i = 0; i < BLOCK_SIZE_GROUPS; i++) {
5059 prob_diff_update(
Yaowu Xuf883b422016-08-30 14:01:10 -07005060 av1_interintra_mode_tree, cm->fc->interintra_mode_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07005061 counts->interintra_mode[i], INTERINTRA_MODES, probwt, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005062 }
5063 for (i = 0; i < BLOCK_SIZES; i++) {
5064 if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i))
Yaowu Xuf883b422016-08-30 14:01:10 -07005065 av1_cond_prob_diff_update(header_bc, &fc->wedge_interintra_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07005066 cm->counts.wedge_interintra[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005067 }
5068 }
Debargha Mukherjeec5f735f2017-04-26 03:25:28 +00005069#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
Yaowu Xuc27fc142016-08-22 16:08:15 -07005070 if (cm->reference_mode != SINGLE_REFERENCE) {
5071 for (i = 0; i < BLOCK_SIZES; i++)
Sarah Parker6fdc8532016-11-16 17:47:13 -08005072 prob_diff_update(av1_compound_type_tree, fc->compound_type_prob[i],
5073 cm->counts.compound_interinter[i], COMPOUND_TYPES,
5074 probwt, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005075 }
Debargha Mukherjeec5f735f2017-04-26 03:25:28 +00005076#endif // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
Yaowu Xuc27fc142016-08-22 16:08:15 -07005077#endif // CONFIG_EXT_INTER
5078
Yue Chencb60b182016-10-13 15:18:22 -07005079#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Yaowu Xuc27fc142016-08-22 16:08:15 -07005080 for (i = BLOCK_8X8; i < BLOCK_SIZES; ++i)
Yue Chencb60b182016-10-13 15:18:22 -07005081 prob_diff_update(av1_motion_mode_tree, fc->motion_mode_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07005082 counts->motion_mode[i], MOTION_MODES, probwt, header_bc);
Yue Chencb60b182016-10-13 15:18:22 -07005083#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005084#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07005085 if (cm->interp_filter == SWITCHABLE)
5086 update_switchable_interp_probs(cm, header_bc, counts);
Thomas9ac55082016-09-23 18:04:17 +01005087#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005088
5089 for (i = 0; i < INTRA_INTER_CONTEXTS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07005090 av1_cond_prob_diff_update(header_bc, &fc->intra_inter_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07005091 counts->intra_inter[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005092
5093 if (cpi->allow_comp_inter_inter) {
5094 const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT;
5095 if (use_hybrid_pred)
5096 for (i = 0; i < COMP_INTER_CONTEXTS; i++)
Yaowu Xuf883b422016-08-30 14:01:10 -07005097 av1_cond_prob_diff_update(header_bc, &fc->comp_inter_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07005098 counts->comp_inter[i], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005099 }
5100
5101 if (cm->reference_mode != COMPOUND_REFERENCE) {
5102 for (i = 0; i < REF_CONTEXTS; i++) {
5103 for (j = 0; j < (SINGLE_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005104 av1_cond_prob_diff_update(header_bc, &fc->single_ref_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07005105 counts->single_ref[i][j], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005106 }
5107 }
5108 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07005109 if (cm->reference_mode != SINGLE_REFERENCE) {
5110 for (i = 0; i < REF_CONTEXTS; i++) {
5111#if CONFIG_EXT_REFS
5112 for (j = 0; j < (FWD_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005113 av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07005114 counts->comp_ref[i][j], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005115 }
5116 for (j = 0; j < (BWD_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005117 av1_cond_prob_diff_update(header_bc, &fc->comp_bwdref_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07005118 counts->comp_bwdref[i][j], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005119 }
5120#else
5121 for (j = 0; j < (COMP_REFS - 1); j++) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005122 av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j],
Thomas Davies80188d12016-10-26 16:08:35 -07005123 counts->comp_ref[i][j], probwt);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005124 }
5125#endif // CONFIG_EXT_REFS
5126 }
5127 }
5128
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005129#if !CONFIG_EC_ADAPT
Nathan E. Egge5710c722016-09-08 10:01:16 -04005130 for (i = 0; i < BLOCK_SIZE_GROUPS; ++i) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005131 prob_diff_update(av1_intra_mode_tree, cm->fc->y_mode_prob[i],
Thomas Davies80188d12016-10-26 16:08:35 -07005132 counts->y_mode[i], INTRA_MODES, probwt, header_bc);
Nathan E. Egge5710c722016-09-08 10:01:16 -04005133 }
Thomas Davies6519beb2016-10-19 14:46:07 +01005134#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005135
Jingning Hanfd0cf162016-09-30 10:33:50 -07005136 av1_write_nmv_probs(cm, cm->allow_high_precision_mv, header_bc,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005137#if CONFIG_REF_MV
Yaowu Xuf883b422016-08-30 14:01:10 -07005138 counts->mv);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005139#else
Yaowu Xuf883b422016-08-30 14:01:10 -07005140 &counts->mv);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005141#endif
Nathan E. Eggebaaaa162016-10-24 09:50:52 -04005142#if !CONFIG_EC_ADAPT
Yaowu Xuc27fc142016-08-22 16:08:15 -07005143 update_ext_tx_probs(cm, header_bc);
Thomas9ac55082016-09-23 18:04:17 +01005144#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005145#if CONFIG_SUPERTX
Thomas Davies80188d12016-10-26 16:08:35 -07005146 if (!xd->lossless[0]) update_supertx_probs(cm, probwt, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005147#endif // CONFIG_SUPERTX
Yaowu Xuc27fc142016-08-22 16:08:15 -07005148#if CONFIG_GLOBAL_MOTION
Sarah Parker689b0ca2016-10-11 12:06:33 -07005149 write_global_motion(cpi, header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005150#endif // CONFIG_GLOBAL_MOTION
Sarah Parker689b0ca2016-10-11 12:06:33 -07005151 }
Thomas Davies6519beb2016-10-19 14:46:07 +01005152#if CONFIG_EC_MULTISYMBOL
Thomas Davies028b57f2017-02-22 16:42:11 +00005153#if !CONFIG_EC_ADAPT
Thomas Davies87aeeb82017-02-17 00:19:40 +00005154#if CONFIG_NEW_TOKENSET
5155 av1_coef_head_cdfs(fc);
5156#endif
Thomas Davies6519beb2016-10-19 14:46:07 +01005157 av1_coef_pareto_cdfs(fc);
David Barker599dfd02016-11-10 13:20:12 +00005158#if CONFIG_REF_MV
5159 for (i = 0; i < NMV_CONTEXTS; ++i) av1_set_mv_cdfs(&fc->nmvc[i]);
5160#else
Thomas Davies6519beb2016-10-19 14:46:07 +01005161 av1_set_mv_cdfs(&fc->nmvc);
David Barker599dfd02016-11-10 13:20:12 +00005162#endif
Nathan E. Egge31296062016-11-16 09:44:26 -05005163#if CONFIG_EC_MULTISYMBOL
Thomas Davies6519beb2016-10-19 14:46:07 +01005164 av1_set_mode_cdfs(cm);
5165#endif
Thomas Davies028b57f2017-02-22 16:42:11 +00005166#endif // !CONFIG_EC_ADAPT
Thomas Davies6519beb2016-10-19 14:46:07 +01005167#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005168#if CONFIG_ANS
Alex Converse1ecdf2b2016-11-30 15:51:12 -08005169 aom_buf_ans_flush(header_bc);
Alex Converse2a1b3af2016-10-26 13:11:26 -07005170 header_size = buf_ans_write_end(header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005171 assert(header_size <= 0xffff);
5172 return header_size;
5173#else
Yaowu Xuf883b422016-08-30 14:01:10 -07005174 aom_stop_encode(header_bc);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005175 assert(header_bc->pos <= 0xffff);
5176 return header_bc->pos;
5177#endif // CONFIG_ANS
5178}
5179
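// Picks the smallest byte count that can hold `size` once `spare_msbs` top
// bits are reserved, or -1 if the two together do not fit in 32 bits. For
// example, choose_size_bytes(0x1234, 0) returns 2, while
// choose_size_bytes(0x800000, 1) returns 4 because the reserved bit pushes
// the value into a fourth byte.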
5180static int choose_size_bytes(uint32_t size, int spare_msbs) {
5181 // Choose the number of bytes required to represent size, without
5182 // using the 'spare_msbs' number of most significant bits.
5183
5184 // Make sure we will fit in 4 bytes to start with.
5185 if (spare_msbs > 0 && size >> (32 - spare_msbs) != 0) return -1;
5186
5187 // Normalise to 32 bits
5188 size <<= spare_msbs;
5189
5190 if (size >> 24 != 0)
5191 return 4;
5192 else if (size >> 16 != 0)
5193 return 3;
5194 else if (size >> 8 != 0)
5195 return 2;
5196 else
5197 return 1;
5198}
5199
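// Stores `val` as `sz` little-endian bytes at `dst`. A matching reader
// (sketch, using the mem_ops.h getters) would dispatch the same way:
//   switch (sz) {
//     case 1: val = dst[0]; break;
//     case 2: val = mem_get_le16(dst); break;
//     case 3: val = mem_get_le24(dst); break;
//     case 4: val = mem_get_le32(dst); break;
//   }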
5200static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) {
5201 switch (sz) {
5202 case 1: dst[0] = (uint8_t)(val & 0xff); break;
5203 case 2: mem_put_le16(dst, val); break;
5204 case 3: mem_put_le24(dst, val); break;
5205 case 4: mem_put_le32(dst, val); break;
James Zern06c372d2017-04-20 16:08:29 -07005206 default: assert(0 && "Invalid size"); break;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005207 }
5208}
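// Rewrites the tile data in place so that every 4-byte tile size field
// (and, with CONFIG_EXT_TILE, every tile column size field) is shrunk to
// the byte count chosen by choose_size_bytes(); tile payloads are moved
// down with memmove and the new, smaller total data size is returned.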
Yaowu Xuf883b422016-08-30 14:01:10 -07005209static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst,
Yaowu Xuc27fc142016-08-22 16:08:15 -07005210 const uint32_t data_size, const uint32_t max_tile_size,
5211 const uint32_t max_tile_col_size,
5212 int *const tile_size_bytes,
5213 int *const tile_col_size_bytes) {
5214// Choose the tile size bytes (tsb) and tile column size bytes (tcsb)
5215#if CONFIG_EXT_TILE
5216 // The top bit in the tile size field indicates tile copy mode, so we
5217 // have 1 less bit to code the tile size
5218 const int tsb = choose_size_bytes(max_tile_size, 1);
5219 const int tcsb = choose_size_bytes(max_tile_col_size, 0);
5220#else
5221 const int tsb = choose_size_bytes(max_tile_size, 0);
5222 const int tcsb = 4; // This is ignored
5223 (void)max_tile_col_size;
5224#endif // CONFIG_EXT_TILE
5225
5226 assert(tsb > 0);
5227 assert(tcsb > 0);
5228
5229 *tile_size_bytes = tsb;
5230 *tile_col_size_bytes = tcsb;
5231
5232 if (tsb == 4 && tcsb == 4) {
5233 return data_size;
5234 } else {
5235 uint32_t wpos = 0;
5236 uint32_t rpos = 0;
5237
5238#if CONFIG_EXT_TILE
5239 int tile_row;
5240 int tile_col;
5241
5242 for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) {
5243 // Every column except the last has a column size header
5244 if (tile_col < cm->tile_cols - 1) {
5245 uint32_t tile_col_size = mem_get_le32(dst + rpos);
5246 rpos += 4;
5247
5248 // Adjust the tile column size by the number of bytes removed
5249 // from the tile size fields.
5250 tile_col_size -= (4 - tsb) * cm->tile_rows;
5251
5252 mem_put_varsize(dst + wpos, tcsb, tile_col_size);
5253 wpos += tcsb;
5254 }
5255
5256 for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) {
5257 // Every row, including the last, has a size header
5258 uint32_t tile_header = mem_get_le32(dst + rpos);
5259 rpos += 4;
5260
5261 // If this is a copy tile, the MSB (the copy flag) must be shifted to the
5262 // top bit of the narrower size field, and there is no tile data to copy.
5263 if (tile_header >> 31 != 0) {
5264 if (tsb < 4) tile_header >>= 32 - 8 * tsb;
5265 mem_put_varsize(dst + wpos, tsb, tile_header);
5266 wpos += tsb;
5267 } else {
5268 mem_put_varsize(dst + wpos, tsb, tile_header);
5269 wpos += tsb;
5270
5271 memmove(dst + wpos, dst + rpos, tile_header);
5272 rpos += tile_header;
5273 wpos += tile_header;
5274 }
5275 }
5276 }
5277#else
5278 const int n_tiles = cm->tile_cols * cm->tile_rows;
5279 int n;
5280
5281 for (n = 0; n < n_tiles; n++) {
5282 int tile_size;
5283
Thomas Daviesa0de6d52017-01-20 14:45:25 +00005284 if (n == n_tiles - 1) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005285 tile_size = data_size - rpos;
5286 } else {
5287 tile_size = mem_get_le32(dst + rpos);
5288 rpos += 4;
5289 mem_put_varsize(dst + wpos, tsb, tile_size);
5290 wpos += tsb;
5291 }
5292
5293 memmove(dst + wpos, dst + rpos, tile_size);
5294
5295 rpos += tile_size;
5296 wpos += tile_size;
5297 }
5298#endif // CONFIG_EXT_TILE
5299
5300 assert(rpos > wpos);
5301 assert(rpos == data_size);
5302
5303 return wpos;
5304 }
5305}
5306
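// Assembles the final frame bitstream: the uncompressed header first (with
// placeholder size fields when tiles are present), then the compressed
// header and the per-tile data; the tile size bytes and the compressed
// header size are patched afterwards through the saved bit-buffer position.
// With CONFIG_TILE_GROUPS, write_tiles() takes over most of this
// bookkeeping.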
Yaowu Xuf883b422016-08-30 14:01:10 -07005307void av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07005308 uint8_t *data = dst;
Thomas Davies80188d12016-10-26 16:08:35 -07005309#if !CONFIG_TILE_GROUPS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005310 uint32_t compressed_header_size;
5311 uint32_t uncompressed_header_size;
Thomas Davies80188d12016-10-26 16:08:35 -07005312 struct aom_write_bit_buffer saved_wb;
5313#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005314 uint32_t data_size;
Yaowu Xuf883b422016-08-30 14:01:10 -07005315 struct aom_write_bit_buffer wb = { data, 0 };
Thomas Davies80188d12016-10-26 16:08:35 -07005316
Yaowu Xuc27fc142016-08-22 16:08:15 -07005317 unsigned int max_tile_size;
5318 unsigned int max_tile_col_size;
Thomas Davies80188d12016-10-26 16:08:35 -07005319
Angie Chiangb11aedf2017-03-10 17:31:46 -08005320#if CONFIG_BITSTREAM_DEBUG
5321 bitstream_queue_reset_write();
5322#endif
5323
Thomas Davies80188d12016-10-26 16:08:35 -07005324#if !CONFIG_TILE_GROUPS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005325 int tile_size_bytes;
5326 int tile_col_size_bytes;
Yaowu Xuf883b422016-08-30 14:01:10 -07005327 AV1_COMMON *const cm = &cpi->common;
Yaowu Xuc27fc142016-08-22 16:08:15 -07005328 const int have_tiles = cm->tile_cols * cm->tile_rows > 1;
5329
Yaowu Xuc27fc142016-08-22 16:08:15 -07005330 // Write the uncompressed header
5331 write_uncompressed_header(cpi, &wb);
5332
5333#if CONFIG_EXT_REFS
5334 if (cm->show_existing_frame) {
Yaowu Xuf883b422016-08-30 14:01:10 -07005335 *size = aom_wb_bytes_written(&wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005336 return;
5337 }
5338#endif // CONFIG_EXT_REFS
5339
5340 // We do not know these sizes in advance. Output placeholder bits.
5341 saved_wb = wb;
5342 // Write tile size magnitudes
5343 if (have_tiles) {
5344// Note that the last item in the uncompressed header is the data
5345// describing tile configuration.
5346#if CONFIG_EXT_TILE
5347 // Number of bytes in tile column size - 1
Yaowu Xuf883b422016-08-30 14:01:10 -07005348 aom_wb_write_literal(&wb, 0, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005349#endif // CONFIG_EXT_TILE
5350 // Number of bytes in tile size - 1
Yaowu Xuf883b422016-08-30 14:01:10 -07005351 aom_wb_write_literal(&wb, 0, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005352 }
5353 // Size of compressed header
Yaowu Xuf883b422016-08-30 14:01:10 -07005354 aom_wb_write_literal(&wb, 0, 16);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005355
Yaowu Xuf883b422016-08-30 14:01:10 -07005356 uncompressed_header_size = (uint32_t)aom_wb_bytes_written(&wb);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005357 data += uncompressed_header_size;
5358
Yaowu Xuf883b422016-08-30 14:01:10 -07005359 aom_clear_system_state();
Yaowu Xuc27fc142016-08-22 16:08:15 -07005360
5361 // Write the compressed header
5362 compressed_header_size = write_compressed_header(cpi, data);
5363 data += compressed_header_size;
5364
5365 // Write the encoded tile data
5366 data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size);
Thomas Davies80188d12016-10-26 16:08:35 -07005367#else
5368 data_size = write_tiles(cpi, &wb, &max_tile_size, &max_tile_col_size);
5369#endif
5370#if !CONFIG_TILE_GROUPS
Yaowu Xuc27fc142016-08-22 16:08:15 -07005371 if (have_tiles) {
5372 data_size =
5373 remux_tiles(cm, data, data_size, max_tile_size, max_tile_col_size,
5374 &tile_size_bytes, &tile_col_size_bytes);
5375 }
5376
5377 data += data_size;
5378
5379 // Now fill in the gaps in the uncompressed header.
5380 if (have_tiles) {
5381#if CONFIG_EXT_TILE
5382 assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4);
Yaowu Xuf883b422016-08-30 14:01:10 -07005383 aom_wb_write_literal(&saved_wb, tile_col_size_bytes - 1, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005384#endif // CONFIG_EXT_TILE
5385 assert(tile_size_bytes >= 1 && tile_size_bytes <= 4);
Yaowu Xuf883b422016-08-30 14:01:10 -07005386 aom_wb_write_literal(&saved_wb, tile_size_bytes - 1, 2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07005387 }
5388 // TODO(jbb): Figure out what to do if compressed_header_size does not fit in 16 bits.
5389 assert(compressed_header_size <= 0xffff);
Yaowu Xuf883b422016-08-30 14:01:10 -07005390 aom_wb_write_literal(&saved_wb, compressed_header_size, 16);
Thomas Davies80188d12016-10-26 16:08:35 -07005391#else
5392 data += data_size;
5393#endif
Alex Converseb0bbd602016-10-21 14:15:06 -07005394#if CONFIG_ANS && ANS_REVERSE
5395 // Avoid aliasing the superframe index
5396 *data++ = 0;
5397#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07005398 *size = data - dst;
5399}