/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <limits.h>
#include <stdio.h>

#include "aom/aom_encoder.h"
#include "aom_dsp/bitwriter_buffer.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem_ops.h"
#include "aom_ports/system_state.h"
#if CONFIG_BITSTREAM_DEBUG
#include "aom_util/debug_util.h"
#endif  // CONFIG_BITSTREAM_DEBUG

#if CONFIG_CLPF
#include "av1/common/clpf.h"
#endif
#if CONFIG_DERING
#include "av1/common/dering.h"
#endif  // CONFIG_DERING
#include "av1/common/entropy.h"
#include "av1/common/entropymode.h"
#include "av1/common/entropymv.h"
#include "av1/common/mvref_common.h"
#include "av1/common/odintrin.h"
#include "av1/common/pred_common.h"
#include "av1/common/reconinter.h"
#if CONFIG_EXT_INTRA
#include "av1/common/reconintra.h"
#endif  // CONFIG_EXT_INTRA
#include "av1/common/seg_common.h"
#include "av1/common/tile_common.h"

#if CONFIG_ANS
#include "aom_dsp/buf_ans.h"
#endif  // CONFIG_ANS
#include "av1/encoder/bitstream.h"
#include "av1/encoder/cost.h"
#include "av1/encoder/encodemv.h"
#include "av1/encoder/mcomp.h"
#include "av1/encoder/segmentation.h"
#include "av1/encoder/subexp.h"
#include "av1/encoder/tokenize.h"
#if CONFIG_PVQ
#include "av1/encoder/pvq_encoder.h"
#endif
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 58 | |
Nathan E. Egge | 3c05679 | 2016-05-20 08:58:44 -0400 | [diff] [blame] | 59 | static struct av1_token intra_mode_encodings[INTRA_MODES]; |
| 60 | static struct av1_token switchable_interp_encodings[SWITCHABLE_FILTERS]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 61 | #if CONFIG_EXT_PARTITION_TYPES |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 62 | static const struct av1_token ext_partition_encodings[EXT_PARTITION_TYPES] = { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 63 | { 0, 1 }, { 4, 3 }, { 12, 4 }, { 7, 3 }, |
| 64 | { 10, 4 }, { 11, 4 }, { 26, 5 }, { 27, 5 } |
| 65 | }; |
| 66 | #endif |
Nathan E. Egge | 3c05679 | 2016-05-20 08:58:44 -0400 | [diff] [blame] | 67 | static struct av1_token partition_encodings[PARTITION_TYPES]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 68 | #if !CONFIG_REF_MV |
Nathan E. Egge | 3c05679 | 2016-05-20 08:58:44 -0400 | [diff] [blame] | 69 | static struct av1_token inter_mode_encodings[INTER_MODES]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 70 | #endif |
| 71 | #if CONFIG_EXT_INTER |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 72 | static const struct av1_token |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 73 | inter_compound_mode_encodings[INTER_COMPOUND_MODES] = { |
| 74 | { 2, 2 }, { 50, 6 }, { 51, 6 }, { 24, 5 }, { 52, 6 }, |
| 75 | { 53, 6 }, { 54, 6 }, { 55, 6 }, { 0, 1 }, { 7, 3 } |
| 76 | }; |
| 77 | #endif // CONFIG_EXT_INTER |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 78 | #if CONFIG_PALETTE |
Urvang Joshi | 0b32597 | 2016-10-24 14:06:43 -0700 | [diff] [blame] | 79 | static struct av1_token palette_size_encodings[PALETTE_MAX_SIZE - 1]; |
| 80 | static struct av1_token palette_color_encodings[PALETTE_MAX_SIZE - 1] |
| 81 | [PALETTE_MAX_SIZE]; |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 82 | #endif // CONFIG_PALETTE |
Jingning Han | aae72a6 | 2016-10-25 15:35:29 -0700 | [diff] [blame] | 83 | static const struct av1_token tx_size_encodings[MAX_TX_DEPTH][TX_SIZES] = { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 84 | { { 0, 1 }, { 1, 1 } }, // Max tx_size is 8X8 |
| 85 | { { 0, 1 }, { 2, 2 }, { 3, 2 } }, // Max tx_size is 16X16 |
| 86 | { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 7, 3 } }, // Max tx_size is 32X32 |
Debargha Mukherjee | 25ed530 | 2016-11-22 12:13:41 -0800 | [diff] [blame] | 87 | #if CONFIG_TX64X64 |
| 88 | { { 0, 1 }, { 2, 2 }, { 6, 3 }, { 14, 4 }, { 15, 4 } }, // Max tx_size 64X64 |
| 89 | #endif // CONFIG_TX64X64 |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 90 | }; |
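
/* A note on the tables above (a reading of the { value, len } convention, not
 * part of the original comments): each av1_token pair gives the path bits for
 * one leaf of the matching probability tree, and av1_write_token() emits the
 * low `len` bits of `value`, most significant bit first, one coded bit per
 * tree node. In the "Max tx_size is 32X32" row, for instance, { 0, 1 } is the
 * single bit 0, { 2, 2 } is 10, { 6, 3 } is 110 and { 7, 3 } is 111. */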

#if CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
static INLINE void write_uniform(aom_writer *w, int n, int v) {
  int l = get_unsigned_bits(n);
  int m = (1 << l) - n;
  if (l == 0) return;
  if (v < m) {
    aom_write_literal(w, v, l - 1);
  } else {
    aom_write_literal(w, m + ((v - m) >> 1), l - 1);
    aom_write_literal(w, (v - m) & 1, 1);
  }
}
#endif  // CONFIG_EXT_INTRA || CONFIG_FILTER_INTRA || CONFIG_PALETTE
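
/* write_uniform() above emits a quasi-uniform code for a value v in [0, n):
 * with l = get_unsigned_bits(n) and m = (1 << l) - n, the first m values take
 * l - 1 bits and the remaining n - m values take l bits. For example,
 * assuming get_unsigned_bits(5) == 3, n = 5 gives m = 3, so values 0..2 are
 * written in 2 bits while 3 and 4 are written as the 2-bit prefix 11 plus one
 * extra bit. */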

#if CONFIG_EXT_TX
static struct av1_token ext_tx_inter_encodings[EXT_TX_SETS_INTER][TX_TYPES];
static struct av1_token ext_tx_intra_encodings[EXT_TX_SETS_INTRA][TX_TYPES];
#else
static struct av1_token ext_tx_encodings[TX_TYPES];
#endif  // CONFIG_EXT_TX
#if CONFIG_GLOBAL_MOTION
static struct av1_token global_motion_types_encodings[GLOBAL_TRANS_TYPES];
#endif  // CONFIG_GLOBAL_MOTION
#if CONFIG_EXT_INTRA
#if CONFIG_INTRA_INTERP
static struct av1_token intra_filter_encodings[INTRA_FILTERS];
#endif  // CONFIG_INTRA_INTERP
#endif  // CONFIG_EXT_INTRA
#if CONFIG_EXT_INTER
static struct av1_token interintra_mode_encodings[INTERINTRA_MODES];
static struct av1_token compound_type_encodings[COMPOUND_TYPES];
#endif  // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
static struct av1_token motion_mode_encodings[MOTION_MODES];
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_LOOP_RESTORATION
static struct av1_token switchable_restore_encodings[RESTORE_SWITCHABLE_TYPES];
#endif  // CONFIG_LOOP_RESTORATION
static void write_uncompressed_header(AV1_COMP *cpi,
                                      struct aom_write_bit_buffer *wb);
static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data);

void av1_encode_token_init(void) {
#if CONFIG_EXT_TX || CONFIG_PALETTE
  int s;
#endif  // CONFIG_EXT_TX || CONFIG_PALETTE
#if CONFIG_EXT_TX
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    av1_tokens_from_tree(ext_tx_inter_encodings[s], av1_ext_tx_inter_tree[s]);
  }
  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    av1_tokens_from_tree(ext_tx_intra_encodings[s], av1_ext_tx_intra_tree[s]);
  }
#else
  av1_tokens_from_tree(ext_tx_encodings, av1_ext_tx_tree);
#endif  // CONFIG_EXT_TX
  av1_tokens_from_tree(intra_mode_encodings, av1_intra_mode_tree);
  av1_tokens_from_tree(switchable_interp_encodings, av1_switchable_interp_tree);
  av1_tokens_from_tree(partition_encodings, av1_partition_tree);
#if !CONFIG_REF_MV
  av1_tokens_from_tree(inter_mode_encodings, av1_inter_mode_tree);
#endif

#if CONFIG_PALETTE
  av1_tokens_from_tree(palette_size_encodings, av1_palette_size_tree);
  for (s = 0; s < PALETTE_MAX_SIZE - 1; ++s) {
    av1_tokens_from_tree(palette_color_encodings[s], av1_palette_color_tree[s]);
  }
#endif  // CONFIG_PALETTE

#if CONFIG_EXT_INTRA
#if CONFIG_INTRA_INTERP
  av1_tokens_from_tree(intra_filter_encodings, av1_intra_filter_tree);
#endif  // CONFIG_INTRA_INTERP
#endif  // CONFIG_EXT_INTRA
#if CONFIG_EXT_INTER
  av1_tokens_from_tree(interintra_mode_encodings, av1_interintra_mode_tree);
  av1_tokens_from_tree(compound_type_encodings, av1_compound_type_tree);
#endif  // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
  av1_tokens_from_tree(motion_mode_encodings, av1_motion_mode_tree);
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
#if CONFIG_GLOBAL_MOTION
  av1_tokens_from_tree(global_motion_types_encodings,
                       av1_global_motion_types_tree);
#endif  // CONFIG_GLOBAL_MOTION
#if CONFIG_LOOP_RESTORATION
  av1_tokens_from_tree(switchable_restore_encodings,
                       av1_switchable_restore_tree);
#endif  // CONFIG_LOOP_RESTORATION

#if CONFIG_EC_MULTISYMBOL
  /* This hack is necessary when CONFIG_EXT_INTERP is enabled because the five
     SWITCHABLE_FILTERS are not consecutive, e.g., 0, 1, 2, 3, 4, when doing
     an in-order traversal of the av1_switchable_interp_tree structure. */
  av1_indices_from_tree(av1_switchable_interp_ind, av1_switchable_interp_inv,
                        SWITCHABLE_FILTERS, av1_switchable_interp_tree);
  /* This hack is necessary because the four TX_TYPES are not consecutive,
     e.g., 0, 1, 2, 3, when doing an in-order traversal of the av1_ext_tx_tree
     structure. */
#if !CONFIG_EXT_TX
  av1_indices_from_tree(av1_ext_tx_ind, av1_ext_tx_inv, TX_TYPES,
                        av1_ext_tx_tree);
#endif
  av1_indices_from_tree(av1_intra_mode_ind, av1_intra_mode_inv, INTRA_MODES,
                        av1_intra_mode_tree);
  av1_indices_from_tree(av1_inter_mode_ind, av1_inter_mode_inv, INTER_MODES,
                        av1_inter_mode_tree);
#endif
}

#if !CONFIG_DAALA_EC
static void write_intra_mode(aom_writer *w, PREDICTION_MODE mode,
                             const aom_prob *probs) {
  av1_write_token(w, av1_intra_mode_tree, probs, &intra_mode_encodings[mode]);
}
#endif

#if CONFIG_EXT_INTER
static void write_interintra_mode(aom_writer *w, INTERINTRA_MODE mode,
                                  const aom_prob *probs) {
  av1_write_token(w, av1_interintra_mode_tree, probs,
                  &interintra_mode_encodings[mode]);
}
#endif  // CONFIG_EXT_INTER

static void write_inter_mode(AV1_COMMON *cm, aom_writer *w,
                             PREDICTION_MODE mode,
#if CONFIG_REF_MV && CONFIG_EXT_INTER
                             int is_compound,
#endif  // CONFIG_REF_MV && CONFIG_EXT_INTER
                             const int16_t mode_ctx) {
#if CONFIG_REF_MV
  const int16_t newmv_ctx = mode_ctx & NEWMV_CTX_MASK;
  const aom_prob newmv_prob = cm->fc->newmv_prob[newmv_ctx];
#if CONFIG_EXT_INTER
  aom_write(w, mode != NEWMV && mode != NEWFROMNEARMV, newmv_prob);

  if (!is_compound && (mode == NEWMV || mode == NEWFROMNEARMV))
    aom_write(w, mode == NEWFROMNEARMV, cm->fc->new2mv_prob);

  if (mode != NEWMV && mode != NEWFROMNEARMV) {
#else
  aom_write(w, mode != NEWMV, newmv_prob);

  if (mode != NEWMV) {
#endif  // CONFIG_EXT_INTER
    const int16_t zeromv_ctx = (mode_ctx >> ZEROMV_OFFSET) & ZEROMV_CTX_MASK;
    const aom_prob zeromv_prob = cm->fc->zeromv_prob[zeromv_ctx];

    if (mode_ctx & (1 << ALL_ZERO_FLAG_OFFSET)) {
      assert(mode == ZEROMV);
      return;
    }

    aom_write(w, mode != ZEROMV, zeromv_prob);

    if (mode != ZEROMV) {
      int16_t refmv_ctx = (mode_ctx >> REFMV_OFFSET) & REFMV_CTX_MASK;
      aom_prob refmv_prob;

      if (mode_ctx & (1 << SKIP_NEARESTMV_OFFSET)) refmv_ctx = 6;
      if (mode_ctx & (1 << SKIP_NEARMV_OFFSET)) refmv_ctx = 7;
      if (mode_ctx & (1 << SKIP_NEARESTMV_SUB8X8_OFFSET)) refmv_ctx = 8;

      refmv_prob = cm->fc->refmv_prob[refmv_ctx];
      aom_write(w, mode != NEARESTMV, refmv_prob);
    }
  }
#else
  assert(is_inter_mode(mode));
#if CONFIG_EC_MULTISYMBOL
  aom_write_symbol(w, av1_inter_mode_ind[INTER_OFFSET(mode)],
                   cm->fc->inter_mode_cdf[mode_ctx], INTER_MODES);
#else
  {
    const aom_prob *const inter_probs = cm->fc->inter_mode_probs[mode_ctx];
    av1_write_token(w, av1_inter_mode_tree, inter_probs,
                    &inter_mode_encodings[INTER_OFFSET(mode)]);
  }
#endif
#endif
}
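
/* With CONFIG_REF_MV, write_inter_mode() above signals the mode as a cascade
 * of binary decisions, each with a context taken from bit fields of mode_ctx:
 * first "not NEWMV", then "not ZEROMV" (skipped entirely when the ALL_ZERO
 * flag forces ZEROMV), and finally "not NEARESTMV" to separate NEARMV from
 * NEARESTMV. Without CONFIG_REF_MV the mode is coded in one shot, either as a
 * multi-symbol CDF (CONFIG_EC_MULTISYMBOL) or along the inter-mode tree. */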

#if CONFIG_REF_MV
static void write_drl_idx(const AV1_COMMON *cm, const MB_MODE_INFO *mbmi,
                          const MB_MODE_INFO_EXT *mbmi_ext, aom_writer *w) {
  uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);

  assert(mbmi->ref_mv_idx < 3);

  if (mbmi->mode == NEWMV) {
    int idx;
    for (idx = 0; idx < 2; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];

        aom_write(w, mbmi->ref_mv_idx != idx, drl_prob);
        if (mbmi->ref_mv_idx == idx) return;
      }
    }
    return;
  }

  if (mbmi->mode == NEARMV) {
    int idx;
    // TODO(jingning): Temporary solution to compensate the NEARESTMV offset.
    for (idx = 1; idx < 3; ++idx) {
      if (mbmi_ext->ref_mv_count[ref_frame_type] > idx + 1) {
        uint8_t drl_ctx =
            av1_drl_ctx(mbmi_ext->ref_mv_stack[ref_frame_type], idx);
        aom_prob drl_prob = cm->fc->drl_prob[drl_ctx];

        aom_write(w, mbmi->ref_mv_idx != (idx - 1), drl_prob);
        if (mbmi->ref_mv_idx == (idx - 1)) return;
      }
    }
    return;
  }
}
#endif
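
/* write_drl_idx() signals the index into the dynamic reference MV stack as a
 * sequence of "keep going" flags: for each candidate position it writes
 * whether the chosen index lies further down the stack and stops once the
 * selected entry is reached; positions ruled out by the reference MV count
 * cost no bits. For NEARMV the loop starts at 1, so the coded index is
 * relative to the NEARESTMV entry. */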

#if CONFIG_EXT_INTER
static void write_inter_compound_mode(AV1_COMMON *cm, aom_writer *w,
                                      PREDICTION_MODE mode,
                                      const int16_t mode_ctx) {
  const aom_prob *const inter_compound_probs =
      cm->fc->inter_compound_mode_probs[mode_ctx];

  assert(is_inter_compound_mode(mode));
  av1_write_token(w, av1_inter_compound_mode_tree, inter_compound_probs,
                  &inter_compound_mode_encodings[INTER_COMPOUND_OFFSET(mode)]);
}
#endif  // CONFIG_EXT_INTER

static void encode_unsigned_max(struct aom_write_bit_buffer *wb, int data,
                                int max) {
  aom_wb_write_literal(wb, data, get_unsigned_bits(max));
}

static void prob_diff_update(const aom_tree_index *tree,
                             aom_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/], int n,
                             int probwt, aom_writer *w) {
  int i;
  unsigned int branch_ct[32][2];

  // Assuming max number of probabilities <= 32
  assert(n <= 32);

  av1_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    av1_cond_prob_diff_update(w, &probs[i], branch_ct[i], probwt);
}
#if CONFIG_EXT_INTER || CONFIG_EXT_TX || !CONFIG_EC_ADAPT
static int prob_diff_update_savings(const aom_tree_index *tree,
                                    aom_prob probs[/*n - 1*/],
                                    const unsigned int counts[/*n - 1*/], int n,
                                    int probwt) {
  int i;
  unsigned int branch_ct[32][2];
  int savings = 0;

  // Assuming max number of probabilities <= 32
  assert(n <= 32);
  av1_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i) {
    savings +=
        av1_cond_prob_diff_update_savings(&probs[i], branch_ct[i], probwt);
  }
  return savings;
}
#endif  // CONFIG_EXT_INTER || CONFIG_EXT_TX || !CONFIG_EC_ADAPT

#if CONFIG_VAR_TX
static void write_tx_size_vartx(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                const MB_MODE_INFO *mbmi, TX_SIZE tx_size,
                                int depth, int blk_row, int blk_col,
                                aom_writer *w) {
  const int tx_row = blk_row >> 1;
  const int tx_col = blk_col >> 1;
  const int max_blocks_high = max_block_high(xd, mbmi->sb_type, 0);
  const int max_blocks_wide = max_block_wide(xd, mbmi->sb_type, 0);

  int ctx = txfm_partition_context(xd->above_txfm_context + tx_col,
                                   xd->left_txfm_context + tx_row,
                                   mbmi->sb_type, tx_size);

  if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return;

  if (depth == MAX_VARTX_DEPTH) {
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size);
    return;
  }

  if (tx_size == mbmi->inter_tx_size[tx_row][tx_col]) {
    aom_write(w, 0, cm->fc->txfm_partition_prob[ctx]);
    txfm_partition_update(xd->above_txfm_context + tx_col,
                          xd->left_txfm_context + tx_row, tx_size);
  } else {
    const TX_SIZE sub_txs = sub_tx_size_map[tx_size];
    const int bsl = tx_size_wide_unit[sub_txs];
    int i;

    aom_write(w, 1, cm->fc->txfm_partition_prob[ctx]);

    if (tx_size == TX_8X8) {
      txfm_partition_update(xd->above_txfm_context + tx_col,
                            xd->left_txfm_context + tx_row, TX_4X4);
      return;
    }

    assert(bsl > 0);
    for (i = 0; i < 4; ++i) {
      int offsetr = blk_row + (i >> 1) * bsl;
      int offsetc = blk_col + (i & 0x01) * bsl;
      write_tx_size_vartx(cm, xd, mbmi, sub_txs, depth + 1, offsetr, offsetc,
                          w);
    }
  }
}
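
/* write_tx_size_vartx() codes the recursive transform-size partition used by
 * CONFIG_VAR_TX: at each node it writes a single split flag against
 * txfm_partition_prob[ctx]; a 0 means the block uses the tx size stored in
 * inter_tx_size[][] at this position, while a 1 descends into four sub-blocks
 * with the next smaller transform size, stopping once TX_8X8 splits into
 * TX_4X4 or the maximum partition depth is reached. */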

static void update_txfm_partition_probs(AV1_COMMON *cm, aom_writer *w,
                                        FRAME_COUNTS *counts, int probwt) {
  int k;
  for (k = 0; k < TXFM_PARTITION_CONTEXTS; ++k)
    av1_cond_prob_diff_update(w, &cm->fc->txfm_partition_prob[k],
                              counts->txfm_partition[k], probwt);
}
#endif

static void write_selected_tx_size(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                                   aom_writer *w) {
  const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  // For sub8x8 blocks the tx_size symbol does not need to be sent
  if (bsize >= BLOCK_8X8) {
    const TX_SIZE tx_size = mbmi->tx_size;
    const int is_inter = is_inter_block(mbmi);
    const int tx_size_ctx = get_tx_size_context(xd);
    const int tx_size_cat = is_inter ? inter_tx_size_cat_lookup[bsize]
                                     : intra_tx_size_cat_lookup[bsize];
    const TX_SIZE coded_tx_size = txsize_sqr_up_map[tx_size];
    const int depth = tx_size_to_depth(coded_tx_size);

#if CONFIG_EXT_TX && CONFIG_RECT_TX
    assert(IMPLIES(is_rect_tx(tx_size), is_rect_tx_allowed(xd, mbmi)));
    assert(
        IMPLIES(is_rect_tx(tx_size), tx_size == max_txsize_rect_lookup[bsize]));
#endif  // CONFIG_EXT_TX && CONFIG_RECT_TX

    av1_write_token(w, av1_tx_size_tree[tx_size_cat],
                    cm->fc->tx_size_probs[tx_size_cat][tx_size_ctx],
                    &tx_size_encodings[tx_size_cat][depth]);
  }
}

#if CONFIG_REF_MV
static void update_inter_mode_probs(AV1_COMMON *cm, aom_writer *w,
                                    FRAME_COUNTS *counts) {
  int i;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (i = 0; i < NEWMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->newmv_prob[i], counts->newmv_mode[i],
                              probwt);
  for (i = 0; i < ZEROMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->zeromv_prob[i],
                              counts->zeromv_mode[i], probwt);
  for (i = 0; i < REFMV_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->refmv_prob[i], counts->refmv_mode[i],
                              probwt);
  for (i = 0; i < DRL_MODE_CONTEXTS; ++i)
    av1_cond_prob_diff_update(w, &cm->fc->drl_prob[i], counts->drl_mode[i],
                              probwt);
#if CONFIG_EXT_INTER
  av1_cond_prob_diff_update(w, &cm->fc->new2mv_prob, counts->new2mv_mode,
                            probwt);
#endif  // CONFIG_EXT_INTER
}
#endif

#if CONFIG_EXT_INTER
static void update_inter_compound_mode_probs(AV1_COMMON *cm, int probwt,
                                             aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i;
  int savings = 0;
  int do_update = 0;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
    savings += prob_diff_update_savings(
        av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
        cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
      prob_diff_update(
          av1_inter_compound_mode_tree, cm->fc->inter_compound_mode_probs[i],
          cm->counts.inter_compound_mode[i], INTER_COMPOUND_MODES, probwt, w);
    }
  }
}
#endif  // CONFIG_EXT_INTER

static int write_skip(const AV1_COMMON *cm, const MACROBLOCKD *xd,
                      int segment_id, const MODE_INFO *mi, aom_writer *w) {
  if (segfeature_active(&cm->seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip = mi->mbmi.skip;
    aom_write(w, skip, av1_get_skip_prob(cm, xd));
    return skip;
  }
}

#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
static void write_motion_mode(const AV1_COMMON *cm, const MB_MODE_INFO *mbmi,
                              aom_writer *w) {
  MOTION_MODE last_motion_mode_allowed = motion_mode_allowed(mbmi);

  if (last_motion_mode_allowed == SIMPLE_TRANSLATION) return;
#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
  if (last_motion_mode_allowed == OBMC_CAUSAL) {
    aom_write(w, mbmi->motion_mode == OBMC_CAUSAL,
              cm->fc->obmc_prob[mbmi->sb_type]);
  } else {
#endif  // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
    av1_write_token(w, av1_motion_mode_tree,
                    cm->fc->motion_mode_prob[mbmi->sb_type],
                    &motion_mode_encodings[mbmi->motion_mode]);
#if CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
  }
#endif  // CONFIG_MOTION_VAR && CONFIG_WARPED_MOTION
}
#endif  // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION

#if CONFIG_DELTA_Q
static void write_delta_qindex(const AV1_COMMON *cm, int delta_qindex,
                               aom_writer *w) {
  int sign = delta_qindex < 0;
  int abs = sign ? -delta_qindex : delta_qindex;
  int rem_bits, thr, i = 0;
  int smallval = abs < DELTA_Q_SMALL ? 1 : 0;

  while (i < DELTA_Q_SMALL && i <= abs) {
    int bit = (i < abs);
    aom_write(w, bit, cm->fc->delta_q_prob[i]);
    i++;
  }

  if (!smallval) {
    rem_bits = OD_ILOG_NZ(abs - 1) - 1;
    thr = (1 << rem_bits) + 1;
    aom_write_literal(w, rem_bits, 3);
    aom_write_literal(w, abs - thr, rem_bits);
  }
  if (abs > 0) {
    aom_write_bit(w, sign);
  }
}
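
/* write_delta_qindex() codes the magnitude of the per-block qindex delta as a
 * unary prefix of up to DELTA_Q_SMALL arithmetically coded bits; once the
 * magnitude reaches DELTA_Q_SMALL, a 3-bit length field followed by that many
 * literal bits carries the remainder (an exp-Golomb style escape). A sign bit
 * is appended only when the delta is non-zero. */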

static void update_delta_q_probs(AV1_COMMON *cm, aom_writer *w,
                                 FRAME_COUNTS *counts) {
  int k;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (k = 0; k < DELTA_Q_CONTEXTS; ++k) {
    av1_cond_prob_diff_update(w, &cm->fc->delta_q_prob[k], counts->delta_q[k],
                              probwt);
  }
}
#endif

static void update_skip_probs(AV1_COMMON *cm, aom_writer *w,
                              FRAME_COUNTS *counts) {
  int k;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (k = 0; k < SKIP_CONTEXTS; ++k) {
    av1_cond_prob_diff_update(w, &cm->fc->skip_probs[k], counts->skip[k],
                              probwt);
  }
}

#if !CONFIG_EC_ADAPT
static void update_switchable_interp_probs(AV1_COMMON *cm, aom_writer *w,
                                           FRAME_COUNTS *counts) {
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
#if CONFIG_TILE_GROUPS
    const int probwt = cm->num_tg;
#else
    const int probwt = 1;
#endif
    prob_diff_update(
        av1_switchable_interp_tree, cm->fc->switchable_interp_prob[j],
        counts->switchable_interp[j], SWITCHABLE_FILTERS, probwt, w);
  }
}
#endif

#if CONFIG_EXT_TX
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;
  int s;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (s = 1; s < EXT_TX_SETS_INTER; ++s) {
    int savings = 0;
    int do_update = 0;
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_inter_ext_tx_for_txsize[s][i]) continue;
      savings += prob_diff_update_savings(
          av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
          cm->counts.inter_ext_tx[s][i], num_ext_tx_set_inter[s], probwt);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_inter_ext_tx_for_txsize[s][i]) continue;
        prob_diff_update(
            av1_ext_tx_inter_tree[s], cm->fc->inter_ext_tx_prob[s][i],
            cm->counts.inter_ext_tx[s][i], num_ext_tx_set_inter[s], probwt, w);
      }
    }
  }

  for (s = 1; s < EXT_TX_SETS_INTRA; ++s) {
    int savings = 0;
    int do_update = 0;
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      if (!use_intra_ext_tx_for_txsize[s][i]) continue;
      for (j = 0; j < INTRA_MODES; ++j)
        savings += prob_diff_update_savings(
            av1_ext_tx_intra_tree[s], cm->fc->intra_ext_tx_prob[s][i][j],
            cm->counts.intra_ext_tx[s][i][j], num_ext_tx_set_intra[s], probwt);
    }
    do_update = savings > savings_thresh;
    aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
    if (do_update) {
      for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
        if (!use_intra_ext_tx_for_txsize[s][i]) continue;
        for (j = 0; j < INTRA_MODES; ++j)
          prob_diff_update(av1_ext_tx_intra_tree[s],
                           cm->fc->intra_ext_tx_prob[s][i][j],
                           cm->counts.intra_ext_tx[s][i][j],
                           num_ext_tx_set_intra[s], probwt, w);
      }
    }
  }
}

#else
#if !CONFIG_EC_ADAPT
static void update_ext_tx_probs(AV1_COMMON *cm, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;

  int savings = 0;
  int do_update = 0;
#if CONFIG_TILE_GROUPS
  const int probwt = cm->num_tg;
#else
  const int probwt = 1;
#endif
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    for (j = 0; j < TX_TYPES; ++j)
      savings += prob_diff_update_savings(
          av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
          cm->counts.intra_ext_tx[i][j], TX_TYPES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      for (j = 0; j < TX_TYPES; ++j) {
        prob_diff_update(av1_ext_tx_tree, cm->fc->intra_ext_tx_prob[i][j],
                         cm->counts.intra_ext_tx[i][j], TX_TYPES, probwt, w);
      }
    }
  }

  savings = 0;
  for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
    savings +=
        prob_diff_update_savings(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                                 cm->counts.inter_ext_tx[i], TX_TYPES, probwt);
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = TX_4X4; i < EXT_TX_SIZES; ++i) {
      prob_diff_update(av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[i],
                       cm->counts.inter_ext_tx[i], TX_TYPES, probwt, w);
    }
  }
}
#endif  // !CONFIG_EC_ADAPT
#endif  // CONFIG_EXT_TX
#if CONFIG_PALETTE
static void pack_palette_tokens(aom_writer *w, const TOKENEXTRA **tp, int n,
                                int num) {
  int i;
  const TOKENEXTRA *p = *tp;

  for (i = 0; i < num; ++i) {
    av1_write_token(w, av1_palette_color_tree[n - 2], p->context_tree,
                    &palette_color_encodings[n - 2][p->token]);
    ++p;
  }

  *tp = p;
}
#endif  // CONFIG_PALETTE
#if !CONFIG_PVQ
#if CONFIG_SUPERTX
static void update_supertx_probs(AV1_COMMON *cm, int probwt, aom_writer *w) {
  const int savings_thresh = av1_cost_one(GROUP_DIFF_UPDATE_PROB) -
                             av1_cost_zero(GROUP_DIFF_UPDATE_PROB);
  int i, j;
  int savings = 0;
  int do_update = 0;
  for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
    for (j = TX_8X8; j < TX_SIZES; ++j) {
      savings += av1_cond_prob_diff_update_savings(
          &cm->fc->supertx_prob[i][j], cm->counts.supertx[i][j], probwt);
    }
  }
  do_update = savings > savings_thresh;
  aom_write(w, do_update, GROUP_DIFF_UPDATE_PROB);
  if (do_update) {
    for (i = 0; i < PARTITION_SUPERTX_CONTEXTS; ++i) {
      for (j = TX_8X8; j < TX_SIZES; ++j) {
        av1_cond_prob_diff_update(w, &cm->fc->supertx_prob[i][j],
                                  cm->counts.supertx[i][j], probwt);
      }
    }
  }
}
#endif  // CONFIG_SUPERTX
| 754 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 755 | static void pack_mb_tokens(aom_writer *w, const TOKENEXTRA **tp, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 756 | const TOKENEXTRA *const stop, |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 757 | aom_bit_depth_t bit_depth, const TX_SIZE tx_size, |
| 758 | TOKEN_STATS *token_stats) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 759 | const TOKENEXTRA *p = *tp; |
| 760 | #if CONFIG_VAR_TX |
| 761 | int count = 0; |
Jingning Han | 7e99297 | 2016-10-31 11:03:06 -0700 | [diff] [blame] | 762 | const int seg_eob = tx_size_2d[tx_size]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 763 | #endif |
Alex Converse | d8fdfaa | 2016-07-26 16:27:51 -0700 | [diff] [blame] | 764 | #if CONFIG_AOM_HIGHBITDEPTH |
| 765 | const av1_extra_bit *const extra_bits_table = |
| 766 | (bit_depth == AOM_BITS_12) |
| 767 | ? av1_extra_bits_high12 |
| 768 | : (bit_depth == AOM_BITS_10) ? av1_extra_bits_high10 : av1_extra_bits; |
| 769 | #else |
| 770 | const av1_extra_bit *const extra_bits_table = av1_extra_bits; |
| 771 | (void)bit_depth; |
| 772 | #endif // CONFIG_AOM_HIGHBITDEPTH |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 773 | |
| 774 | while (p < stop && p->token != EOSB_TOKEN) { |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 775 | const int token = p->token; |
| 776 | aom_tree_index index = 0; |
Alex Converse | aca9feb | 2016-10-10 11:08:10 -0700 | [diff] [blame] | 777 | #if !CONFIG_EC_MULTISYMBOL |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 778 | const struct av1_token *const coef_encoding = &av1_coef_encodings[token]; |
| 779 | int coef_value = coef_encoding->value; |
| 780 | int coef_length = coef_encoding->len; |
Alex Converse | aca9feb | 2016-10-10 11:08:10 -0700 | [diff] [blame] | 781 | #endif // !CONFIG_EC_MULTISYMBOL |
Alex Converse | d8fdfaa | 2016-07-26 16:27:51 -0700 | [diff] [blame] | 782 | const av1_extra_bit *const extra_bits = &extra_bits_table[token]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 783 | |
Alex Converse | aca9feb | 2016-10-10 11:08:10 -0700 | [diff] [blame] | 784 | #if CONFIG_EC_MULTISYMBOL |
Alex Converse | dc62b09 | 2016-10-11 16:50:56 -0700 | [diff] [blame] | 785 | /* skip one or two nodes */ |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 786 | if (!p->skip_eob_node) |
| 787 | aom_write_record(w, token != EOB_TOKEN, p->context_tree[0], token_stats); |
Alex Converse | dc62b09 | 2016-10-11 16:50:56 -0700 | [diff] [blame] | 788 | |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 789 | if (token != EOB_TOKEN) { |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 790 | aom_write_record(w, token != ZERO_TOKEN, p->context_tree[1], token_stats); |
Alex Converse | ea7e990 | 2016-10-12 12:53:40 -0700 | [diff] [blame] | 791 | |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 792 | if (token != ZERO_TOKEN) { |
| 793 | aom_write_symbol(w, token - ONE_TOKEN, *p->token_cdf, |
Alex Converse | a1ac972 | 2016-10-12 15:59:58 -0700 | [diff] [blame] | 794 | CATEGORY6_TOKEN - ONE_TOKEN + 1); |
Alex Converse | ea7e990 | 2016-10-12 12:53:40 -0700 | [diff] [blame] | 795 | } |
Alex Converse | dc62b09 | 2016-10-11 16:50:56 -0700 | [diff] [blame] | 796 | } |
| 797 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 798 | /* skip one or two nodes */ |
| 799 | if (p->skip_eob_node) |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 800 | coef_length -= p->skip_eob_node; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 801 | else |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 802 | aom_write_record(w, token != EOB_TOKEN, p->context_tree[0], token_stats); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 803 | |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 804 | if (token != EOB_TOKEN) { |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 805 | aom_write_record(w, token != ZERO_TOKEN, p->context_tree[1], token_stats); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 806 | |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 807 | if (token != ZERO_TOKEN) { |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 808 | aom_write_record(w, token != ONE_TOKEN, p->context_tree[2], |
| 809 | token_stats); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 810 | |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 811 | if (token != ONE_TOKEN) { |
| 812 | const int unconstrained_len = UNCONSTRAINED_NODES - p->skip_eob_node; |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 813 | aom_write_tree_record( |
| 814 | w, av1_coef_con_tree, |
| 815 | av1_pareto8_full[p->context_tree[PIVOT_NODE] - 1], coef_value, |
| 816 | coef_length - unconstrained_len, 0, token_stats); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 817 | } |
| 818 | } |
| 819 | } |
Alex Converse | aca9feb | 2016-10-10 11:08:10 -0700 | [diff] [blame] | 820 | #endif // CONFIG_EC_MULTISYMBOL |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 821 | |
Alex Converse | d8fdfaa | 2016-07-26 16:27:51 -0700 | [diff] [blame] | 822 | if (extra_bits->base_val) { |
| 823 | const int bit_string = p->extra; |
| 824 | const int bit_string_length = extra_bits->len; // Length of extra bits to |
| 825 | // be written excluding |
| 826 | // the sign bit. |
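// For CAT6 tokens, high-order extra bits that cannot be set at the current
// transform size are skipped instead of coded; the loop below asserts that
// every skipped bit is zero.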
| 827 | int skip_bits = (extra_bits->base_val == CAT6_MIN_VAL) |
Jingning Han | 7e99297 | 2016-10-31 11:03:06 -0700 | [diff] [blame] | 828 | ? TX_SIZES - 1 - txsize_sqr_up_map[tx_size] |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 829 | : 0; |
| 830 | |
Alex Converse | d8fdfaa | 2016-07-26 16:27:51 -0700 | [diff] [blame] | 831 | if (bit_string_length > 0) { |
| 832 | const unsigned char *pb = extra_bits->prob; |
| 833 | const int value = bit_string >> 1; |
| 834 | const int num_bits = bit_string_length; // number of bits in value |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 835 | assert(num_bits > 0); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 836 | |
Alex Converse | 81fd890 | 2016-07-26 15:35:42 -0700 | [diff] [blame] | 837 | for (index = 0; index < num_bits; ++index) { |
| 838 | const int shift = num_bits - index - 1; |
| 839 | const int bb = (value >> shift) & 1; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 840 | if (skip_bits) { |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 841 | --skip_bits; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 842 | assert(!bb); |
| 843 | } else { |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 844 | aom_write_record(w, bb, pb[index], token_stats); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 845 | } |
Alex Converse | 81fd890 | 2016-07-26 15:35:42 -0700 | [diff] [blame] | 846 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 847 | } |
| 848 | |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 849 | aom_write_bit_record(w, bit_string & 1, token_stats); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 850 | } |
| 851 | ++p; |
| 852 | |
| 853 | #if CONFIG_VAR_TX |
| 854 | ++count; |
Urvang Joshi | 454280d | 2016-10-14 16:51:44 -0700 | [diff] [blame] | 855 | if (token == EOB_TOKEN || count == seg_eob) break; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 856 | #endif |
| 857 | } |
| 858 | |
| 859 | *tp = p; |
| 860 | } |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 861 | #endif // !CONFIG_PVQ |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 862 | #if CONFIG_VAR_TX |
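// Recursively packs the tokens of a variable-size transform block: once the
// recursion reaches the transform size that was actually coded, the tokens
// are written with pack_mb_tokens; otherwise the block is split into four
// quadrants and each is visited in turn.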
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 863 | static void pack_txb_tokens(aom_writer *w, const TOKENEXTRA **tp, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 864 | const TOKENEXTRA *const tok_end, MACROBLOCKD *xd, |
| 865 | MB_MODE_INFO *mbmi, int plane, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 866 | BLOCK_SIZE plane_bsize, aom_bit_depth_t bit_depth, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 867 | int block, int blk_row, int blk_col, |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 868 | TX_SIZE tx_size, TOKEN_STATS *token_stats) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 869 | const struct macroblockd_plane *const pd = &xd->plane[plane]; |
| 870 | const BLOCK_SIZE bsize = txsize_to_bsize[tx_size]; |
| 871 | const int tx_row = blk_row >> (1 - pd->subsampling_y); |
| 872 | const int tx_col = blk_col >> (1 - pd->subsampling_x); |
| 873 | TX_SIZE plane_tx_size; |
Jingning Han | f65b870 | 2016-10-31 12:13:20 -0700 | [diff] [blame] | 874 | const int max_blocks_high = max_block_high(xd, plane_bsize, plane); |
| 875 | const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 876 | |
| 877 | if (blk_row >= max_blocks_high || blk_col >= max_blocks_wide) return; |
| 878 | |
Debargha Mukherjee | 2f12340 | 2016-08-30 17:43:38 -0700 | [diff] [blame] | 879 | plane_tx_size = |
| 880 | plane ? uv_txsize_lookup[bsize][mbmi->inter_tx_size[tx_row][tx_col]][0][0] |
| 881 | : mbmi->inter_tx_size[tx_row][tx_col]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 882 | |
| 883 | if (tx_size == plane_tx_size) { |
Angie Chiang | d02001d | 2016-11-06 15:31:49 -0800 | [diff] [blame] | 884 | TOKEN_STATS tmp_token_stats; |
| 885 | init_token_stats(&tmp_token_stats); |
| 886 | pack_mb_tokens(w, tp, tok_end, bit_depth, tx_size, &tmp_token_stats); |
| 887 | #if CONFIG_RD_DEBUG |
| 888 | token_stats->txb_coeff_cost_map[blk_row][blk_col] = tmp_token_stats.cost; |
| 889 | token_stats->cost += tmp_token_stats.cost; |
| 890 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 891 | } else { |
Jingning Han | 1807fdc | 2016-11-08 15:17:58 -0800 | [diff] [blame] | 892 | const TX_SIZE sub_txs = sub_tx_size_map[tx_size]; |
| 893 | const int bsl = tx_size_wide_unit[sub_txs]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 894 | int i; |
| 895 | |
| 896 | assert(bsl > 0); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 897 | |
| 898 | for (i = 0; i < 4; ++i) { |
Jingning Han | 42a0fb3 | 2016-10-31 10:43:31 -0700 | [diff] [blame] | 899 | const int offsetr = blk_row + (i >> 1) * bsl; |
| 900 | const int offsetc = blk_col + (i & 0x01) * bsl; |
Jingning Han | 42a0fb3 | 2016-10-31 10:43:31 -0700 | [diff] [blame] | 901 | const int step = tx_size_wide_unit[sub_txs] * tx_size_high_unit[sub_txs]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 902 | |
| 903 | if (offsetr >= max_blocks_high || offsetc >= max_blocks_wide) continue; |
| 904 | |
| 905 | pack_txb_tokens(w, tp, tok_end, xd, mbmi, plane, plane_bsize, bit_depth, |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 906 | block, offsetr, offsetc, sub_txs, token_stats); |
Jingning Han | 98d6a1f | 2016-11-03 12:47:47 -0700 | [diff] [blame] | 907 | block += step; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 908 | } |
| 909 | } |
| 910 | } |
| 911 | #endif |
| 912 | |
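// Writes the segment id of the current block when the segmentation map is
// being updated, either as a multi-symbol value (CONFIG_EC_MULTISYMBOL) or
// with the binary segment tree.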
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 913 | static void write_segment_id(aom_writer *w, const struct segmentation *seg, |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 914 | struct segmentation_probs *segp, int segment_id) { |
Nathan E. Egge | f627e58 | 2016-08-19 20:06:51 -0400 | [diff] [blame] | 915 | if (seg->enabled && seg->update_map) { |
Nathan E. Egge | 3129606 | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 916 | #if CONFIG_EC_MULTISYMBOL |
Nathan E. Egge | f627e58 | 2016-08-19 20:06:51 -0400 | [diff] [blame] | 917 | aom_write_symbol(w, segment_id, segp->tree_cdf, MAX_SEGMENTS); |
| 918 | #else |
Nathan E. Egge | eeedc63 | 2016-06-19 12:02:33 -0400 | [diff] [blame] | 919 | aom_write_tree(w, av1_segment_tree, segp->tree_probs, segment_id, 3, 0); |
Nathan E. Egge | f627e58 | 2016-08-19 20:06:51 -0400 | [diff] [blame] | 920 | #endif |
| 921 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 922 | } |
| 923 | |
| 924 | // This function encodes the reference frame(s) of the current block. |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 925 | static void write_ref_frames(const AV1_COMMON *cm, const MACROBLOCKD *xd, |
| 926 | aom_writer *w) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 927 | const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi; |
| 928 | const int is_compound = has_second_ref(mbmi); |
| 929 | const int segment_id = mbmi->segment_id; |
| 930 | |
| 931 | // If segment-level coding of the reference frame is active, the reference |
| 932 | // is implied by the segment id; otherwise it is coded explicitly below. |
| 933 | if (segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) { |
| 934 | assert(!is_compound); |
| 935 | assert(mbmi->ref_frame[0] == |
| 936 | get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME)); |
| 937 | } else { |
| 938 | // Signal whether the block uses compound prediction when the frame-level |
| 939 | // reference mode does not already determine it. |
| 940 | if (cm->reference_mode == REFERENCE_MODE_SELECT) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 941 | aom_write(w, is_compound, av1_get_reference_mode_prob(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 942 | } else { |
| 943 | assert((!is_compound) == (cm->reference_mode == SINGLE_REFERENCE)); |
| 944 | } |
| 945 | |
| 946 | if (is_compound) { |
| 947 | #if CONFIG_EXT_REFS |
| 948 | const int bit = (mbmi->ref_frame[0] == GOLDEN_FRAME || |
| 949 | mbmi->ref_frame[0] == LAST3_FRAME); |
| 950 | const int bit_bwd = mbmi->ref_frame[1] == ALTREF_FRAME; |
| 951 | #else // CONFIG_EXT_REFS |
| 952 | const int bit = mbmi->ref_frame[0] == GOLDEN_FRAME; |
| 953 | #endif // CONFIG_EXT_REFS |
| 954 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 955 | aom_write(w, bit, av1_get_pred_prob_comp_ref_p(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 956 | |
| 957 | #if CONFIG_EXT_REFS |
| 958 | if (!bit) { |
| 959 | const int bit1 = mbmi->ref_frame[0] == LAST_FRAME; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 960 | aom_write(w, bit1, av1_get_pred_prob_comp_ref_p1(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 961 | } else { |
| 962 | const int bit2 = mbmi->ref_frame[0] == GOLDEN_FRAME; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 963 | aom_write(w, bit2, av1_get_pred_prob_comp_ref_p2(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 964 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 965 | aom_write(w, bit_bwd, av1_get_pred_prob_comp_bwdref_p(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 966 | #endif // CONFIG_EXT_REFS |
| 967 | } else { |
| 968 | #if CONFIG_EXT_REFS |
| 969 | const int bit0 = (mbmi->ref_frame[0] == ALTREF_FRAME || |
| 970 | mbmi->ref_frame[0] == BWDREF_FRAME); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 971 | aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 972 | |
| 973 | if (bit0) { |
| 974 | const int bit1 = mbmi->ref_frame[0] == ALTREF_FRAME; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 975 | aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 976 | } else { |
| 977 | const int bit2 = (mbmi->ref_frame[0] == LAST3_FRAME || |
| 978 | mbmi->ref_frame[0] == GOLDEN_FRAME); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 979 | aom_write(w, bit2, av1_get_pred_prob_single_ref_p3(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 980 | |
| 981 | if (!bit2) { |
| 982 | const int bit3 = mbmi->ref_frame[0] != LAST_FRAME; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 983 | aom_write(w, bit3, av1_get_pred_prob_single_ref_p4(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 984 | } else { |
| 985 | const int bit4 = mbmi->ref_frame[0] != LAST3_FRAME; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 986 | aom_write(w, bit4, av1_get_pred_prob_single_ref_p5(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 987 | } |
| 988 | } |
| 989 | #else // CONFIG_EXT_REFS |
| 990 | const int bit0 = mbmi->ref_frame[0] != LAST_FRAME; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 991 | aom_write(w, bit0, av1_get_pred_prob_single_ref_p1(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 992 | |
| 993 | if (bit0) { |
| 994 | const int bit1 = mbmi->ref_frame[0] != GOLDEN_FRAME; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 995 | aom_write(w, bit1, av1_get_pred_prob_single_ref_p2(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 996 | } |
| 997 | #endif // CONFIG_EXT_REFS |
| 998 | } |
| 999 | } |
| 1000 | } |
| 1001 | |
hui su | 5db9743 | 2016-10-14 16:10:14 -0700 | [diff] [blame] | 1002 | #if CONFIG_FILTER_INTRA |
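// Signals the filter-intra mode for luma and chroma. The use flag is only
// coded when the corresponding prediction mode is DC_PRED (and, with
// CONFIG_PALETTE, no palette is used); the mode index follows when the flag
// is set.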
| 1003 | static void write_filter_intra_mode_info(const AV1_COMMON *const cm, |
| 1004 | const MB_MODE_INFO *const mbmi, |
| 1005 | aom_writer *w) { |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 1006 | if (mbmi->mode == DC_PRED |
| 1007 | #if CONFIG_PALETTE |
| 1008 | && mbmi->palette_mode_info.palette_size[0] == 0 |
| 1009 | #endif // CONFIG_PALETTE |
| 1010 | ) { |
hui su | 5db9743 | 2016-10-14 16:10:14 -0700 | [diff] [blame] | 1011 | aom_write(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[0], |
| 1012 | cm->fc->filter_intra_probs[0]); |
| 1013 | if (mbmi->filter_intra_mode_info.use_filter_intra_mode[0]) { |
| 1014 | const FILTER_INTRA_MODE mode = |
| 1015 | mbmi->filter_intra_mode_info.filter_intra_mode[0]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1016 | write_uniform(w, FILTER_INTRA_MODES, mode); |
| 1017 | } |
| 1018 | } |
| 1019 | |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 1020 | if (mbmi->uv_mode == DC_PRED |
| 1021 | #if CONFIG_PALETTE |
| 1022 | && mbmi->palette_mode_info.palette_size[1] == 0 |
| 1023 | #endif // CONFIG_PALETTE |
| 1024 | ) { |
hui su | 5db9743 | 2016-10-14 16:10:14 -0700 | [diff] [blame] | 1025 | aom_write(w, mbmi->filter_intra_mode_info.use_filter_intra_mode[1], |
| 1026 | cm->fc->filter_intra_probs[1]); |
| 1027 | if (mbmi->filter_intra_mode_info.use_filter_intra_mode[1]) { |
| 1028 | const FILTER_INTRA_MODE mode = |
| 1029 | mbmi->filter_intra_mode_info.filter_intra_mode[1]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1030 | write_uniform(w, FILTER_INTRA_MODES, mode); |
| 1031 | } |
| 1032 | } |
| 1033 | } |
hui su | 5db9743 | 2016-10-14 16:10:14 -0700 | [diff] [blame] | 1034 | #endif // CONFIG_FILTER_INTRA |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1035 | |
hui su | 5db9743 | 2016-10-14 16:10:14 -0700 | [diff] [blame] | 1036 | #if CONFIG_EXT_INTRA |
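// For directional intra modes, writes the angle delta around the base angle
// of the mode; with CONFIG_INTRA_INTERP the intra interpolation filter is
// also coded when the resulting prediction angle makes it switchable.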
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1037 | static void write_intra_angle_info(const AV1_COMMON *cm, const MACROBLOCKD *xd, |
| 1038 | aom_writer *w) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1039 | const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi; |
| 1040 | const BLOCK_SIZE bsize = mbmi->sb_type; |
hui su | eda3d76 | 2016-12-06 16:58:23 -0800 | [diff] [blame] | 1041 | #if CONFIG_INTRA_INTERP |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1042 | const int intra_filter_ctx = av1_get_pred_context_intra_interp(xd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1043 | int p_angle; |
hui su | eda3d76 | 2016-12-06 16:58:23 -0800 | [diff] [blame] | 1044 | #endif // CONFIG_INTRA_INTERP |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1045 | |
hui su | eda3d76 | 2016-12-06 16:58:23 -0800 | [diff] [blame] | 1046 | (void)cm; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1047 | if (bsize < BLOCK_8X8) return; |
| 1048 | |
hui su | 45dc597 | 2016-12-08 17:42:50 -0800 | [diff] [blame] | 1049 | if (av1_is_directional_mode(mbmi->mode, bsize)) { |
| 1050 | const int max_angle_delta = av1_get_max_angle_delta(mbmi->sb_type, 0); |
| 1051 | write_uniform(w, 2 * max_angle_delta + 1, |
| 1052 | max_angle_delta + mbmi->angle_delta[0]); |
hui su | eda3d76 | 2016-12-06 16:58:23 -0800 | [diff] [blame] | 1053 | #if CONFIG_INTRA_INTERP |
hui su | 45dc597 | 2016-12-08 17:42:50 -0800 | [diff] [blame] | 1054 | p_angle = mode_to_angle_map[mbmi->mode] + |
| 1055 | mbmi->angle_delta[0] * av1_get_angle_step(mbmi->sb_type, 0); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1056 | if (av1_is_intra_filter_switchable(p_angle)) { |
| 1057 | av1_write_token(w, av1_intra_filter_tree, |
| 1058 | cm->fc->intra_filter_probs[intra_filter_ctx], |
| 1059 | &intra_filter_encodings[mbmi->intra_filter]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1060 | } |
hui su | eda3d76 | 2016-12-06 16:58:23 -0800 | [diff] [blame] | 1061 | #endif // CONFIG_INTRA_INTERP |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1062 | } |
| 1063 | |
hui su | 45dc597 | 2016-12-08 17:42:50 -0800 | [diff] [blame] | 1064 | if (av1_is_directional_mode(mbmi->uv_mode, bsize)) { |
| 1065 | write_uniform(w, 2 * MAX_ANGLE_DELTA_UV + 1, |
| 1066 | MAX_ANGLE_DELTA_UV + mbmi->angle_delta[1]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1067 | } |
| 1068 | } |
| 1069 | #endif // CONFIG_EXT_INTRA |
| 1070 | |
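// Writes the interpolation filter(s) of an inter block when the frame-level
// filter is SWITCHABLE. With CONFIG_EXT_INTERP nothing is coded when no
// sub-pel motion component needs a filter; with CONFIG_DUAL_FILTER one
// filter is coded per direction.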
Angie Chiang | 5678ad9 | 2016-11-21 09:38:40 -0800 | [diff] [blame] | 1071 | static void write_mb_interp_filter(AV1_COMP *cpi, const MACROBLOCKD *xd, |
| 1072 | aom_writer *w) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1073 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1074 | const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi; |
| 1075 | #if CONFIG_DUAL_FILTER |
| 1076 | int dir; |
| 1077 | #endif |
| 1078 | if (cm->interp_filter == SWITCHABLE) { |
| 1079 | #if CONFIG_EXT_INTERP |
| 1080 | #if CONFIG_DUAL_FILTER |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1081 | if (!av1_is_interp_needed(xd)) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1082 | assert(mbmi->interp_filter[0] == EIGHTTAP_REGULAR); |
| 1083 | return; |
| 1084 | } |
| 1085 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1086 | if (!av1_is_interp_needed(xd)) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1087 | #if CONFIG_DUAL_FILTER |
| 1088 | assert(mbmi->interp_filter[0] == EIGHTTAP_REGULAR); |
| 1089 | assert(mbmi->interp_filter[1] == EIGHTTAP_REGULAR); |
| 1090 | #else |
| 1091 | assert(mbmi->interp_filter == EIGHTTAP_REGULAR); |
| 1092 | #endif |
| 1093 | return; |
| 1094 | } |
| 1095 | #endif // CONFIG_DUAL_FILTER |
| 1096 | #endif // CONFIG_EXT_INTERP |
| 1097 | #if CONFIG_DUAL_FILTER |
| 1098 | for (dir = 0; dir < 2; ++dir) { |
| 1099 | if (has_subpel_mv_component(xd->mi[0], xd, dir) || |
| 1100 | (mbmi->ref_frame[1] > INTRA_FRAME && |
| 1101 | has_subpel_mv_component(xd->mi[0], xd, dir + 2))) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1102 | const int ctx = av1_get_pred_context_switchable_interp(xd, dir); |
| 1103 | av1_write_token(w, av1_switchable_interp_tree, |
| 1104 | cm->fc->switchable_interp_prob[ctx], |
| 1105 | &switchable_interp_encodings[mbmi->interp_filter[dir]]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1106 | ++cpi->interp_filter_selected[0][mbmi->interp_filter[dir]]; |
| 1107 | } |
| 1108 | } |
| 1109 | #else |
| 1110 | { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1111 | const int ctx = av1_get_pred_context_switchable_interp(xd); |
Nathan E. Egge | 00b3331 | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 1112 | #if CONFIG_EC_MULTISYMBOL |
Nathan E. Egge | 56eeaa5 | 2016-07-25 10:23:33 -0400 | [diff] [blame] | 1113 | aom_write_symbol(w, av1_switchable_interp_ind[mbmi->interp_filter], |
| 1114 | cm->fc->switchable_interp_cdf[ctx], SWITCHABLE_FILTERS); |
Nathan E. Egge | 4947c29 | 2016-04-26 11:37:06 -0400 | [diff] [blame] | 1115 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1116 | av1_write_token(w, av1_switchable_interp_tree, |
| 1117 | cm->fc->switchable_interp_prob[ctx], |
| 1118 | &switchable_interp_encodings[mbmi->interp_filter]); |
Nathan E. Egge | 4947c29 | 2016-04-26 11:37:06 -0400 | [diff] [blame] | 1119 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1120 | ++cpi->interp_filter_selected[0][mbmi->interp_filter]; |
| 1121 | } |
| 1122 | #endif |
| 1123 | } |
| 1124 | } |
| 1125 | |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 1126 | #if CONFIG_PALETTE |
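// Signals palette coding for DC_PRED blocks: a use-palette flag (with a
// context from the above/left blocks for luma), the palette size, the
// palette colors as literals at the frame bit depth, and the first color
// index.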
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1127 | static void write_palette_mode_info(const AV1_COMMON *cm, const MACROBLOCKD *xd, |
| 1128 | const MODE_INFO *const mi, aom_writer *w) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1129 | const MB_MODE_INFO *const mbmi = &mi->mbmi; |
| 1130 | const MODE_INFO *const above_mi = xd->above_mi; |
| 1131 | const MODE_INFO *const left_mi = xd->left_mi; |
| 1132 | const BLOCK_SIZE bsize = mbmi->sb_type; |
| 1133 | const PALETTE_MODE_INFO *const pmi = &mbmi->palette_mode_info; |
| 1134 | int palette_ctx = 0; |
| 1135 | int n, i; |
| 1136 | |
| 1137 | if (mbmi->mode == DC_PRED) { |
| 1138 | n = pmi->palette_size[0]; |
| 1139 | if (above_mi) |
| 1140 | palette_ctx += (above_mi->mbmi.palette_mode_info.palette_size[0] > 0); |
| 1141 | if (left_mi) |
| 1142 | palette_ctx += (left_mi->mbmi.palette_mode_info.palette_size[0] > 0); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1143 | aom_write(w, n > 0, |
| 1144 | av1_default_palette_y_mode_prob[bsize - BLOCK_8X8][palette_ctx]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1145 | if (n > 0) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1146 | av1_write_token(w, av1_palette_size_tree, |
| 1147 | av1_default_palette_y_size_prob[bsize - BLOCK_8X8], |
| 1148 | &palette_size_encodings[n - 2]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1149 | for (i = 0; i < n; ++i) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1150 | aom_write_literal(w, pmi->palette_colors[i], cm->bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1151 | write_uniform(w, n, pmi->palette_first_color_idx[0]); |
| 1152 | } |
| 1153 | } |
| 1154 | |
| 1155 | if (mbmi->uv_mode == DC_PRED) { |
| 1156 | n = pmi->palette_size[1]; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1157 | aom_write(w, n > 0, |
| 1158 | av1_default_palette_uv_mode_prob[pmi->palette_size[0] > 0]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1159 | if (n > 0) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1160 | av1_write_token(w, av1_palette_size_tree, |
| 1161 | av1_default_palette_uv_size_prob[bsize - BLOCK_8X8], |
| 1162 | &palette_size_encodings[n - 2]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1163 | for (i = 0; i < n; ++i) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1164 | aom_write_literal(w, pmi->palette_colors[PALETTE_MAX_SIZE + i], |
| 1165 | cm->bit_depth); |
| 1166 | aom_write_literal(w, pmi->palette_colors[2 * PALETTE_MAX_SIZE + i], |
| 1167 | cm->bit_depth); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1168 | } |
| 1169 | write_uniform(w, n, pmi->palette_first_color_idx[1]); |
| 1170 | } |
| 1171 | } |
| 1172 | } |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 1173 | #endif // CONFIG_PALETTE |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1174 | |
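// Writes the transform type of the block unless the type is fixed. Nothing
// is coded for skipped or segment-skipped blocks or when base_qindex is 0;
// with CONFIG_EXT_TX the type is coded with the tree of the extended set
// selected for the transform and block size.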
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1175 | static void write_tx_type(const AV1_COMMON *const cm, |
| 1176 | const MB_MODE_INFO *const mbmi, |
| 1177 | #if CONFIG_SUPERTX |
| 1178 | const int supertx_enabled, |
| 1179 | #endif |
| 1180 | aom_writer *w) { |
| 1181 | const int is_inter = is_inter_block(mbmi); |
Jingning Han | e67b38a | 2016-11-04 10:30:00 -0700 | [diff] [blame] | 1182 | #if CONFIG_VAR_TX |
| 1183 | const TX_SIZE tx_size = is_inter ? mbmi->min_tx_size : mbmi->tx_size; |
| 1184 | #else |
Jingning Han | 641b1ad | 2016-11-04 09:58:36 -0700 | [diff] [blame] | 1185 | const TX_SIZE tx_size = mbmi->tx_size; |
Jingning Han | e67b38a | 2016-11-04 10:30:00 -0700 | [diff] [blame] | 1186 | #endif |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1187 | if (!FIXED_TX_TYPE) { |
| 1188 | #if CONFIG_EXT_TX |
| 1189 | const BLOCK_SIZE bsize = mbmi->sb_type; |
Jingning Han | 641b1ad | 2016-11-04 09:58:36 -0700 | [diff] [blame] | 1190 | if (get_ext_tx_types(tx_size, bsize, is_inter) > 1 && cm->base_qindex > 0 && |
| 1191 | !mbmi->skip && |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1192 | #if CONFIG_SUPERTX |
| 1193 | !supertx_enabled && |
| 1194 | #endif // CONFIG_SUPERTX |
| 1195 | !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) { |
Jingning Han | 641b1ad | 2016-11-04 09:58:36 -0700 | [diff] [blame] | 1196 | int eset = get_ext_tx_set(tx_size, bsize, is_inter); |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1197 | if (is_inter) { |
| 1198 | assert(ext_tx_used_inter[eset][mbmi->tx_type]); |
| 1199 | if (eset > 0) |
| 1200 | av1_write_token( |
| 1201 | w, av1_ext_tx_inter_tree[eset], |
Jingning Han | 641b1ad | 2016-11-04 09:58:36 -0700 | [diff] [blame] | 1202 | cm->fc->inter_ext_tx_prob[eset][txsize_sqr_map[tx_size]], |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1203 | &ext_tx_inter_encodings[eset][mbmi->tx_type]); |
| 1204 | } else if (ALLOW_INTRA_EXT_TX) { |
| 1205 | if (eset > 0) |
Jingning Han | 641b1ad | 2016-11-04 09:58:36 -0700 | [diff] [blame] | 1206 | av1_write_token(w, av1_ext_tx_intra_tree[eset], |
| 1207 | cm->fc->intra_ext_tx_prob[eset][tx_size][mbmi->mode], |
| 1208 | &ext_tx_intra_encodings[eset][mbmi->tx_type]); |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1209 | } |
| 1210 | } |
| 1211 | #else |
Jingning Han | 641b1ad | 2016-11-04 09:58:36 -0700 | [diff] [blame] | 1212 | if (tx_size < TX_32X32 && cm->base_qindex > 0 && !mbmi->skip && |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1213 | #if CONFIG_SUPERTX |
| 1214 | !supertx_enabled && |
| 1215 | #endif // CONFIG_SUPERTX |
| 1216 | !segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) { |
| 1217 | if (is_inter) { |
Nathan E. Egge | dfa33f2 | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 1218 | #if CONFIG_EC_MULTISYMBOL |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1219 | aom_write_symbol(w, av1_ext_tx_ind[mbmi->tx_type], |
Jingning Han | 641b1ad | 2016-11-04 09:58:36 -0700 | [diff] [blame] | 1220 | cm->fc->inter_ext_tx_cdf[tx_size], TX_TYPES); |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1221 | #else |
Jingning Han | 641b1ad | 2016-11-04 09:58:36 -0700 | [diff] [blame] | 1222 | av1_write_token(w, av1_ext_tx_tree, cm->fc->inter_ext_tx_prob[tx_size], |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1223 | &ext_tx_encodings[mbmi->tx_type]); |
| 1224 | #endif |
| 1225 | } else { |
Nathan E. Egge | 29ccee0 | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 1226 | #if CONFIG_EC_MULTISYMBOL |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1227 | aom_write_symbol( |
| 1228 | w, av1_ext_tx_ind[mbmi->tx_type], |
Jingning Han | 641b1ad | 2016-11-04 09:58:36 -0700 | [diff] [blame] | 1229 | cm->fc->intra_ext_tx_cdf[tx_size] |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1230 | [intra_mode_to_tx_type_context[mbmi->mode]], |
| 1231 | TX_TYPES); |
| 1232 | #else |
| 1233 | av1_write_token( |
| 1234 | w, av1_ext_tx_tree, |
| 1235 | cm->fc |
Jingning Han | 641b1ad | 2016-11-04 09:58:36 -0700 | [diff] [blame] | 1236 | ->intra_ext_tx_prob[tx_size] |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1237 | [intra_mode_to_tx_type_context[mbmi->mode]], |
| 1238 | &ext_tx_encodings[mbmi->tx_type]); |
| 1239 | #endif |
| 1240 | } |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1241 | } |
| 1242 | #endif // CONFIG_EXT_TX |
| 1243 | } |
| 1244 | } |
| 1245 | |
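// Writes all mode information for a block in an inter frame: segment id,
// skip flag, optional delta-q, the intra/inter flag and transform size,
// followed by either the intra modes or the reference frames, inter modes,
// motion vectors and compound/motion-mode signalling, and finally the
// transform type.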
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1246 | static void pack_inter_mode_mvs(AV1_COMP *cpi, const MODE_INFO *mi, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1247 | #if CONFIG_SUPERTX |
| 1248 | int supertx_enabled, |
| 1249 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1250 | aom_writer *w) { |
| 1251 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1252 | #if !CONFIG_REF_MV |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1253 | nmv_context *nmvc = &cm->fc->nmvc; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1254 | #endif |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1255 | |
| 1256 | #if CONFIG_DELTA_Q |
| 1257 | MACROBLOCK *const x = &cpi->td.mb; |
| 1258 | MACROBLOCKD *const xd = &x->e_mbd; |
| 1259 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1260 | const MACROBLOCK *x = &cpi->td.mb; |
| 1261 | const MACROBLOCKD *xd = &x->e_mbd; |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1262 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1263 | const struct segmentation *const seg = &cm->seg; |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1264 | struct segmentation_probs *const segp = &cm->fc->seg; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1265 | const MB_MODE_INFO *const mbmi = &mi->mbmi; |
| 1266 | const MB_MODE_INFO_EXT *const mbmi_ext = x->mbmi_ext; |
| 1267 | const PREDICTION_MODE mode = mbmi->mode; |
| 1268 | const int segment_id = mbmi->segment_id; |
| 1269 | const BLOCK_SIZE bsize = mbmi->sb_type; |
| 1270 | const int allow_hp = cm->allow_high_precision_mv; |
| 1271 | const int is_inter = is_inter_block(mbmi); |
| 1272 | const int is_compound = has_second_ref(mbmi); |
| 1273 | int skip, ref; |
Jingning Han | 5226184 | 2016-12-14 12:17:49 -0800 | [diff] [blame] | 1274 | #if CONFIG_CB4X4 |
| 1275 | const int unify_bsize = 1; |
| 1276 | #else |
| 1277 | const int unify_bsize = 0; |
| 1278 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1279 | |
| 1280 | if (seg->update_map) { |
| 1281 | if (seg->temporal_update) { |
| 1282 | const int pred_flag = mbmi->seg_id_predicted; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1283 | aom_prob pred_prob = av1_get_pred_prob_seg_id(segp, xd); |
| 1284 | aom_write(w, pred_flag, pred_prob); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1285 | if (!pred_flag) write_segment_id(w, seg, segp, segment_id); |
| 1286 | } else { |
| 1287 | write_segment_id(w, seg, segp, segment_id); |
| 1288 | } |
| 1289 | } |
| 1290 | |
| 1291 | #if CONFIG_SUPERTX |
| 1292 | if (supertx_enabled) |
| 1293 | skip = mbmi->skip; |
| 1294 | else |
| 1295 | skip = write_skip(cm, xd, segment_id, mi, w); |
| 1296 | #else |
| 1297 | skip = write_skip(cm, xd, segment_id, mi, w); |
| 1298 | #endif // CONFIG_SUPERTX |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1299 | #if CONFIG_DELTA_Q |
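// Delta-q is signalled once per superblock, at its top-left mode-info unit,
// and is omitted for a fully skipped 64x64 block; the delta is coded after
// being reduced by cm->delta_q_res.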
| 1300 | if (cm->delta_q_present_flag) { |
Thomas Davies | f693610 | 2016-09-05 16:51:31 +0100 | [diff] [blame] | 1301 | int mi_row = (-xd->mb_to_top_edge) >> (MI_SIZE_LOG2 + 3); |
| 1302 | int mi_col = (-xd->mb_to_left_edge) >> (MI_SIZE_LOG2 + 3); |
| 1303 | int super_block_upper_left = |
| 1304 | ((mi_row & MAX_MIB_MASK) == 0) && ((mi_col & MAX_MIB_MASK) == 0); |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1305 | if ((bsize != BLOCK_64X64 || skip == 0) && super_block_upper_left) { |
Thomas Davies | f693610 | 2016-09-05 16:51:31 +0100 | [diff] [blame] | 1306 | int reduced_delta_qindex = |
| 1307 | (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res; |
| 1308 | write_delta_qindex(cm, reduced_delta_qindex, w); |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1309 | xd->prev_qindex = mbmi->current_q_index; |
| 1310 | } |
| 1311 | } |
| 1312 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1313 | |
| 1314 | #if CONFIG_SUPERTX |
| 1315 | if (!supertx_enabled) |
| 1316 | #endif // CONFIG_SUPERTX |
| 1317 | if (!segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME)) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1318 | aom_write(w, is_inter, av1_get_intra_inter_prob(cm, xd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1319 | |
| 1320 | if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT && |
| 1321 | #if CONFIG_SUPERTX |
| 1322 | !supertx_enabled && |
| 1323 | #endif // CONFIG_SUPERTX |
| 1324 | !(is_inter && skip) && !xd->lossless[segment_id]) { |
| 1325 | #if CONFIG_VAR_TX |
| 1326 | if (is_inter) { // This implies the skip flag is 0. |
Jingning Han | 70e5f3f | 2016-11-09 17:03:07 -0800 | [diff] [blame] | 1327 | const TX_SIZE max_tx_size = max_txsize_rect_lookup[bsize]; |
Jingning Han | f64062f | 2016-11-02 16:22:18 -0700 | [diff] [blame] | 1328 | const int bh = tx_size_high_unit[max_tx_size]; |
| 1329 | const int bw = tx_size_wide_unit[max_tx_size]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1330 | const int width = num_4x4_blocks_wide_lookup[bsize]; |
| 1331 | const int height = num_4x4_blocks_high_lookup[bsize]; |
| 1332 | int idx, idy; |
Jingning Han | fe45b21 | 2016-11-22 10:30:23 -0800 | [diff] [blame] | 1333 | for (idy = 0; idy < height; idy += bh) |
| 1334 | for (idx = 0; idx < width; idx += bw) |
| 1335 | write_tx_size_vartx(cm, xd, mbmi, max_tx_size, height != width, idy, |
| 1336 | idx, w); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1337 | } else { |
Jingning Han | 1b1dc93 | 2016-11-09 10:55:30 -0800 | [diff] [blame] | 1338 | set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1339 | write_selected_tx_size(cm, xd, w); |
| 1340 | } |
| 1341 | } else { |
Jingning Han | 1b1dc93 | 2016-11-09 10:55:30 -0800 | [diff] [blame] | 1342 | set_txfm_ctxs(mbmi->tx_size, xd->n8_w, xd->n8_h, skip, xd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1343 | #else |
| 1344 | write_selected_tx_size(cm, xd, w); |
| 1345 | #endif |
| 1346 | } |
| 1347 | |
| 1348 | if (!is_inter) { |
Jingning Han | 5226184 | 2016-12-14 12:17:49 -0800 | [diff] [blame] | 1349 | if (bsize >= BLOCK_8X8 || unify_bsize) { |
Nathan E. Egge | ecc21ec | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 1350 | #if CONFIG_EC_MULTISYMBOL |
Nathan E. Egge | 5710c72 | 2016-09-08 10:01:16 -0400 | [diff] [blame] | 1351 | aom_write_symbol(w, av1_intra_mode_ind[mode], |
| 1352 | cm->fc->y_mode_cdf[size_group_lookup[bsize]], |
| 1353 | INTRA_MODES); |
| 1354 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1355 | write_intra_mode(w, mode, cm->fc->y_mode_prob[size_group_lookup[bsize]]); |
Nathan E. Egge | 5710c72 | 2016-09-08 10:01:16 -0400 | [diff] [blame] | 1356 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1357 | } else { |
| 1358 | int idx, idy; |
| 1359 | const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize]; |
| 1360 | const int num_4x4_h = num_4x4_blocks_high_lookup[bsize]; |
| 1361 | for (idy = 0; idy < 2; idy += num_4x4_h) { |
| 1362 | for (idx = 0; idx < 2; idx += num_4x4_w) { |
| 1363 | const PREDICTION_MODE b_mode = mi->bmi[idy * 2 + idx].as_mode; |
Nathan E. Egge | ecc21ec | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 1364 | #if CONFIG_EC_MULTISYMBOL |
Nathan E. Egge | 5710c72 | 2016-09-08 10:01:16 -0400 | [diff] [blame] | 1365 | aom_write_symbol(w, av1_intra_mode_ind[b_mode], cm->fc->y_mode_cdf[0], |
| 1366 | INTRA_MODES); |
| 1367 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1368 | write_intra_mode(w, b_mode, cm->fc->y_mode_prob[0]); |
Nathan E. Egge | 5710c72 | 2016-09-08 10:01:16 -0400 | [diff] [blame] | 1369 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1370 | } |
| 1371 | } |
| 1372 | } |
Nathan E. Egge | dd28aed | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 1373 | #if CONFIG_EC_MULTISYMBOL |
Nathan E. Egge | 380cb1a | 2016-09-08 10:13:42 -0400 | [diff] [blame] | 1374 | aom_write_symbol(w, av1_intra_mode_ind[mbmi->uv_mode], |
| 1375 | cm->fc->uv_mode_cdf[mode], INTRA_MODES); |
| 1376 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1377 | write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mode]); |
Nathan E. Egge | 380cb1a | 2016-09-08 10:13:42 -0400 | [diff] [blame] | 1378 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1379 | #if CONFIG_EXT_INTRA |
| 1380 | write_intra_angle_info(cm, xd, w); |
| 1381 | #endif // CONFIG_EXT_INTRA |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 1382 | #if CONFIG_PALETTE |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1383 | if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools) |
| 1384 | write_palette_mode_info(cm, xd, mi, w); |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 1385 | #endif // CONFIG_PALETTE |
hui su | 5db9743 | 2016-10-14 16:10:14 -0700 | [diff] [blame] | 1386 | #if CONFIG_FILTER_INTRA |
| 1387 | if (bsize >= BLOCK_8X8) write_filter_intra_mode_info(cm, mbmi, w); |
| 1388 | #endif // CONFIG_FILTER_INTRA |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1389 | } else { |
Yaowu Xu | b0d0d00 | 2016-11-22 09:26:43 -0800 | [diff] [blame] | 1390 | int16_t mode_ctx; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1391 | write_ref_frames(cm, xd, w); |
| 1392 | |
| 1393 | #if CONFIG_REF_MV |
| 1394 | #if CONFIG_EXT_INTER |
| 1395 | if (is_compound) |
| 1396 | mode_ctx = mbmi_ext->compound_mode_context[mbmi->ref_frame[0]]; |
| 1397 | else |
| 1398 | #endif // CONFIG_EXT_INTER |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1399 | mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context, |
| 1400 | mbmi->ref_frame, bsize, -1); |
Yaowu Xu | b0d0d00 | 2016-11-22 09:26:43 -0800 | [diff] [blame] | 1401 | #else // CONFIG_REF_MV |
| 1402 | mode_ctx = mbmi_ext->mode_context[mbmi->ref_frame[0]]; |
| 1403 | #endif // CONFIG_REF_MV |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1404 | |
| 1405 | // If segment skip is not enabled, code the mode. |
| 1406 | if (!segfeature_active(seg, segment_id, SEG_LVL_SKIP)) { |
Jingning Han | 5226184 | 2016-12-14 12:17:49 -0800 | [diff] [blame] | 1407 | if (bsize >= BLOCK_8X8 || unify_bsize) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1408 | #if CONFIG_EXT_INTER |
| 1409 | if (is_inter_compound_mode(mode)) |
| 1410 | write_inter_compound_mode(cm, w, mode, mode_ctx); |
| 1411 | else if (is_inter_singleref_mode(mode)) |
| 1412 | #endif // CONFIG_EXT_INTER |
| 1413 | write_inter_mode(cm, w, mode, |
| 1414 | #if CONFIG_REF_MV && CONFIG_EXT_INTER |
| 1415 | is_compound, |
| 1416 | #endif // CONFIG_REF_MV && CONFIG_EXT_INTER |
| 1417 | mode_ctx); |
| 1418 | |
| 1419 | #if CONFIG_REF_MV |
| 1420 | if (mode == NEARMV || mode == NEWMV) |
| 1421 | write_drl_idx(cm, mbmi, mbmi_ext, w); |
| 1422 | #endif |
| 1423 | } |
| 1424 | } |
| 1425 | |
Yue Chen | 69f18e1 | 2016-09-08 14:48:15 -0700 | [diff] [blame] | 1426 | #if !CONFIG_EXT_INTERP && !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION |
Angie Chiang | 5678ad9 | 2016-11-21 09:38:40 -0800 | [diff] [blame] | 1427 | write_mb_interp_filter(cpi, xd, w); |
Yue Chen | 69f18e1 | 2016-09-08 14:48:15 -0700 | [diff] [blame] | 1428 | #endif // !CONFIG_EXT_INTERP && !CONFIG_DUAL_FILTER && !CONFIG_WARPED_MOTION |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1429 | |
Jingning Han | 5226184 | 2016-12-14 12:17:49 -0800 | [diff] [blame] | 1430 | if (bsize < BLOCK_8X8 && !unify_bsize) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1431 | const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize]; |
| 1432 | const int num_4x4_h = num_4x4_blocks_high_lookup[bsize]; |
| 1433 | int idx, idy; |
| 1434 | for (idy = 0; idy < 2; idy += num_4x4_h) { |
| 1435 | for (idx = 0; idx < 2; idx += num_4x4_w) { |
| 1436 | const int j = idy * 2 + idx; |
| 1437 | const PREDICTION_MODE b_mode = mi->bmi[j].as_mode; |
| 1438 | #if CONFIG_REF_MV |
| 1439 | #if CONFIG_EXT_INTER |
| 1440 | if (!is_compound) |
| 1441 | #endif // CONFIG_EXT_INTER |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1442 | mode_ctx = av1_mode_context_analyzer(mbmi_ext->mode_context, |
| 1443 | mbmi->ref_frame, bsize, j); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1444 | #endif |
| 1445 | #if CONFIG_EXT_INTER |
| 1446 | if (is_inter_compound_mode(b_mode)) |
| 1447 | write_inter_compound_mode(cm, w, b_mode, mode_ctx); |
| 1448 | else if (is_inter_singleref_mode(b_mode)) |
| 1449 | #endif // CONFIG_EXT_INTER |
| 1450 | write_inter_mode(cm, w, b_mode, |
| 1451 | #if CONFIG_REF_MV && CONFIG_EXT_INTER |
| 1452 | has_second_ref(mbmi), |
| 1453 | #endif // CONFIG_REF_MV && CONFIG_EXT_INTER |
| 1454 | mode_ctx); |
| 1455 | |
| 1456 | #if CONFIG_EXT_INTER |
| 1457 | if (b_mode == NEWMV || b_mode == NEWFROMNEARMV || |
| 1458 | b_mode == NEW_NEWMV) { |
| 1459 | #else |
| 1460 | if (b_mode == NEWMV) { |
| 1461 | #endif // CONFIG_EXT_INTER |
| 1462 | for (ref = 0; ref < 1 + is_compound; ++ref) { |
| 1463 | #if CONFIG_REF_MV |
Yaowu Xu | 4306b6e | 2016-09-27 12:55:32 -0700 | [diff] [blame] | 1464 | int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame); |
| 1465 | int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type], |
| 1466 | mbmi_ext->ref_mv_stack[rf_type], ref, |
| 1467 | mbmi->ref_mv_idx); |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1468 | nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1469 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1470 | av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[ref].as_mv, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1471 | #if CONFIG_EXT_INTER |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1472 | &mi->bmi[j].ref_mv[ref].as_mv, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1473 | #if CONFIG_REF_MV |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1474 | is_compound, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1475 | #endif |
| 1476 | #else |
| 1477 | #if CONFIG_REF_MV |
Yaowu Xu | f5bbbfa | 2016-09-26 09:13:38 -0700 | [diff] [blame] | 1478 | &mi->bmi[j].pred_mv[ref].as_mv, is_compound, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1479 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1480 | &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0].as_mv, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1481 | #endif // CONFIG_REF_MV |
| 1482 | #endif // CONFIG_EXT_INTER |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1483 | nmvc, allow_hp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1484 | } |
| 1485 | } |
| 1486 | #if CONFIG_EXT_INTER |
| 1487 | else if (b_mode == NEAREST_NEWMV || b_mode == NEAR_NEWMV) { |
| 1488 | #if CONFIG_REF_MV |
Yaowu Xu | 4306b6e | 2016-09-27 12:55:32 -0700 | [diff] [blame] | 1489 | int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame); |
| 1490 | int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type], |
| 1491 | mbmi_ext->ref_mv_stack[rf_type], 1, |
| 1492 | mbmi->ref_mv_idx); |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1493 | nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1494 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1495 | av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[1].as_mv, |
| 1496 | &mi->bmi[j].ref_mv[1].as_mv, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1497 | #if CONFIG_REF_MV |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1498 | is_compound, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1499 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1500 | nmvc, allow_hp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1501 | } else if (b_mode == NEW_NEARESTMV || b_mode == NEW_NEARMV) { |
| 1502 | #if CONFIG_REF_MV |
Yaowu Xu | 4306b6e | 2016-09-27 12:55:32 -0700 | [diff] [blame] | 1503 | int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame); |
| 1504 | int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type], |
| 1505 | mbmi_ext->ref_mv_stack[rf_type], 0, |
| 1506 | mbmi->ref_mv_idx); |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1507 | nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1508 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1509 | av1_encode_mv(cpi, w, &mi->bmi[j].as_mv[0].as_mv, |
| 1510 | &mi->bmi[j].ref_mv[0].as_mv, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1511 | #if CONFIG_REF_MV |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1512 | is_compound, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1513 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1514 | nmvc, allow_hp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1515 | } |
| 1516 | #endif // CONFIG_EXT_INTER |
| 1517 | } |
| 1518 | } |
| 1519 | } else { |
| 1520 | #if CONFIG_EXT_INTER |
| 1521 | if (mode == NEWMV || mode == NEWFROMNEARMV || mode == NEW_NEWMV) { |
| 1522 | #else |
| 1523 | if (mode == NEWMV) { |
| 1524 | #endif // CONFIG_EXT_INTER |
| 1525 | int_mv ref_mv; |
| 1526 | for (ref = 0; ref < 1 + is_compound; ++ref) { |
| 1527 | #if CONFIG_REF_MV |
Yaowu Xu | 4306b6e | 2016-09-27 12:55:32 -0700 | [diff] [blame] | 1528 | int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame); |
| 1529 | int nmv_ctx = av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type], |
| 1530 | mbmi_ext->ref_mv_stack[rf_type], ref, |
| 1531 | mbmi->ref_mv_idx); |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1532 | nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1533 | #endif |
| 1534 | ref_mv = mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][0]; |
| 1535 | #if CONFIG_EXT_INTER |
| 1536 | if (mode == NEWFROMNEARMV) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1537 | av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, |
| 1538 | &mbmi_ext->ref_mvs[mbmi->ref_frame[ref]][1].as_mv, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1539 | #if CONFIG_REF_MV |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1540 | is_compound, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1541 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1542 | nmvc, allow_hp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1543 | else |
| 1544 | #endif // CONFIG_EXT_INTER |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1545 | av1_encode_mv(cpi, w, &mbmi->mv[ref].as_mv, &ref_mv.as_mv, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1546 | #if CONFIG_REF_MV |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1547 | is_compound, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1548 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1549 | nmvc, allow_hp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1550 | } |
| 1551 | #if CONFIG_EXT_INTER |
| 1552 | } else if (mode == NEAREST_NEWMV || mode == NEAR_NEWMV) { |
| 1553 | #if CONFIG_REF_MV |
Yaowu Xu | 4306b6e | 2016-09-27 12:55:32 -0700 | [diff] [blame] | 1554 | int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame); |
| 1555 | int nmv_ctx = |
| 1556 | av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type], |
| 1557 | mbmi_ext->ref_mv_stack[rf_type], 1, mbmi->ref_mv_idx); |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1558 | nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1559 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1560 | av1_encode_mv(cpi, w, &mbmi->mv[1].as_mv, |
| 1561 | &mbmi_ext->ref_mvs[mbmi->ref_frame[1]][0].as_mv, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1562 | #if CONFIG_REF_MV |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1563 | is_compound, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1564 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1565 | nmvc, allow_hp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1566 | } else if (mode == NEW_NEARESTMV || mode == NEW_NEARMV) { |
| 1567 | #if CONFIG_REF_MV |
Yaowu Xu | 4306b6e | 2016-09-27 12:55:32 -0700 | [diff] [blame] | 1568 | int8_t rf_type = av1_ref_frame_type(mbmi->ref_frame); |
| 1569 | int nmv_ctx = |
| 1570 | av1_nmv_ctx(mbmi_ext->ref_mv_count[rf_type], |
| 1571 | mbmi_ext->ref_mv_stack[rf_type], 0, mbmi->ref_mv_idx); |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1572 | nmv_context *nmvc = &cm->fc->nmvc[nmv_ctx]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1573 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1574 | av1_encode_mv(cpi, w, &mbmi->mv[0].as_mv, |
| 1575 | &mbmi_ext->ref_mvs[mbmi->ref_frame[0]][0].as_mv, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1576 | #if CONFIG_REF_MV |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1577 | is_compound, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1578 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1579 | nmvc, allow_hp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1580 | #endif // CONFIG_EXT_INTER |
| 1581 | } |
| 1582 | } |
| 1583 | |
| 1584 | #if CONFIG_EXT_INTER |
| 1585 | if (cpi->common.reference_mode != COMPOUND_REFERENCE && |
| 1586 | #if CONFIG_SUPERTX |
| 1587 | !supertx_enabled && |
| 1588 | #endif // CONFIG_SUPERTX |
| 1589 | is_interintra_allowed(mbmi)) { |
| 1590 | const int interintra = mbmi->ref_frame[1] == INTRA_FRAME; |
| 1591 | const int bsize_group = size_group_lookup[bsize]; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1592 | aom_write(w, interintra, cm->fc->interintra_prob[bsize_group]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1593 | if (interintra) { |
| 1594 | write_interintra_mode(w, mbmi->interintra_mode, |
| 1595 | cm->fc->interintra_mode_prob[bsize_group]); |
| 1596 | if (is_interintra_wedge_used(bsize)) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1597 | aom_write(w, mbmi->use_wedge_interintra, |
| 1598 | cm->fc->wedge_interintra_prob[bsize]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1599 | if (mbmi->use_wedge_interintra) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1600 | aom_write_literal(w, mbmi->interintra_wedge_index, |
| 1601 | get_wedge_bits_lookup(bsize)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1602 | assert(mbmi->interintra_wedge_sign == 0); |
| 1603 | } |
| 1604 | } |
| 1605 | } |
| 1606 | } |
| 1607 | #endif // CONFIG_EXT_INTER |
| 1608 | |
Yue Chen | cb60b18 | 2016-10-13 15:18:22 -0700 | [diff] [blame] | 1609 | #if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1610 | #if CONFIG_SUPERTX |
| 1611 | if (!supertx_enabled) |
| 1612 | #endif // CONFIG_SUPERTX |
| 1613 | #if CONFIG_EXT_INTER |
| 1614 | if (mbmi->ref_frame[1] != INTRA_FRAME) |
| 1615 | #endif // CONFIG_EXT_INTER |
Yue Chen | 69f18e1 | 2016-09-08 14:48:15 -0700 | [diff] [blame] | 1616 | write_motion_mode(cm, mbmi, w); |
Yue Chen | cb60b18 | 2016-10-13 15:18:22 -0700 | [diff] [blame] | 1617 | #endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1618 | |
| 1619 | #if CONFIG_EXT_INTER |
| 1620 | if (cpi->common.reference_mode != SINGLE_REFERENCE && |
Sarah Parker | 6fdc853 | 2016-11-16 17:47:13 -0800 | [diff] [blame] | 1621 | is_inter_compound_mode(mbmi->mode) |
Yue Chen | cb60b18 | 2016-10-13 15:18:22 -0700 | [diff] [blame] | 1622 | #if CONFIG_MOTION_VAR |
Sarah Parker | 6fdc853 | 2016-11-16 17:47:13 -0800 | [diff] [blame] | 1623 | && mbmi->motion_mode == SIMPLE_TRANSLATION |
Yue Chen | cb60b18 | 2016-10-13 15:18:22 -0700 | [diff] [blame] | 1624 | #endif // CONFIG_MOTION_VAR |
Sarah Parker | 6fdc853 | 2016-11-16 17:47:13 -0800 | [diff] [blame] | 1625 | ) { |
| 1626 | av1_write_token( |
| 1627 | w, av1_compound_type_tree, cm->fc->compound_type_prob[bsize], |
| 1628 | &compound_type_encodings[mbmi->interinter_compound_data.type]); |
| 1629 | if (mbmi->interinter_compound_data.type == COMPOUND_WEDGE) { |
| 1630 | aom_write_literal(w, mbmi->interinter_compound_data.wedge_index, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1631 | get_wedge_bits_lookup(bsize)); |
Sarah Parker | 6fdc853 | 2016-11-16 17:47:13 -0800 | [diff] [blame] | 1632 | aom_write_bit(w, mbmi->interinter_compound_data.wedge_sign); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1633 | } |
Sarah Parker | 569edda | 2016-12-14 14:57:38 -0800 | [diff] [blame] | 1634 | #if CONFIG_COMPOUND_SEGMENT |
| 1635 | else if (mbmi->interinter_compound_data.type == COMPOUND_SEG) { |
| 1636 | aom_write_bit(w, mbmi->interinter_compound_data.which); |
| 1637 | } |
| 1638 | #endif // CONFIG_COMPOUND_SEGMENT |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1639 | } |
| 1640 | #endif // CONFIG_EXT_INTER |
| 1641 | |
Yue Chen | 69f18e1 | 2016-09-08 14:48:15 -0700 | [diff] [blame] | 1642 | #if CONFIG_WARPED_MOTION |
| 1643 | if (mbmi->motion_mode != WARPED_CAUSAL) |
| 1644 | #endif // CONFIG_WARPED_MOTION |
| 1645 | #if CONFIG_EXT_INTERP || CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION |
| 1646 | write_mb_interp_filter(cpi, xd, w); |
| 1647 | #endif // CONFIG_EXT_INTERP || CONFIG_DUAL_FILTER || CONFIG_WARPED_MOTION |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1648 | } |
| 1649 | |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1650 | write_tx_type(cm, mbmi, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1651 | #if CONFIG_SUPERTX |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1652 | supertx_enabled, |
Nathan E. Egge | 93878c4 | 2016-05-03 10:01:32 -0400 | [diff] [blame] | 1653 | #endif |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1654 | w); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1655 | } |
| 1656 | |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1657 | #if CONFIG_DELTA_Q |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1658 | static void write_mb_modes_kf(AV1_COMMON *cm, MACROBLOCKD *xd, |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1659 | MODE_INFO **mi_8x8, aom_writer *w) { |
| 1660 | int skip; |
| 1661 | #else |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1662 | static void write_mb_modes_kf(AV1_COMMON *cm, const MACROBLOCKD *xd, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1663 | MODE_INFO **mi_8x8, aom_writer *w) { |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1664 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1665 | const struct segmentation *const seg = &cm->seg; |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 1666 | struct segmentation_probs *const segp = &cm->fc->seg; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1667 | const MODE_INFO *const mi = mi_8x8[0]; |
| 1668 | const MODE_INFO *const above_mi = xd->above_mi; |
| 1669 | const MODE_INFO *const left_mi = xd->left_mi; |
| 1670 | const MB_MODE_INFO *const mbmi = &mi->mbmi; |
| 1671 | const BLOCK_SIZE bsize = mbmi->sb_type; |
Jingning Han | 5226184 | 2016-12-14 12:17:49 -0800 | [diff] [blame] | 1672 | #if CONFIG_CB4X4 |
| 1673 | const int unify_bsize = 1; |
| 1674 | #else |
| 1675 | const int unify_bsize = 0; |
| 1676 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1677 | |
| 1678 | if (seg->update_map) write_segment_id(w, seg, segp, mbmi->segment_id); |
| 1679 | |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1680 | #if CONFIG_DELTA_Q |
| 1681 | skip = write_skip(cm, xd, mbmi->segment_id, mi, w); |
| 1682 | if (cm->delta_q_present_flag) { |
| 1683 | int mi_row = (-xd->mb_to_top_edge) >> 6; |
| 1684 | int mi_col = (-xd->mb_to_left_edge) >> 6; |
| 1685 | int super_block_upper_left = ((mi_row & 7) == 0) && ((mi_col & 7) == 0); |
| 1686 | if ((bsize != BLOCK_64X64 || skip == 0) && super_block_upper_left) { |
Thomas Davies | f693610 | 2016-09-05 16:51:31 +0100 | [diff] [blame] | 1687 | int reduced_delta_qindex = |
| 1688 | (mbmi->current_q_index - xd->prev_qindex) / cm->delta_q_res; |
| 1689 | write_delta_qindex(cm, reduced_delta_qindex, w); |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1690 | xd->prev_qindex = mbmi->current_q_index; |
| 1691 | } |
| 1692 | } |
| 1693 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1694 | write_skip(cm, xd, mbmi->segment_id, mi, w); |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 1695 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1696 | |
| 1697 | if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT && |
| 1698 | !xd->lossless[mbmi->segment_id]) |
| 1699 | write_selected_tx_size(cm, xd, w); |
| 1700 | |
Jingning Han | 5226184 | 2016-12-14 12:17:49 -0800 | [diff] [blame] | 1701 | if (bsize >= BLOCK_8X8 || unify_bsize) { |
Nathan E. Egge | 10ba2be | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 1702 | #if CONFIG_EC_MULTISYMBOL |
Nathan E. Egge | 3ef926e | 2016-09-07 18:20:41 -0400 | [diff] [blame] | 1703 | aom_write_symbol(w, av1_intra_mode_ind[mbmi->mode], |
| 1704 | get_y_mode_cdf(cm, mi, above_mi, left_mi, 0), INTRA_MODES); |
| 1705 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1706 | write_intra_mode(w, mbmi->mode, |
| 1707 | get_y_mode_probs(cm, mi, above_mi, left_mi, 0)); |
Nathan E. Egge | 3ef926e | 2016-09-07 18:20:41 -0400 | [diff] [blame] | 1708 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1709 | } else { |
| 1710 | const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize]; |
| 1711 | const int num_4x4_h = num_4x4_blocks_high_lookup[bsize]; |
| 1712 | int idx, idy; |
| 1713 | |
| 1714 | for (idy = 0; idy < 2; idy += num_4x4_h) { |
| 1715 | for (idx = 0; idx < 2; idx += num_4x4_w) { |
| 1716 | const int block = idy * 2 + idx; |
Nathan E. Egge | 10ba2be | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 1717 | #if CONFIG_EC_MULTISYMBOL |
Nathan E. Egge | 3ef926e | 2016-09-07 18:20:41 -0400 | [diff] [blame] | 1718 | aom_write_symbol(w, av1_intra_mode_ind[mi->bmi[block].as_mode], |
| 1719 | get_y_mode_cdf(cm, mi, above_mi, left_mi, block), |
| 1720 | INTRA_MODES); |
| 1721 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1722 | write_intra_mode(w, mi->bmi[block].as_mode, |
| 1723 | get_y_mode_probs(cm, mi, above_mi, left_mi, block)); |
Nathan E. Egge | 3ef926e | 2016-09-07 18:20:41 -0400 | [diff] [blame] | 1724 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1725 | } |
| 1726 | } |
| 1727 | } |
Nathan E. Egge | dd28aed | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 1728 | #if CONFIG_EC_MULTISYMBOL |
Nathan E. Egge | 380cb1a | 2016-09-08 10:13:42 -0400 | [diff] [blame] | 1729 | aom_write_symbol(w, av1_intra_mode_ind[mbmi->uv_mode], |
| 1730 | cm->fc->uv_mode_cdf[mbmi->mode], INTRA_MODES); |
| 1731 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1732 | write_intra_mode(w, mbmi->uv_mode, cm->fc->uv_mode_prob[mbmi->mode]); |
Nathan E. Egge | 380cb1a | 2016-09-08 10:13:42 -0400 | [diff] [blame] | 1733 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1734 | #if CONFIG_EXT_INTRA |
| 1735 | write_intra_angle_info(cm, xd, w); |
| 1736 | #endif // CONFIG_EXT_INTRA |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 1737 | #if CONFIG_PALETTE |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1738 | if (bsize >= BLOCK_8X8 && cm->allow_screen_content_tools) |
| 1739 | write_palette_mode_info(cm, xd, mi, w); |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 1740 | #endif // CONFIG_PALETTE |
hui su | 5db9743 | 2016-10-14 16:10:14 -0700 | [diff] [blame] | 1741 | #if CONFIG_FILTER_INTRA |
| 1742 | if (bsize >= BLOCK_8X8) write_filter_intra_mode_info(cm, mbmi, w); |
| 1743 | #endif // CONFIG_FILTER_INTRA |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1744 | |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1745 | write_tx_type(cm, mbmi, |
| 1746 | #if CONFIG_SUPERTX |
| 1747 | 0, |
Nathan E. Egge | 72762a2 | 2016-09-07 17:12:07 -0400 | [diff] [blame] | 1748 | #endif |
Jingning Han | 2a4da94 | 2016-11-03 18:31:30 -0700 | [diff] [blame] | 1749 | w); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1750 | } |
| 1751 | |
| 1752 | #if CONFIG_SUPERTX |
| 1753 | #define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \ |
| 1754 | mi_row, mi_col) \ |
| 1755 | write_modes_b(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col) |
| 1756 | #else |
| 1757 | #define write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \ |
| 1758 | mi_row, mi_col) \ |
| 1759 | write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col) |
Alex Converse | ec6fb64 | 2016-10-19 11:31:48 -0700 | [diff] [blame] | 1760 | #endif // CONFIG_SUPERTX |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1761 | |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 1762 | #if CONFIG_RD_DEBUG |
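// Prints the position, block size, transform size and prediction mode of a
// mode info unit, for use when an RD debug check fails.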
| 1763 | static void dump_mode_info(MODE_INFO *mi) { |
| 1764 | printf("\nmi->mbmi.mi_row == %d\n", mi->mbmi.mi_row); |
| 1765 | printf("&& mi->mbmi.mi_col == %d\n", mi->mbmi.mi_col); |
| 1766 | printf("&& mi->mbmi.sb_type == %d\n", mi->mbmi.sb_type); |
| 1767 | printf("&& mi->mbmi.tx_size == %d\n", mi->mbmi.tx_size); |
| 1768 | if (mi->mbmi.sb_type >= BLOCK_8X8) { |
| 1769 | printf("&& mi->mbmi.mode == %d\n", mi->mbmi.mode); |
| 1770 | } else { |
| 1771 | printf("&& mi->bmi[0].as_mode == %d\n", mi->bmi[0].as_mode); |
| 1772 | } |
| 1773 | } |
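// Returns 1 (after printing the conflicting values) if the coefficient cost
// recorded during the RD search does not match the cost measured while
// packing the tokens for the given plane; returns 0 otherwise.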
Angie Chiang | d02001d | 2016-11-06 15:31:49 -0800 | [diff] [blame] | 1774 | static int rd_token_stats_mismatch(RD_STATS *rd_stats, TOKEN_STATS *token_stats, |
| 1775 | int plane) { |
| 1776 | if (rd_stats->txb_coeff_cost[plane] != token_stats->cost) { |
Angie Chiang | 3963d63 | 2016-11-10 18:41:40 -0800 | [diff] [blame] | 1777 | #if CONFIG_VAR_TX |
Angie Chiang | d02001d | 2016-11-06 15:31:49 -0800 | [diff] [blame] | 1778 | int r, c; |
Angie Chiang | 3963d63 | 2016-11-10 18:41:40 -0800 | [diff] [blame] | 1779 | #endif |
Angie Chiang | d02001d | 2016-11-06 15:31:49 -0800 | [diff] [blame] | 1780 | printf("\nplane %d rd_stats->txb_coeff_cost %d token_stats->cost %d\n", |
| 1781 | plane, rd_stats->txb_coeff_cost[plane], token_stats->cost); |
| 1782 | #if CONFIG_VAR_TX |
| 1783 | printf("rd txb_coeff_cost_map\n"); |
| 1784 | for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) { |
| 1785 | for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) { |
| 1786 | printf("%d ", rd_stats->txb_coeff_cost_map[plane][r][c]); |
| 1787 | } |
| 1788 | printf("\n"); |
| 1789 | } |
| 1790 | |
| 1791 | printf("pack txb_coeff_cost_map\n"); |
| 1792 | for (r = 0; r < TXB_COEFF_COST_MAP_SIZE; ++r) { |
| 1793 | for (c = 0; c < TXB_COEFF_COST_MAP_SIZE; ++c) { |
| 1794 | printf("%d ", token_stats->txb_coeff_cost_map[r][c]); |
| 1795 | } |
| 1796 | printf("\n"); |
| 1797 | } |
| 1798 | #endif |
| 1799 | return 1; |
| 1800 | } |
| 1801 | return 0; |
| 1802 | } |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 1803 | #endif |
| 1804 | |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 1805 | #if CONFIG_PVQ |
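// Returns the PVQ block info at the current read position of the queue and
// advances the read position.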
| 1806 | PVQ_INFO *get_pvq_block(PVQ_QUEUE *pvq_q) { |
| 1807 | PVQ_INFO *pvq; |
| 1808 | |
| 1809 | assert(pvq_q->curr_pos <= pvq_q->last_pos); |
| 1810 | assert(pvq_q->curr_pos < pvq_q->buf_len); |
| 1811 | |
| 1812 | pvq = pvq_q->buf + pvq_q->curr_pos; |
| 1813 | ++pvq_q->curr_pos; |
| 1814 | |
| 1815 | return pvq; |
| 1816 | } |
| 1817 | #endif |
| 1818 | |
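// Writes the mode info and the coefficient tokens (or PVQ symbols) for the
// block at (mi_row, mi_col).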
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1819 | static void write_modes_b(AV1_COMP *cpi, const TileInfo *const tile, |
| 1820 | aom_writer *w, const TOKENEXTRA **tok, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1821 | const TOKENEXTRA *const tok_end, |
| 1822 | #if CONFIG_SUPERTX |
| 1823 | int supertx_enabled, |
| 1824 | #endif |
| 1825 | int mi_row, int mi_col) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1826 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1827 | MACROBLOCKD *const xd = &cpi->td.mb.e_mbd; |
| 1828 | MODE_INFO *m; |
| 1829 | int plane; |
| 1830 | int bh, bw; |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 1831 | #if CONFIG_PVQ |
| 1832 | MB_MODE_INFO *mbmi; |
| 1833 | BLOCK_SIZE bsize; |
| 1834 | od_adapt_ctx *adapt; |
| 1835 | (void)tok; |
| 1836 | (void)tok_end; |
| 1837 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1838 | xd->mi = cm->mi_grid_visible + (mi_row * cm->mi_stride + mi_col); |
| 1839 | m = xd->mi[0]; |
| 1840 | |
| 1841 | assert(m->mbmi.sb_type <= cm->sb_size); |
| 1842 | |
Jingning Han | c709e1f | 2016-12-06 14:48:09 -0800 | [diff] [blame] | 1843 | bh = mi_size_high[m->mbmi.sb_type]; |
| 1844 | bw = mi_size_wide[m->mbmi.sb_type]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1845 | |
| 1846 | cpi->td.mb.mbmi_ext = cpi->mbmi_ext_base + (mi_row * cm->mi_cols + mi_col); |
| 1847 | |
| 1848 | set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols); |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 1849 | #if CONFIG_PVQ |
| 1850 | mbmi = &m->mbmi; |
| 1851 | bsize = mbmi->sb_type; |
| 1852 | adapt = &cpi->td.mb.daala_enc.state.adapt; |
| 1853 | #endif |
| 1854 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1855 | if (frame_is_intra_only(cm)) { |
| 1856 | write_mb_modes_kf(cm, xd, xd->mi, w); |
| 1857 | } else { |
| 1858 | #if CONFIG_VAR_TX |
| 1859 | xd->above_txfm_context = cm->above_txfm_context + mi_col; |
| 1860 | xd->left_txfm_context = |
| 1861 | xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK); |
| 1862 | #endif |
| 1863 | #if CONFIG_EXT_INTERP |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 1864 | // av1_is_interp_needed needs the ref frame buffers set up to look |
| 1865 | // up if they are scaled. av1_is_interp_needed is in turn needed by |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1866 | // write_switchable_interp_filter, which is called by pack_inter_mode_mvs. |
| 1867 | set_ref_ptrs(cm, xd, m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]); |
| 1868 | #endif // CONFIG_EXT_INTERP |
| 1869 | #if 0 |
| 1870 | // NOTE(zoeliu): For debug |
| 1871 | if (cm->current_video_frame == FRAME_TO_CHECK && cm->show_frame == 1) { |
| 1872 | const PREDICTION_MODE mode = m->mbmi.mode; |
| 1873 | const int segment_id = m->mbmi.segment_id; |
| 1874 | const BLOCK_SIZE bsize = m->mbmi.sb_type; |
| 1875 | |
| 1876 | // For sub8x8, simply dump out the first sub8x8 block info |
| 1877 | const PREDICTION_MODE b_mode = |
| 1878 | (bsize < BLOCK_8X8) ? m->bmi[0].as_mode : -1; |
| 1879 | const int mv_x = (bsize < BLOCK_8X8) ? |
| 1880 | m->bmi[0].as_mv[0].as_mv.row : m->mbmi.mv[0].as_mv.row; |
| 1881 | const int mv_y = (bsize < BLOCK_8X8) ? |
| 1882 | m->bmi[0].as_mv[0].as_mv.col : m->mbmi.mv[0].as_mv.col; |
| 1883 | |
| 1884 | printf("Before pack_inter_mode_mvs(): " |
| 1885 | "Frame=%d, (mi_row,mi_col)=(%d,%d), " |
| 1886 | "mode=%d, segment_id=%d, bsize=%d, b_mode=%d, " |
| 1887 | "mv[0]=(%d, %d), ref[0]=%d, ref[1]=%d\n", |
| 1888 | cm->current_video_frame, mi_row, mi_col, |
| 1889 | mode, segment_id, bsize, b_mode, mv_x, mv_y, |
| 1890 | m->mbmi.ref_frame[0], m->mbmi.ref_frame[1]); |
| 1891 | } |
| 1892 | #endif // 0 |
| 1893 | pack_inter_mode_mvs(cpi, m, |
| 1894 | #if CONFIG_SUPERTX |
| 1895 | supertx_enabled, |
| 1896 | #endif |
| 1897 | w); |
| 1898 | } |
| 1899 | |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 1900 | #if CONFIG_PALETTE |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1901 | for (plane = 0; plane <= 1; ++plane) { |
| 1902 | if (m->mbmi.palette_mode_info.palette_size[plane] > 0) { |
Jingning Han | ae5cfde | 2016-11-30 12:01:44 -0800 | [diff] [blame] | 1903 | const int rows = |
| 1904 | block_size_high[m->mbmi.sb_type] >> (xd->plane[plane].subsampling_y); |
| 1905 | const int cols = |
| 1906 | block_size_wide[m->mbmi.sb_type] >> (xd->plane[plane].subsampling_x); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1907 | assert(*tok < tok_end); |
| 1908 | pack_palette_tokens(w, tok, m->mbmi.palette_mode_info.palette_size[plane], |
| 1909 | rows * cols - 1); |
| 1910 | assert(*tok < tok_end + m->mbmi.skip); |
| 1911 | } |
| 1912 | } |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 1913 | #endif // CONFIG_PALETTE |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 1914 | #if !CONFIG_PVQ |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1915 | #if CONFIG_SUPERTX |
| 1916 | if (supertx_enabled) return; |
| 1917 | #endif // CONFIG_SUPERTX |
| 1918 | |
iole moccagatta | f25a4cf | 2016-11-11 23:57:57 -0800 | [diff] [blame] | 1919 | #if CONFIG_COEF_INTERLEAVE |
| 1920 | if (!m->mbmi.skip) { |
| 1921 | const struct macroblockd_plane *const pd_y = &xd->plane[0]; |
| 1922 | const struct macroblockd_plane *const pd_c = &xd->plane[1]; |
| 1923 | const TX_SIZE tx_log2_y = m->mbmi.tx_size; |
| 1924 | const TX_SIZE tx_log2_c = get_uv_tx_size(&m->mbmi, pd_c); |
| 1925 | const int tx_sz_y = (1 << tx_log2_y); |
| 1926 | const int tx_sz_c = (1 << tx_log2_c); |
| 1927 | |
| 1928 | const BLOCK_SIZE plane_bsize_y = |
| 1929 | get_plane_block_size(AOMMAX(m->mbmi.sb_type, 3), pd_y); |
| 1930 | const BLOCK_SIZE plane_bsize_c = |
| 1931 | get_plane_block_size(AOMMAX(m->mbmi.sb_type, 3), pd_c); |
| 1932 | |
| 1933 | const int num_4x4_w_y = num_4x4_blocks_wide_lookup[plane_bsize_y]; |
| 1934 | const int num_4x4_w_c = num_4x4_blocks_wide_lookup[plane_bsize_c]; |
| 1935 | const int num_4x4_h_y = num_4x4_blocks_high_lookup[plane_bsize_y]; |
| 1936 | const int num_4x4_h_c = num_4x4_blocks_high_lookup[plane_bsize_c]; |
| 1937 | |
| 1938 | const int max_4x4_w_y = get_max_4x4_size(num_4x4_w_y, xd->mb_to_right_edge, |
| 1939 | pd_y->subsampling_x); |
| 1940 | const int max_4x4_h_y = get_max_4x4_size(num_4x4_h_y, xd->mb_to_bottom_edge, |
| 1941 | pd_y->subsampling_y); |
| 1942 | const int max_4x4_w_c = get_max_4x4_size(num_4x4_w_c, xd->mb_to_right_edge, |
| 1943 | pd_c->subsampling_x); |
| 1944 | const int max_4x4_h_c = get_max_4x4_size(num_4x4_h_c, xd->mb_to_bottom_edge, |
| 1945 | pd_c->subsampling_y); |
| 1946 | |
| 1947 | // The max_4x4_w/h may be smaller than tx_sz under some corner cases, |
| 1948 | // i.e. when the SB is split by tile boundaries.
| 1949 | const int tu_num_w_y = (max_4x4_w_y + tx_sz_y - 1) / tx_sz_y; |
| 1950 | const int tu_num_h_y = (max_4x4_h_y + tx_sz_y - 1) / tx_sz_y; |
| 1951 | const int tu_num_w_c = (max_4x4_w_c + tx_sz_c - 1) / tx_sz_c; |
| 1952 | const int tu_num_h_c = (max_4x4_h_c + tx_sz_c - 1) / tx_sz_c; |
| 1953 | const int tu_num_y = tu_num_w_y * tu_num_h_y; |
| 1954 | const int tu_num_c = tu_num_w_c * tu_num_h_c; |
| 1955 | |
| 1956 | int tu_idx_y = 0, tu_idx_c = 0; |
| 1957 | TOKEN_STATS token_stats; |
| 1958 | init_token_stats(&token_stats); |
| 1959 | |
| 1960 | assert(*tok < tok_end); |
| 1961 | |
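    // Interleave the tokens: each luma TU is followed by the corresponding
    // chroma TUs while chroma TUs remain.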
| 1962 | while (tu_idx_y < tu_num_y) { |
| 1963 | pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_y, &token_stats); |
| 1964 | assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN); |
| 1965 | (*tok)++; |
| 1966 | tu_idx_y++; |
| 1967 | |
| 1968 | if (tu_idx_c < tu_num_c) { |
| 1969 | pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats); |
| 1970 | assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN); |
| 1971 | (*tok)++; |
| 1972 | |
| 1973 | pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats); |
| 1974 | assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN); |
| 1975 | (*tok)++; |
| 1976 | |
| 1977 | tu_idx_c++; |
| 1978 | } |
| 1979 | } |
| 1980 | |
| 1981 | // In the 4:2:2 case, it's possible that Chroma has more TUs than Luma
| 1982 | while (tu_idx_c < tu_num_c) { |
| 1983 | pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats); |
| 1984 | assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN); |
| 1985 | (*tok)++; |
| 1986 | |
| 1987 | pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx_log2_c, &token_stats); |
| 1988 | assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN); |
| 1989 | (*tok)++; |
| 1990 | |
| 1991 | tu_idx_c++; |
| 1992 | } |
| 1993 | } |
| 1994 | #else // CONFIG_COEF_INTERLEAVE |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1995 | if (!m->mbmi.skip) { |
| 1996 | assert(*tok < tok_end); |
| 1997 | for (plane = 0; plane < MAX_MB_PLANE; ++plane) { |
Angie Chiang | 3963d63 | 2016-11-10 18:41:40 -0800 | [diff] [blame] | 1998 | MB_MODE_INFO *mbmi = &m->mbmi; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 1999 | #if CONFIG_VAR_TX |
| 2000 | const struct macroblockd_plane *const pd = &xd->plane[plane]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2001 | BLOCK_SIZE bsize = mbmi->sb_type; |
| 2002 | const BLOCK_SIZE plane_bsize = |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2003 | get_plane_block_size(AOMMAX(bsize, BLOCK_8X8), pd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2004 | |
Jingning Han | 42a0fb3 | 2016-10-31 10:43:31 -0700 | [diff] [blame] | 2005 | const int num_4x4_w = |
| 2006 | block_size_wide[plane_bsize] >> tx_size_wide_log2[0]; |
| 2007 | const int num_4x4_h = |
| 2008 | block_size_high[plane_bsize] >> tx_size_wide_log2[0]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2009 | int row, col; |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 2010 | TOKEN_STATS token_stats; |
Angie Chiang | d02001d | 2016-11-06 15:31:49 -0800 | [diff] [blame] | 2011 | init_token_stats(&token_stats); |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 2012 | |
Jingning Han | fe45b21 | 2016-11-22 10:30:23 -0800 | [diff] [blame] | 2013 | if (is_inter_block(mbmi)) { |
Jingning Han | 70e5f3f | 2016-11-09 17:03:07 -0800 | [diff] [blame] | 2014 | const TX_SIZE max_tx_size = max_txsize_rect_lookup[plane_bsize]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2015 | int block = 0; |
Jingning Han | 42a0fb3 | 2016-10-31 10:43:31 -0700 | [diff] [blame] | 2016 | const int step = |
| 2017 | tx_size_wide_unit[max_tx_size] * tx_size_high_unit[max_tx_size]; |
| 2018 | const int bkw = tx_size_wide_unit[max_tx_size]; |
| 2019 | const int bkh = tx_size_high_unit[max_tx_size]; |
| 2020 | for (row = 0; row < num_4x4_h; row += bkh) { |
| 2021 | for (col = 0; col < num_4x4_w; col += bkw) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2022 | pack_txb_tokens(w, tok, tok_end, xd, mbmi, plane, plane_bsize, |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 2023 | cm->bit_depth, block, row, col, max_tx_size, |
| 2024 | &token_stats); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2025 | block += step; |
| 2026 | } |
| 2027 | } |
Angie Chiang | d02001d | 2016-11-06 15:31:49 -0800 | [diff] [blame] | 2028 | #if CONFIG_RD_DEBUG |
Angie Chiang | 3963d63 | 2016-11-10 18:41:40 -0800 | [diff] [blame] | 2029 | if (mbmi->sb_type >= BLOCK_8X8 && |
| 2030 | rd_token_stats_mismatch(&m->mbmi.rd_stats, &token_stats, plane)) { |
Angie Chiang | d02001d | 2016-11-06 15:31:49 -0800 | [diff] [blame] | 2031 | dump_mode_info(m); |
| 2032 | assert(0); |
| 2033 | } |
Jingning Han | fe45b21 | 2016-11-22 10:30:23 -0800 | [diff] [blame] | 2034 | #endif // CONFIG_RD_DEBUG |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2035 | } else { |
| 2036 | TX_SIZE tx = plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane]) |
| 2037 | : m->mbmi.tx_size; |
Jingning Han | 42a0fb3 | 2016-10-31 10:43:31 -0700 | [diff] [blame] | 2038 | const int bkw = tx_size_wide_unit[tx]; |
| 2039 | const int bkh = tx_size_high_unit[tx]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2040 | |
Jingning Han | 42a0fb3 | 2016-10-31 10:43:31 -0700 | [diff] [blame] | 2041 | for (row = 0; row < num_4x4_h; row += bkh) |
| 2042 | for (col = 0; col < num_4x4_w; col += bkw) |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 2043 | pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2044 | } |
| 2045 | #else |
| 2046 | TX_SIZE tx = |
| 2047 | plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane]) : m->mbmi.tx_size; |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 2048 | TOKEN_STATS token_stats; |
Angie Chiang | 3963d63 | 2016-11-10 18:41:40 -0800 | [diff] [blame] | 2049 | init_token_stats(&token_stats); |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 2050 | pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats); |
Angie Chiang | 3963d63 | 2016-11-10 18:41:40 -0800 | [diff] [blame] | 2051 | #if CONFIG_RD_DEBUG |
| 2052 | if (is_inter_block(mbmi) && mbmi->sb_type >= BLOCK_8X8 && |
| 2053 | rd_token_stats_mismatch(&m->mbmi.rd_stats, &token_stats, plane)) { |
| 2054 | dump_mode_info(m); |
| 2055 | assert(0); |
| 2056 | } |
| 2057 | #else |
| 2058 | (void)mbmi; |
Jingning Han | fe45b21 | 2016-11-22 10:30:23 -0800 | [diff] [blame] | 2059 | #endif // CONFIG_RD_DEBUG |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2060 | #endif // CONFIG_VAR_TX |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 2061 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2062 | assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN); |
| 2063 | (*tok)++; |
| 2064 | } |
| 2065 | } |
iole moccagatta | f25a4cf | 2016-11-11 23:57:57 -0800 | [diff] [blame] | 2066 | #endif // CONFIG_COEF_INTERLEAVE |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 2067 | #else |
| 2068 | // PVQ writes its tokens (i.e. symbols) here. |
| 2069 | if (!m->mbmi.skip) { |
| 2070 | for (plane = 0; plane < MAX_MB_PLANE; ++plane) { |
| 2071 | PVQ_INFO *pvq; |
| 2072 | TX_SIZE tx_size = |
| 2073 | plane ? get_uv_tx_size(&m->mbmi, &xd->plane[plane]) : m->mbmi.tx_size; |
| 2074 | int idx, idy; |
| 2075 | const struct macroblockd_plane *const pd = &xd->plane[plane]; |
| 2076 | int num_4x4_w; |
| 2077 | int num_4x4_h; |
| 2078 | int max_blocks_wide; |
| 2079 | int max_blocks_high; |
| 2080 | int step = (1 << tx_size); |
| 2081 | const int step_xy = 1 << (tx_size << 1); |
| 2082 | int block = 0; |
| 2083 | |
| 2084 | if (tx_size == TX_4X4 && bsize <= BLOCK_8X8) { |
| 2085 | num_4x4_w = 2 >> xd->plane[plane].subsampling_x; |
| 2086 | num_4x4_h = 2 >> xd->plane[plane].subsampling_y; |
| 2087 | } else { |
| 2088 | num_4x4_w = |
| 2089 | num_4x4_blocks_wide_lookup[bsize] >> xd->plane[plane].subsampling_x; |
| 2090 | num_4x4_h = |
| 2091 | num_4x4_blocks_high_lookup[bsize] >> xd->plane[plane].subsampling_y; |
| 2092 | } |
| 2093 | // TODO: Do we need the code below for the 4x4, 4x8 and 8x4 cases as well?
| 2094 | max_blocks_wide = |
| 2095 | num_4x4_w + (xd->mb_to_right_edge >= 0 |
| 2096 | ? 0 |
| 2097 | : xd->mb_to_right_edge >> (5 + pd->subsampling_x)); |
| 2098 | max_blocks_high = |
| 2099 | num_4x4_h + (xd->mb_to_bottom_edge >= 0 |
| 2100 | ? 0 |
| 2101 | : xd->mb_to_bottom_edge >> (5 + pd->subsampling_y)); |
| 2102 | |
| 2103 | // TODO(yushin) Try to use av1_foreach_transformed_block_in_plane(). |
| 2104 | // Logic like the mb_to_right_edge/mb_to_bottom_edge stuff should |
| 2105 | // really be centralized in one place. |
| 2106 | |
| 2107 | for (idy = 0; idy < max_blocks_high; idy += step) { |
| 2108 | for (idx = 0; idx < max_blocks_wide; idx += step) { |
| 2109 | const int is_keyframe = 0; |
| 2110 | const int encode_flip = 0; |
| 2111 | const int flip = 0; |
| 2112 | const int robust = 1; |
| 2113 | int i; |
| 2114 | const int has_dc_skip = 1; |
| 2115 | int *exg = &adapt->pvq.pvq_exg[plane][tx_size][0]; |
| 2116 | int *ext = adapt->pvq.pvq_ext + tx_size * PVQ_MAX_PARTITIONS; |
| 2117 | generic_encoder *model = adapt->pvq.pvq_param_model; |
| 2118 | |
| 2119 | pvq = get_pvq_block(cpi->td.mb.pvq_q); |
| 2120 | |
| 2121 | // encode block skip info |
| 2122 | od_encode_cdf_adapt(&w->ec, pvq->ac_dc_coded, |
| 2123 | adapt->skip_cdf[2 * tx_size + (plane != 0)], 4, |
| 2124 | adapt->skip_increment); |
| 2125 | |
| 2126 | // AC coeffs coded? |
| 2127 | if (pvq->ac_dc_coded & 0x02) { |
| 2128 | assert(pvq->bs <= tx_size); |
| 2129 | for (i = 0; i < pvq->nb_bands; i++) { |
| 2130 | if (i == 0 || (!pvq->skip_rest && |
| 2131 | !(pvq->skip_dir & (1 << ((i - 1) % 3))))) { |
| 2132 | pvq_encode_partition( |
Nathan E. Egge | 6b0b4a9 | 2016-12-22 09:21:06 -0500 | [diff] [blame] | 2133 | w, pvq->qg[i], pvq->theta[i], pvq->max_theta[i], |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 2134 | pvq->y + pvq->off[i], pvq->size[i], pvq->k[i], model, adapt, |
| 2135 | exg + i, ext + i, robust || is_keyframe, |
Yushin Cho | 48f84db | 2016-11-07 21:20:17 -0800 | [diff] [blame] | 2136 | (plane != 0) * OD_TXSIZES * PVQ_MAX_PARTITIONS + |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 2137 | pvq->bs * PVQ_MAX_PARTITIONS + i, |
| 2138 | is_keyframe, i == 0 && (i < pvq->nb_bands - 1), |
| 2139 | pvq->skip_rest, encode_flip, flip); |
| 2140 | } |
| 2141 | if (i == 0 && !pvq->skip_rest && pvq->bs > 0) { |
| 2142 | od_encode_cdf_adapt( |
| 2143 | &w->ec, pvq->skip_dir, |
| 2144 | &adapt->pvq |
| 2145 | .pvq_skip_dir_cdf[(plane != 0) + 2 * (pvq->bs - 1)][0], |
| 2146 | 7, adapt->pvq.pvq_skip_dir_increment); |
| 2147 | } |
| 2148 | } |
| 2149 | } |
| 2150 | // Encode the residue of the DC coefficient, if it exists.
| 2151 | if (!has_dc_skip || (pvq->ac_dc_coded & 1)) { // DC coded? |
Nathan E. Egge | 760c27f | 2016-12-22 12:30:00 -0500 | [diff] [blame] | 2152 | generic_encode(w, &adapt->model_dc[plane], |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 2153 | abs(pvq->dq_dc_residue) - has_dc_skip, -1, |
| 2154 | &adapt->ex_dc[plane][pvq->bs][0], 2); |
| 2155 | } |
| 2156 | if ((pvq->ac_dc_coded & 1)) { // DC coded? |
Nathan E. Egge | e335fb7 | 2016-12-29 20:19:08 -0500 | [diff] [blame] | 2157 | aom_write_bit(w, pvq->dq_dc_residue < 0); |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 2158 | } |
| 2159 | block += step_xy; |
| 2160 | } |
| 2161 | } // for (idy = 0; |
| 2162 | } // for (plane = |
| 2163 | } // if (!m->mbmi.skip) |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 2164 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2165 | } |
| 2166 | |
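// Writes the partition type for a block. When the block extends past the
// right or bottom frame edge only a subset of partitions is legal, so a
// single bit (or nothing at all) is coded instead of the full symbol.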
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2167 | static void write_partition(const AV1_COMMON *const cm, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2168 | const MACROBLOCKD *const xd, int hbs, int mi_row, |
| 2169 | int mi_col, PARTITION_TYPE p, BLOCK_SIZE bsize, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2170 | aom_writer *w) { |
Jingning Han | bf9c6b7 | 2016-12-14 14:50:45 -0800 | [diff] [blame] | 2171 | const int is_partition_point = bsize >= BLOCK_8X8; |
| 2172 | const int ctx = is_partition_point |
| 2173 | ? partition_plane_context(xd, mi_row, mi_col, bsize) |
| 2174 | : 0; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2175 | const aom_prob *const probs = cm->fc->partition_prob[ctx]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2176 | const int has_rows = (mi_row + hbs) < cm->mi_rows; |
| 2177 | const int has_cols = (mi_col + hbs) < cm->mi_cols; |
| 2178 | |
Jingning Han | bf9c6b7 | 2016-12-14 14:50:45 -0800 | [diff] [blame] | 2179 | if (!is_partition_point) return; |
| 2180 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2181 | if (has_rows && has_cols) { |
| 2182 | #if CONFIG_EXT_PARTITION_TYPES |
| 2183 | if (bsize <= BLOCK_8X8) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2184 | av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2185 | else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2186 | av1_write_token(w, av1_ext_partition_tree, probs, |
| 2187 | &ext_partition_encodings[p]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2188 | #else |
Nathan E. Egge | 9d9eb6c | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 2189 | #if CONFIG_EC_MULTISYMBOL |
Nathan E. Egge | 56eeaa5 | 2016-07-25 10:23:33 -0400 | [diff] [blame] | 2190 | aom_write_symbol(w, p, cm->fc->partition_cdf[ctx], PARTITION_TYPES); |
Nathan E. Egge | fba2be6 | 2016-05-03 09:48:54 -0400 | [diff] [blame] | 2191 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2192 | av1_write_token(w, av1_partition_tree, probs, &partition_encodings[p]); |
Nathan E. Egge | fba2be6 | 2016-05-03 09:48:54 -0400 | [diff] [blame] | 2193 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2194 | #endif // CONFIG_EXT_PARTITION_TYPES |
| 2195 | } else if (!has_rows && has_cols) { |
| 2196 | assert(p == PARTITION_SPLIT || p == PARTITION_HORZ); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2197 | aom_write(w, p == PARTITION_SPLIT, probs[1]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2198 | } else if (has_rows && !has_cols) { |
| 2199 | assert(p == PARTITION_SPLIT || p == PARTITION_VERT); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2200 | aom_write(w, p == PARTITION_SPLIT, probs[2]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2201 | } else { |
| 2202 | assert(p == PARTITION_SPLIT); |
| 2203 | } |
| 2204 | } |
| 2205 | |
| 2206 | #if CONFIG_SUPERTX |
| 2207 | #define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \ |
| 2208 | mi_row, mi_col, bsize) \ |
| 2209 | write_modes_sb(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, mi_col, \ |
| 2210 | bsize) |
| 2211 | #else |
| 2212 | #define write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, \ |
| 2213 | mi_row, mi_col, bsize) \ |
| 2214 | write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, bsize) |
Alex Converse | ec6fb64 | 2016-10-19 11:31:48 -0700 | [diff] [blame] | 2215 | #endif // CONFIG_SUPERTX |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2216 | |
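// Recursively writes the partition tree and the per-block data for the
// superblock rooted at (mi_row, mi_col), followed by any per-superblock
// filter signaling (dering, CLPF).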
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2217 | static void write_modes_sb(AV1_COMP *const cpi, const TileInfo *const tile, |
| 2218 | aom_writer *const w, const TOKENEXTRA **tok, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2219 | const TOKENEXTRA *const tok_end, |
| 2220 | #if CONFIG_SUPERTX |
| 2221 | int supertx_enabled, |
| 2222 | #endif |
| 2223 | int mi_row, int mi_col, BLOCK_SIZE bsize) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2224 | const AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2225 | MACROBLOCKD *const xd = &cpi->td.mb.e_mbd; |
Jingning Han | c709e1f | 2016-12-06 14:48:09 -0800 | [diff] [blame] | 2226 | const int hbs = mi_size_wide[bsize] / 2; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2227 | const PARTITION_TYPE partition = get_partition(cm, mi_row, mi_col, bsize); |
| 2228 | const BLOCK_SIZE subsize = get_subsize(bsize, partition); |
Jingning Han | 5226184 | 2016-12-14 12:17:49 -0800 | [diff] [blame] | 2229 | #if CONFIG_CB4X4 |
| 2230 | const int unify_bsize = 1; |
| 2231 | #else |
| 2232 | const int unify_bsize = 0; |
| 2233 | #endif |
| 2234 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2235 | #if CONFIG_SUPERTX |
| 2236 | const int mi_offset = mi_row * cm->mi_stride + mi_col; |
| 2237 | MB_MODE_INFO *mbmi; |
| 2238 | const int pack_token = !supertx_enabled; |
| 2239 | TX_SIZE supertx_size; |
| 2240 | int plane; |
| 2241 | #endif |
| 2242 | |
| 2243 | if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) return; |
| 2244 | |
| 2245 | write_partition(cm, xd, hbs, mi_row, mi_col, partition, bsize, w); |
| 2246 | #if CONFIG_SUPERTX |
| 2247 | mbmi = &cm->mi_grid_visible[mi_offset]->mbmi; |
| 2248 | xd->mi = cm->mi_grid_visible + mi_offset; |
Jingning Han | 5b7706a | 2016-12-21 09:55:10 -0800 | [diff] [blame] | 2249 | set_mi_row_col(xd, tile, mi_row, mi_size_high[bsize], mi_col, |
| 2250 | mi_size_wide[bsize], cm->mi_rows, cm->mi_cols); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2251 | if (!supertx_enabled && !frame_is_intra_only(cm) && |
| 2252 | partition != PARTITION_NONE && bsize <= MAX_SUPERTX_BLOCK_SIZE && |
| 2253 | !xd->lossless[0]) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2254 | aom_prob prob; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2255 | supertx_size = max_txsize_lookup[bsize]; |
| 2256 | prob = cm->fc->supertx_prob[partition_supertx_context_lookup[partition]] |
| 2257 | [supertx_size]; |
| 2258 | supertx_enabled = (xd->mi[0]->mbmi.tx_size == supertx_size); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2259 | aom_write(w, supertx_enabled, prob); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2260 | } |
| 2261 | #endif // CONFIG_SUPERTX |
Jingning Han | 5226184 | 2016-12-14 12:17:49 -0800 | [diff] [blame] | 2262 | if (subsize < BLOCK_8X8 && !unify_bsize) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2263 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, mi_row, |
| 2264 | mi_col); |
| 2265 | } else { |
| 2266 | switch (partition) { |
| 2267 | case PARTITION_NONE: |
| 2268 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2269 | mi_row, mi_col); |
| 2270 | break; |
| 2271 | case PARTITION_HORZ: |
| 2272 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2273 | mi_row, mi_col); |
| 2274 | if (mi_row + hbs < cm->mi_rows) |
| 2275 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2276 | mi_row + hbs, mi_col); |
| 2277 | break; |
| 2278 | case PARTITION_VERT: |
| 2279 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2280 | mi_row, mi_col); |
| 2281 | if (mi_col + hbs < cm->mi_cols) |
| 2282 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2283 | mi_row, mi_col + hbs); |
| 2284 | break; |
| 2285 | case PARTITION_SPLIT: |
| 2286 | write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2287 | mi_row, mi_col, subsize); |
| 2288 | write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2289 | mi_row, mi_col + hbs, subsize); |
| 2290 | write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2291 | mi_row + hbs, mi_col, subsize); |
| 2292 | write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2293 | mi_row + hbs, mi_col + hbs, subsize); |
| 2294 | break; |
| 2295 | #if CONFIG_EXT_PARTITION_TYPES |
| 2296 | case PARTITION_HORZ_A: |
| 2297 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2298 | mi_row, mi_col); |
| 2299 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2300 | mi_row, mi_col + hbs); |
| 2301 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2302 | mi_row + hbs, mi_col); |
| 2303 | break; |
| 2304 | case PARTITION_HORZ_B: |
| 2305 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2306 | mi_row, mi_col); |
| 2307 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2308 | mi_row + hbs, mi_col); |
| 2309 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2310 | mi_row + hbs, mi_col + hbs); |
| 2311 | break; |
| 2312 | case PARTITION_VERT_A: |
| 2313 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2314 | mi_row, mi_col); |
| 2315 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2316 | mi_row + hbs, mi_col); |
| 2317 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2318 | mi_row, mi_col + hbs); |
| 2319 | break; |
| 2320 | case PARTITION_VERT_B: |
| 2321 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2322 | mi_row, mi_col); |
| 2323 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2324 | mi_row, mi_col + hbs); |
| 2325 | write_modes_b_wrapper(cpi, tile, w, tok, tok_end, supertx_enabled, |
| 2326 | mi_row + hbs, mi_col + hbs); |
| 2327 | break; |
| 2328 | #endif // CONFIG_EXT_PARTITION_TYPES |
| 2329 | default: assert(0); |
| 2330 | } |
| 2331 | } |
| 2332 | #if CONFIG_SUPERTX |
| 2333 | if (partition != PARTITION_NONE && supertx_enabled && pack_token) { |
| 2334 | int skip; |
Jingning Han | 5b7706a | 2016-12-21 09:55:10 -0800 | [diff] [blame] | 2335 | const int bsw = mi_size_wide[bsize]; |
| 2336 | const int bsh = mi_size_high[bsize]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2337 | xd->mi = cm->mi_grid_visible + mi_offset; |
| 2338 | supertx_size = mbmi->tx_size; |
Jingning Han | e92bf1c | 2016-11-21 10:41:56 -0800 | [diff] [blame] | 2339 | set_mi_row_col(xd, tile, mi_row, bsh, mi_col, bsw, cm->mi_rows, |
| 2340 | cm->mi_cols); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2341 | |
| 2342 | assert(IMPLIES(!cm->seg.enabled, mbmi->segment_id_supertx == 0)); |
| 2343 | assert(mbmi->segment_id_supertx < MAX_SEGMENTS); |
| 2344 | |
| 2345 | skip = write_skip(cm, xd, mbmi->segment_id_supertx, xd->mi[0], w); |
| 2346 | #if CONFIG_EXT_TX |
| 2347 | if (get_ext_tx_types(supertx_size, bsize, 1) > 1 && !skip) { |
| 2348 | int eset = get_ext_tx_set(supertx_size, bsize, 1); |
| 2349 | if (eset > 0) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2350 | av1_write_token(w, av1_ext_tx_inter_tree[eset], |
| 2351 | cm->fc->inter_ext_tx_prob[eset][supertx_size], |
| 2352 | &ext_tx_inter_encodings[eset][mbmi->tx_type]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2353 | } |
| 2354 | } |
| 2355 | #else |
| 2356 | if (supertx_size < TX_32X32 && !skip) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2357 | av1_write_token(w, av1_ext_tx_tree, |
| 2358 | cm->fc->inter_ext_tx_prob[supertx_size], |
| 2359 | &ext_tx_encodings[mbmi->tx_type]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2360 | } |
| 2361 | #endif // CONFIG_EXT_TX |
| 2362 | |
| 2363 | if (!skip) { |
| 2364 | assert(*tok < tok_end); |
| 2365 | for (plane = 0; plane < MAX_MB_PLANE; ++plane) { |
Jingning Han | 5b7706a | 2016-12-21 09:55:10 -0800 | [diff] [blame] | 2366 | const struct macroblockd_plane *const pd = &xd->plane[plane]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2367 | const int mbmi_txb_size = txsize_to_bsize[mbmi->tx_size]; |
Jingning Han | 5b7706a | 2016-12-21 09:55:10 -0800 | [diff] [blame] | 2368 | const BLOCK_SIZE plane_bsize = get_plane_block_size(mbmi_txb_size, pd); |
| 2369 | |
| 2370 | const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane); |
| 2371 | const int max_blocks_high = max_block_high(xd, plane_bsize, plane); |
| 2372 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2373 | int row, col; |
| 2374 | TX_SIZE tx = |
| 2375 | plane ? get_uv_tx_size(mbmi, &xd->plane[plane]) : mbmi->tx_size; |
| 2376 | BLOCK_SIZE txb_size = txsize_to_bsize[tx]; |
Jingning Han | 5b7706a | 2016-12-21 09:55:10 -0800 | [diff] [blame] | 2377 | |
| 2378 | const int stepr = tx_size_high_unit[txb_size]; |
| 2379 | const int stepc = tx_size_wide_unit[txb_size]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2380 | |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 2381 | TOKEN_STATS token_stats; |
| 2382 | token_stats.cost = 0; |
Jingning Han | 5b7706a | 2016-12-21 09:55:10 -0800 | [diff] [blame] | 2383 | for (row = 0; row < max_blocks_high; row += stepr) |
| 2384 | for (col = 0; col < max_blocks_wide; col += stepc) |
Angie Chiang | d402282 | 2016-11-02 18:30:25 -0700 | [diff] [blame] | 2385 | pack_mb_tokens(w, tok, tok_end, cm->bit_depth, tx, &token_stats); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2386 | assert(*tok < tok_end && (*tok)->token == EOSB_TOKEN); |
| 2387 | (*tok)++; |
| 2388 | } |
| 2389 | } |
Jingning Han | e92bf1c | 2016-11-21 10:41:56 -0800 | [diff] [blame] | 2390 | #if CONFIG_VAR_TX |
| 2391 | xd->above_txfm_context = cm->above_txfm_context + mi_col; |
| 2392 | xd->left_txfm_context = |
| 2393 | xd->left_txfm_context_buffer + (mi_row & MAX_MIB_MASK); |
| 2394 | set_txfm_ctxs(xd->mi[0]->mbmi.tx_size, bsw, bsh, skip, xd); |
| 2395 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2396 | } |
| 2397 | #endif // CONFIG_SUPERTX |
| 2398 | |
| 2399 | // update partition context |
| 2400 | #if CONFIG_EXT_PARTITION_TYPES |
| 2401 | update_ext_partition_context(xd, mi_row, mi_col, subsize, bsize, partition); |
| 2402 | #else |
| 2403 | if (bsize >= BLOCK_8X8 && |
| 2404 | (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT)) |
| 2405 | update_partition_context(xd, mi_row, mi_col, subsize, bsize); |
David Barker | f8935c9 | 2016-10-26 14:54:06 +0100 | [diff] [blame] | 2406 | #endif // CONFIG_EXT_PARTITION_TYPES |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2407 | |
Steinar Midtskogen | 5d56f4d | 2016-09-25 09:23:16 +0200 | [diff] [blame] | 2408 | #if CONFIG_DERING |
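  // Signal the dering gain for this superblock unless dering is disabled or
  // every block in the superblock is skipped.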
David Barker | 9739f36 | 2016-11-10 09:29:32 +0000 | [diff] [blame] | 2409 | #if CONFIG_EXT_PARTITION |
| 2410 | if (cm->sb_size == BLOCK_128X128 && bsize == BLOCK_128X128 && |
| 2411 | cm->dering_level != 0 && !sb_all_skip(cm, mi_row, mi_col)) { |
| 2412 | aom_write_literal( |
| 2413 | w, |
| 2414 | cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain, |
| 2415 | DERING_REFINEMENT_BITS); |
| 2416 | } else if (cm->sb_size == BLOCK_64X64 && bsize == BLOCK_64X64 && |
| 2417 | #else |
| 2418 | if (bsize == BLOCK_64X64 && |
| 2419 | #endif // CONFIG_EXT_PARTITION |
| 2420 | cm->dering_level != 0 && !sb_all_skip(cm, mi_row, mi_col)) { |
Steinar Midtskogen | 5d56f4d | 2016-09-25 09:23:16 +0200 | [diff] [blame] | 2421 | aom_write_literal( |
| 2422 | w, |
| 2423 | cm->mi_grid_visible[mi_row * cm->mi_stride + mi_col]->mbmi.dering_gain, |
| 2424 | DERING_REFINEMENT_BITS); |
| 2425 | } |
| 2426 | #endif |
| 2427 | |
Yaowu Xu | d71be78 | 2016-10-14 08:47:03 -0700 | [diff] [blame] | 2428 | #if CONFIG_CLPF |
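  // Signal the per-block CLPF flags for this superblock; the number of bits
  // depends on the chosen filter block size.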
David Barker | 9739f36 | 2016-11-10 09:29:32 +0000 | [diff] [blame] | 2429 | #if CONFIG_EXT_PARTITION |
| 2430 | if (cm->sb_size == BLOCK_128X128 && bsize == BLOCK_128X128 && |
| 2431 | cm->clpf_blocks && cm->clpf_strength_y && cm->clpf_size != CLPF_NOSIZE) { |
| 2432 | const int tl = mi_row * MI_SIZE / MIN_FB_SIZE * cm->clpf_stride + |
| 2433 | mi_col * MI_SIZE / MIN_FB_SIZE; |
| 2434 | if (cm->clpf_size == CLPF_128X128 && cm->clpf_blocks[tl] != CLPF_NOFLAG) { |
| 2435 | aom_write_literal(w, cm->clpf_blocks[tl], 1); |
| 2436 | } else if (cm->clpf_size == CLPF_64X64) { |
| 2437 | const int tr = tl + 2; |
| 2438 | const int bl = tl + 2 * cm->clpf_stride; |
| 2439 | const int br = tr + 2 * cm->clpf_stride; |
| 2440 | |
| 2441 | // Up to four bits per SB. |
| 2442 | if (cm->clpf_blocks[tl] != CLPF_NOFLAG) |
| 2443 | aom_write_literal(w, cm->clpf_blocks[tl], 1); |
| 2444 | |
| 2445 | if (mi_col + MI_SIZE < cm->mi_cols && cm->clpf_blocks[tr] != CLPF_NOFLAG) |
| 2446 | aom_write_literal(w, cm->clpf_blocks[tr], 1); |
| 2447 | |
| 2448 | if (mi_row + MI_SIZE < cm->mi_rows && cm->clpf_blocks[bl] != CLPF_NOFLAG) |
| 2449 | aom_write_literal(w, cm->clpf_blocks[bl], 1); |
| 2450 | |
| 2451 | if (mi_row + MI_SIZE < cm->mi_rows && mi_col + MI_SIZE < cm->mi_cols && |
| 2452 | cm->clpf_blocks[br] != CLPF_NOFLAG) |
| 2453 | aom_write_literal(w, cm->clpf_blocks[br], 1); |
| 2454 | } else if (cm->clpf_size == CLPF_32X32) { |
| 2455 | int i, j; |
| 2456 | const int size = 32 / MI_SIZE; |
| 2457 | // Up to sixteen bits per SB. |
| 2458 | for (i = 0; i < 4; ++i) |
| 2459 | for (j = 0; j < 4; ++j) { |
| 2460 | const int index = tl + i * cm->clpf_stride + j; |
| 2461 | if (mi_row + i * size < cm->mi_rows && |
| 2462 | mi_col + j * size < cm->mi_cols && |
| 2463 | cm->clpf_blocks[index] != CLPF_NOFLAG) |
| 2464 | aom_write_literal(w, cm->clpf_blocks[index], 1); |
| 2465 | } |
| 2466 | } |
| 2467 | } else if (cm->sb_size == BLOCK_64X64 && bsize == BLOCK_64X64 && |
| 2468 | #else |
| 2469 | if (bsize == BLOCK_64X64 && |
| 2470 | #endif // CONFIG_EXT_PARTITION |
| 2471 | cm->clpf_blocks && cm->clpf_strength_y && |
| 2472 | cm->clpf_size != CLPF_NOSIZE) { |
Yaowu Xu | d71be78 | 2016-10-14 08:47:03 -0700 | [diff] [blame] | 2473 | const int tl = mi_row * MI_SIZE / MIN_FB_SIZE * cm->clpf_stride + |
| 2474 | mi_col * MI_SIZE / MIN_FB_SIZE; |
| 2475 | const int tr = tl + 1; |
| 2476 | const int bl = tl + cm->clpf_stride; |
| 2477 | const int br = tr + cm->clpf_stride; |
| 2478 | |
| 2479 | // Up to four bits per SB. |
| 2480 | // When clpf_size indicates a size larger than the SB size |
| 2481 | // (CLPF_128X128), one bit for every fourth SB will be transmitted |
| 2482 | // regardless of skip blocks. |
| 2483 | if (cm->clpf_blocks[tl] != CLPF_NOFLAG) |
| 2484 | aom_write_literal(w, cm->clpf_blocks[tl], 1); |
| 2485 | |
| 2486 | if (mi_col + MI_SIZE / 2 < cm->mi_cols && |
| 2487 | cm->clpf_blocks[tr] != CLPF_NOFLAG) |
| 2488 | aom_write_literal(w, cm->clpf_blocks[tr], 1); |
| 2489 | |
| 2490 | if (mi_row + MI_SIZE / 2 < cm->mi_rows && |
| 2491 | cm->clpf_blocks[bl] != CLPF_NOFLAG) |
| 2492 | aom_write_literal(w, cm->clpf_blocks[bl], 1); |
| 2493 | |
| 2494 | if (mi_row + MI_SIZE / 2 < cm->mi_rows && |
| 2495 | mi_col + MI_SIZE / 2 < cm->mi_cols && |
| 2496 | cm->clpf_blocks[br] != CLPF_NOFLAG) |
| 2497 | aom_write_literal(w, cm->clpf_blocks[br], 1); |
| 2498 | } |
David Barker | 9739f36 | 2016-11-10 09:29:32 +0000 | [diff] [blame] | 2499 | #endif // CONFIG_CLPF |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2500 | } |
| 2501 | |
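// Writes the mode and token data for an entire tile by visiting its
// superblocks in raster order.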
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2502 | static void write_modes(AV1_COMP *const cpi, const TileInfo *const tile, |
| 2503 | aom_writer *const w, const TOKENEXTRA **tok, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2504 | const TOKENEXTRA *const tok_end) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2505 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2506 | MACROBLOCKD *const xd = &cpi->td.mb.e_mbd; |
| 2507 | const int mi_row_start = tile->mi_row_start; |
| 2508 | const int mi_row_end = tile->mi_row_end; |
| 2509 | const int mi_col_start = tile->mi_col_start; |
| 2510 | const int mi_col_end = tile->mi_col_end; |
| 2511 | int mi_row, mi_col; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2512 | av1_zero_above_context(cm, mi_col_start, mi_col_end); |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 2513 | #if CONFIG_PVQ |
| 2514 | assert(cpi->td.mb.pvq_q->curr_pos == 0); |
| 2515 | #endif |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 2516 | #if CONFIG_DELTA_Q |
| 2517 | if (cpi->common.delta_q_present_flag) { |
| 2518 | xd->prev_qindex = cpi->common.base_qindex; |
| 2519 | } |
| 2520 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2521 | |
| 2522 | for (mi_row = mi_row_start; mi_row < mi_row_end; mi_row += cm->mib_size) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2523 | av1_zero_left_context(xd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2524 | |
| 2525 | for (mi_col = mi_col_start; mi_col < mi_col_end; mi_col += cm->mib_size) { |
| 2526 | write_modes_sb_wrapper(cpi, tile, w, tok, tok_end, 0, mi_row, mi_col, |
| 2527 | cm->sb_size); |
| 2528 | } |
| 2529 | } |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 2530 | #if CONFIG_PVQ |
| 2531 | // Check that the number of PVQ blocks encoded and the number written to the
| 2532 | // bitstream are the same
| 2533 | assert(cpi->td.mb.pvq_q->curr_pos == cpi->td.mb.pvq_q->last_pos); |
| 2534 | // Reset curr_pos in case we repack the bitstream |
| 2535 | cpi->td.mb.pvq_q->curr_pos = 0; |
| 2536 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2537 | } |
| 2538 | |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 2539 | #if !CONFIG_PVQ |
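// Converts the raw coefficient token counts gathered during encoding into
// per-node branch counts and binary model probabilities for the coefficient
// tree at the given transform size.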
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2540 | static void build_tree_distribution(AV1_COMP *cpi, TX_SIZE tx_size, |
| 2541 | av1_coeff_stats *coef_branch_ct, |
| 2542 | av1_coeff_probs_model *coef_probs) { |
| 2543 | av1_coeff_count *coef_counts = cpi->td.rd_counts.coef_counts[tx_size]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2544 | unsigned int(*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] = |
| 2545 | cpi->common.counts.eob_branch[tx_size]; |
| 2546 | int i, j, k, l, m; |
| 2547 | |
| 2548 | for (i = 0; i < PLANE_TYPES; ++i) { |
| 2549 | for (j = 0; j < REF_TYPES; ++j) { |
| 2550 | for (k = 0; k < COEF_BANDS; ++k) { |
| 2551 | for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2552 | av1_tree_probs_from_distribution(av1_coef_tree, |
| 2553 | coef_branch_ct[i][j][k][l], |
| 2554 | coef_counts[i][j][k][l]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2555 | coef_branch_ct[i][j][k][l][0][1] = |
| 2556 | eob_branch_ct[i][j][k][l] - coef_branch_ct[i][j][k][l][0][0]; |
| 2557 | for (m = 0; m < UNCONSTRAINED_NODES; ++m) |
| 2558 | coef_probs[i][j][k][l][m] = |
| 2559 | get_binary_prob(coef_branch_ct[i][j][k][l][m][0], |
| 2560 | coef_branch_ct[i][j][k][l][m][1]); |
| 2561 | } |
| 2562 | } |
| 2563 | } |
| 2564 | } |
| 2565 | } |
| 2566 | |
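// Searches for coefficient probability updates that are worth their signaling
// cost and writes the selected differential updates to the bitstream, using
// either a two-pass or a single reduced-pass strategy depending on
// cpi->sf.use_fast_coef_updates.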
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2567 | static void update_coef_probs_common(aom_writer *const bc, AV1_COMP *cpi, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2568 | TX_SIZE tx_size, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2569 | av1_coeff_stats *frame_branch_ct, |
| 2570 | av1_coeff_probs_model *new_coef_probs) { |
| 2571 | av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size]; |
| 2572 | const aom_prob upd = DIFF_UPDATE_PROB; |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 2573 | #if CONFIG_EC_ADAPT |
Thomas Davies | 09ebbfb | 2016-10-20 18:28:47 +0100 | [diff] [blame] | 2574 | const int entropy_nodes_update = UNCONSTRAINED_NODES - 1; |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 2575 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2576 | const int entropy_nodes_update = UNCONSTRAINED_NODES; |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 2577 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2578 | int i, j, k, l, t; |
| 2579 | int stepsize = cpi->sf.coeff_prob_appx_step; |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 2580 | #if CONFIG_TILE_GROUPS |
| 2581 | const int probwt = cpi->common.num_tg; |
| 2582 | #else |
| 2583 | const int probwt = 1; |
| 2584 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2585 | |
| 2586 | switch (cpi->sf.use_fast_coef_updates) { |
| 2587 | case TWO_LOOP: { |
| 2588 | /* dry run to see if any update is needed at all */
| 2589 | int savings = 0; |
| 2590 | int update[2] = { 0, 0 }; |
| 2591 | for (i = 0; i < PLANE_TYPES; ++i) { |
| 2592 | for (j = 0; j < REF_TYPES; ++j) { |
| 2593 | for (k = 0; k < COEF_BANDS; ++k) { |
| 2594 | for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) { |
| 2595 | for (t = 0; t < entropy_nodes_update; ++t) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2596 | aom_prob newp = new_coef_probs[i][j][k][l][t]; |
| 2597 | const aom_prob oldp = old_coef_probs[i][j][k][l][t]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2598 | int s; |
| 2599 | int u = 0; |
| 2600 | if (t == PIVOT_NODE) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2601 | s = av1_prob_diff_update_savings_search_model( |
Alex Converse | a9ce4b7 | 2016-05-25 10:28:03 -0700 | [diff] [blame] | 2602 | frame_branch_ct[i][j][k][l][0], oldp, &newp, upd, |
| 2603 | stepsize, probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2604 | else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2605 | s = av1_prob_diff_update_savings_search( |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 2606 | frame_branch_ct[i][j][k][l][t], oldp, &newp, upd, probwt); |
| 2607 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2608 | if (s > 0 && newp != oldp) u = 1; |
| 2609 | if (u) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2610 | savings += s - (int)(av1_cost_zero(upd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2611 | else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2612 | savings -= (int)(av1_cost_zero(upd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2613 | update[u]++; |
| 2614 | } |
| 2615 | } |
| 2616 | } |
| 2617 | } |
| 2618 | } |
| 2619 | |
| 2620 | /* Is any coef prob updated at all? */
| 2621 | if (update[1] == 0 || savings < 0) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2622 | aom_write_bit(bc, 0); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2623 | return; |
| 2624 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2625 | aom_write_bit(bc, 1); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2626 | for (i = 0; i < PLANE_TYPES; ++i) { |
| 2627 | for (j = 0; j < REF_TYPES; ++j) { |
| 2628 | for (k = 0; k < COEF_BANDS; ++k) { |
| 2629 | for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) { |
| 2630 | // calc probs and branch cts for this frame only |
| 2631 | for (t = 0; t < entropy_nodes_update; ++t) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2632 | aom_prob newp = new_coef_probs[i][j][k][l][t]; |
| 2633 | aom_prob *oldp = old_coef_probs[i][j][k][l] + t; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2634 | int s; |
| 2635 | int u = 0; |
| 2636 | if (t == PIVOT_NODE) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2637 | s = av1_prob_diff_update_savings_search_model( |
Alex Converse | a9ce4b7 | 2016-05-25 10:28:03 -0700 | [diff] [blame] | 2638 | frame_branch_ct[i][j][k][l][0], *oldp, &newp, upd, |
| 2639 | stepsize, probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2640 | else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2641 | s = av1_prob_diff_update_savings_search( |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 2642 | frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd, |
| 2643 | probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2644 | if (s > 0 && newp != *oldp) u = 1; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2645 | aom_write(bc, u, upd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2646 | if (u) { |
| 2647 | /* send/use new probability */ |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2648 | av1_write_prob_diff_update(bc, newp, *oldp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2649 | *oldp = newp; |
| 2650 | } |
| 2651 | } |
| 2652 | } |
| 2653 | } |
| 2654 | } |
| 2655 | } |
| 2656 | return; |
| 2657 | } |
| 2658 | |
| 2659 | case ONE_LOOP_REDUCED: { |
| 2660 | int updates = 0; |
| 2661 | int noupdates_before_first = 0; |
| 2662 | for (i = 0; i < PLANE_TYPES; ++i) { |
| 2663 | for (j = 0; j < REF_TYPES; ++j) { |
| 2664 | for (k = 0; k < COEF_BANDS; ++k) { |
| 2665 | for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) { |
| 2666 | // calc probs and branch cts for this frame only |
| 2667 | for (t = 0; t < entropy_nodes_update; ++t) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2668 | aom_prob newp = new_coef_probs[i][j][k][l][t]; |
| 2669 | aom_prob *oldp = old_coef_probs[i][j][k][l] + t; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2670 | int s; |
| 2671 | int u = 0; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2672 | if (t == PIVOT_NODE) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2673 | s = av1_prob_diff_update_savings_search_model( |
Alex Converse | a9ce4b7 | 2016-05-25 10:28:03 -0700 | [diff] [blame] | 2674 | frame_branch_ct[i][j][k][l][0], *oldp, &newp, upd, |
| 2675 | stepsize, probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2676 | } else { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2677 | s = av1_prob_diff_update_savings_search( |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 2678 | frame_branch_ct[i][j][k][l][t], *oldp, &newp, upd, |
| 2679 | probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2680 | } |
| 2681 | |
| 2682 | if (s > 0 && newp != *oldp) u = 1; |
| 2683 | updates += u; |
| 2684 | if (u == 0 && updates == 0) { |
| 2685 | noupdates_before_first++; |
| 2686 | continue; |
| 2687 | } |
| 2688 | if (u == 1 && updates == 1) { |
| 2689 | int v; |
| 2690 | // first update |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2691 | aom_write_bit(bc, 1); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2692 | for (v = 0; v < noupdates_before_first; ++v) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2693 | aom_write(bc, 0, upd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2694 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2695 | aom_write(bc, u, upd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2696 | if (u) { |
| 2697 | /* send/use new probability */ |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2698 | av1_write_prob_diff_update(bc, newp, *oldp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2699 | *oldp = newp; |
| 2700 | } |
| 2701 | } |
| 2702 | } |
| 2703 | } |
| 2704 | } |
| 2705 | } |
| 2706 | if (updates == 0) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2707 | aom_write_bit(bc, 0); // no updates |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2708 | } |
| 2709 | return; |
| 2710 | } |
| 2711 | default: assert(0); |
| 2712 | } |
| 2713 | } |
| 2714 | |
| 2715 | #if CONFIG_ENTROPY |
| 2716 | // Calculate the token counts between subsequent subframe updates. |
clang-format | 67948d3 | 2016-09-07 22:40:40 -0700 | [diff] [blame] | 2717 | static void get_coef_counts_diff(AV1_COMP *cpi, int index, |
| 2718 | av1_coeff_count coef_counts[TX_SIZES] |
| 2719 | [PLANE_TYPES], |
| 2720 | unsigned int eob_counts[TX_SIZES][PLANE_TYPES] |
| 2721 | [REF_TYPES][COEF_BANDS] |
| 2722 | [COEFF_CONTEXTS]) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2723 | int i, j, k, l, m, tx_size, val; |
| 2724 | const int max_idx = cpi->common.coef_probs_update_idx; |
| 2725 | const TX_MODE tx_mode = cpi->common.tx_mode; |
Urvang Joshi | cb586f3 | 2016-09-20 11:36:33 -0700 | [diff] [blame] | 2726 | const int max_tx_size = tx_mode_to_biggest_tx_size[tx_mode]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2727 | const SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats; |
| 2728 | |
| 2729 | assert(max_idx < COEF_PROBS_BUFS); |
| 2730 | |
| 2731 | for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size) |
| 2732 | for (i = 0; i < PLANE_TYPES; ++i) |
| 2733 | for (j = 0; j < REF_TYPES; ++j) |
| 2734 | for (k = 0; k < COEF_BANDS; ++k) |
| 2735 | for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) { |
| 2736 | if (index == max_idx) { |
| 2737 | val = |
| 2738 | cpi->common.counts.eob_branch[tx_size][i][j][k][l] - |
| 2739 | subframe_stats->eob_counts_buf[max_idx][tx_size][i][j][k][l]; |
| 2740 | } else { |
clang-format | 67948d3 | 2016-09-07 22:40:40 -0700 | [diff] [blame] | 2741 | val = subframe_stats->eob_counts_buf[index + 1][tx_size][i][j][k] |
| 2742 | [l] - |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2743 | subframe_stats->eob_counts_buf[index][tx_size][i][j][k][l]; |
| 2744 | } |
| 2745 | assert(val >= 0); |
| 2746 | eob_counts[tx_size][i][j][k][l] = val; |
| 2747 | |
| 2748 | for (m = 0; m < ENTROPY_TOKENS; ++m) { |
| 2749 | if (index == max_idx) { |
| 2750 | val = cpi->td.rd_counts.coef_counts[tx_size][i][j][k][l][m] - |
clang-format | 67948d3 | 2016-09-07 22:40:40 -0700 | [diff] [blame] | 2751 | subframe_stats->coef_counts_buf[max_idx][tx_size][i][j][k] |
| 2752 | [l][m]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2753 | } else { |
clang-format | 67948d3 | 2016-09-07 22:40:40 -0700 | [diff] [blame] | 2754 | val = subframe_stats->coef_counts_buf[index + 1][tx_size][i][j] |
| 2755 | [k][l][m] - |
| 2756 | subframe_stats->coef_counts_buf[index][tx_size][i][j][k] |
| 2757 | [l][m]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2758 | } |
| 2759 | assert(val >= 0); |
| 2760 | coef_counts[tx_size][i][j][k][l][m] = val; |
| 2761 | } |
| 2762 | } |
| 2763 | } |
| 2764 | |
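// Variant of update_coef_probs_common() used with CONFIG_ENTROPY subframe
// updates: the probability-update searches are run against the branch counts
// accumulated for every subframe interval (idx 0..coef_probs_update_idx)
// rather than a single whole-frame count.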
| 2765 | static void update_coef_probs_subframe( |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2766 | aom_writer *const bc, AV1_COMP *cpi, TX_SIZE tx_size, |
| 2767 | av1_coeff_stats branch_ct[COEF_PROBS_BUFS][TX_SIZES][PLANE_TYPES], |
| 2768 | av1_coeff_probs_model *new_coef_probs) { |
| 2769 | av1_coeff_probs_model *old_coef_probs = cpi->common.fc->coef_probs[tx_size]; |
| 2770 | const aom_prob upd = DIFF_UPDATE_PROB; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2771 | const int entropy_nodes_update = UNCONSTRAINED_NODES; |
| 2772 | int i, j, k, l, t; |
| 2773 | int stepsize = cpi->sf.coeff_prob_appx_step; |
| 2774 | const int max_idx = cpi->common.coef_probs_update_idx; |
| 2775 | int idx; |
| 2776 | unsigned int this_branch_ct[ENTROPY_NODES][COEF_PROBS_BUFS][2]; |
| 2777 | |
| 2778 | switch (cpi->sf.use_fast_coef_updates) { |
| 2779 | case TWO_LOOP: { |
| 2780 |       /* Dry run to see whether any update is needed at all */
| 2781 | int savings = 0; |
| 2782 | int update[2] = { 0, 0 }; |
| 2783 | for (i = 0; i < PLANE_TYPES; ++i) { |
| 2784 | for (j = 0; j < REF_TYPES; ++j) { |
| 2785 | for (k = 0; k < COEF_BANDS; ++k) { |
| 2786 | for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) { |
| 2787 | for (t = 0; t < ENTROPY_NODES; ++t) { |
| 2788 | for (idx = 0; idx <= max_idx; ++idx) { |
| 2789 | memcpy(this_branch_ct[t][idx], |
| 2790 | branch_ct[idx][tx_size][i][j][k][l][t], |
| 2791 | 2 * sizeof(this_branch_ct[t][idx][0])); |
| 2792 | } |
| 2793 | } |
| 2794 | for (t = 0; t < entropy_nodes_update; ++t) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2795 | aom_prob newp = new_coef_probs[i][j][k][l][t]; |
| 2796 | const aom_prob oldp = old_coef_probs[i][j][k][l][t]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2797 | int s, u = 0; |
| 2798 | |
| 2799 | if (t == PIVOT_NODE) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2800 | s = av1_prob_update_search_model_subframe( |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2801 | this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd, |
| 2802 | stepsize, max_idx); |
| 2803 | else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2804 | s = av1_prob_update_search_subframe(this_branch_ct[t], oldp, |
| 2805 | &newp, upd, max_idx); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2806 | if (s > 0 && newp != oldp) u = 1; |
| 2807 | if (u) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2808 | savings += s - (int)(av1_cost_zero(upd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2809 | else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2810 | savings -= (int)(av1_cost_zero(upd)); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2811 | update[u]++; |
| 2812 | } |
| 2813 | } |
| 2814 | } |
| 2815 | } |
| 2816 | } |
| 2817 | |
| 2818 |       /* Are any coefficient probabilities updated at all? */
| 2819 | if (update[1] == 0 || savings < 0) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2820 | aom_write_bit(bc, 0); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2821 | return; |
| 2822 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2823 | aom_write_bit(bc, 1); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2824 | for (i = 0; i < PLANE_TYPES; ++i) { |
| 2825 | for (j = 0; j < REF_TYPES; ++j) { |
| 2826 | for (k = 0; k < COEF_BANDS; ++k) { |
| 2827 | for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) { |
| 2828 | for (t = 0; t < ENTROPY_NODES; ++t) { |
| 2829 | for (idx = 0; idx <= max_idx; ++idx) { |
| 2830 | memcpy(this_branch_ct[t][idx], |
| 2831 | branch_ct[idx][tx_size][i][j][k][l][t], |
| 2832 | 2 * sizeof(this_branch_ct[t][idx][0])); |
| 2833 | } |
| 2834 | } |
| 2835 | for (t = 0; t < entropy_nodes_update; ++t) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2836 | aom_prob newp = new_coef_probs[i][j][k][l][t]; |
| 2837 | aom_prob *oldp = old_coef_probs[i][j][k][l] + t; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2838 | int s; |
| 2839 | int u = 0; |
| 2840 | |
| 2841 | if (t == PIVOT_NODE) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2842 | s = av1_prob_update_search_model_subframe( |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2843 | this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd, |
| 2844 | stepsize, max_idx); |
| 2845 | else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2846 | s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp, |
| 2847 | &newp, upd, max_idx); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2848 | if (s > 0 && newp != *oldp) u = 1; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2849 | aom_write(bc, u, upd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2850 | if (u) { |
| 2851 | /* send/use new probability */ |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2852 | av1_write_prob_diff_update(bc, newp, *oldp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2853 | *oldp = newp; |
| 2854 | } |
| 2855 | } |
| 2856 | } |
| 2857 | } |
| 2858 | } |
| 2859 | } |
| 2860 | return; |
| 2861 | } |
| 2862 | |
| 2863 | case ONE_LOOP_REDUCED: { |
| 2864 | int updates = 0; |
| 2865 | int noupdates_before_first = 0; |
| 2866 | for (i = 0; i < PLANE_TYPES; ++i) { |
| 2867 | for (j = 0; j < REF_TYPES; ++j) { |
| 2868 | for (k = 0; k < COEF_BANDS; ++k) { |
| 2869 | for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) { |
| 2870 | for (t = 0; t < ENTROPY_NODES; ++t) { |
| 2871 | for (idx = 0; idx <= max_idx; ++idx) { |
| 2872 | memcpy(this_branch_ct[t][idx], |
| 2873 | branch_ct[idx][tx_size][i][j][k][l][t], |
| 2874 | 2 * sizeof(this_branch_ct[t][idx][0])); |
| 2875 | } |
| 2876 | } |
| 2877 | for (t = 0; t < entropy_nodes_update; ++t) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2878 | aom_prob newp = new_coef_probs[i][j][k][l][t]; |
| 2879 | aom_prob *oldp = old_coef_probs[i][j][k][l] + t; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2880 | int s; |
| 2881 | int u = 0; |
| 2882 | |
| 2883 | if (t == PIVOT_NODE) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2884 | s = av1_prob_update_search_model_subframe( |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2885 | this_branch_ct, old_coef_probs[i][j][k][l], &newp, upd, |
| 2886 | stepsize, max_idx); |
| 2887 | else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2888 | s = av1_prob_update_search_subframe(this_branch_ct[t], *oldp, |
| 2889 | &newp, upd, max_idx); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2890 | if (s > 0 && newp != *oldp) u = 1; |
| 2891 | updates += u; |
| 2892 | if (u == 0 && updates == 0) { |
| 2893 | noupdates_before_first++; |
| 2894 | continue; |
| 2895 | } |
| 2896 | if (u == 1 && updates == 1) { |
| 2897 | int v; |
| 2898 | // first update |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2899 | aom_write_bit(bc, 1); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2900 | for (v = 0; v < noupdates_before_first; ++v) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2901 | aom_write(bc, 0, upd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2902 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2903 | aom_write(bc, u, upd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2904 | if (u) { |
| 2905 | /* send/use new probability */ |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2906 | av1_write_prob_diff_update(bc, newp, *oldp); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2907 | *oldp = newp; |
| 2908 | } |
| 2909 | } |
| 2910 | } |
| 2911 | } |
| 2912 | } |
| 2913 | } |
| 2914 | if (updates == 0) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2915 | aom_write_bit(bc, 0); // no updates |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2916 | } |
| 2917 | return; |
| 2918 | } |
| 2919 | default: assert(0); |
| 2920 | } |
| 2921 | } |
| 2922 | #endif // CONFIG_ENTROPY |
| 2923 | |
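// Writes coefficient probability updates for each transform size used in the
// frame. When a transform size is barely used (or excluded by the
// tx_size_search speed feature), a single 0 bit signals that no update
// follows; otherwise branch counts are built and differential updates are
// coded, per whole frame or per subframe interval under CONFIG_ENTROPY.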
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2924 | static void update_coef_probs(AV1_COMP *cpi, aom_writer *w) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2925 | const TX_MODE tx_mode = cpi->common.tx_mode; |
| 2926 | const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode]; |
| 2927 | TX_SIZE tx_size; |
Alex Converse | 1e4e29f | 2016-11-08 14:12:14 -0800 | [diff] [blame] | 2928 | #if CONFIG_EC_MULTISYMBOL |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2929 | int update = 0; |
Alex Converse | 1e4e29f | 2016-11-08 14:12:14 -0800 | [diff] [blame] | 2930 | #endif // CONFIG_EC_MULTISYMBOL |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2931 | #if CONFIG_ENTROPY |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2932 | AV1_COMMON *cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2933 | SUBFRAME_STATS *subframe_stats = &cpi->subframe_stats; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2934 | int i; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2935 | av1_coeff_probs_model dummy_frame_coef_probs[PLANE_TYPES]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2936 | |
| 2937 | if (cm->do_subframe_update && |
| 2938 | cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2939 | av1_copy(cpi->common.fc->coef_probs, |
| 2940 | subframe_stats->enc_starting_coef_probs); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2941 | for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) { |
| 2942 | get_coef_counts_diff(cpi, i, cpi->wholeframe_stats.coef_counts_buf[i], |
| 2943 | cpi->wholeframe_stats.eob_counts_buf[i]); |
| 2944 | } |
| 2945 | } |
| 2946 | #endif // CONFIG_ENTROPY |
| 2947 | |
Jingning Han | 8363063 | 2016-12-16 11:27:25 -0800 | [diff] [blame] | 2948 | for (tx_size = 0; tx_size <= max_tx_size; ++tx_size) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2949 | av1_coeff_stats frame_branch_ct[PLANE_TYPES]; |
| 2950 | av1_coeff_probs_model frame_coef_probs[PLANE_TYPES]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2951 | if (cpi->td.counts->tx_size_totals[tx_size] <= 20 || |
| 2952 | (tx_size >= TX_16X16 && cpi->sf.tx_size_search_method == USE_TX_8X8)) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2953 | aom_write_bit(w, 0); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2954 | } else { |
| 2955 | #if CONFIG_ENTROPY |
| 2956 | if (cm->do_subframe_update && |
| 2957 | cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) { |
Urvang Joshi | 43e6281 | 2016-10-20 14:51:01 -0700 | [diff] [blame] | 2958 | unsigned int this_eob_counts_copy[PLANE_TYPES][REF_TYPES][COEF_BANDS] |
| 2959 | [COEFF_CONTEXTS]; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2960 | av1_coeff_count coef_counts_copy[PLANE_TYPES]; |
Urvang Joshi | 43e6281 | 2016-10-20 14:51:01 -0700 | [diff] [blame] | 2961 | av1_copy(this_eob_counts_copy, cpi->common.counts.eob_branch[tx_size]); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2962 | av1_copy(coef_counts_copy, cpi->td.rd_counts.coef_counts[tx_size]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2963 | build_tree_distribution(cpi, tx_size, frame_branch_ct, |
| 2964 | frame_coef_probs); |
| 2965 | for (i = 0; i <= cpi->common.coef_probs_update_idx; ++i) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2966 | av1_copy(cpi->common.counts.eob_branch[tx_size], |
| 2967 | cpi->wholeframe_stats.eob_counts_buf[i][tx_size]); |
| 2968 | av1_copy(cpi->td.rd_counts.coef_counts[tx_size], |
| 2969 | cpi->wholeframe_stats.coef_counts_buf[i][tx_size]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2970 | build_tree_distribution(cpi, tx_size, cpi->branch_ct_buf[i][tx_size], |
| 2971 | dummy_frame_coef_probs); |
| 2972 | } |
Urvang Joshi | 43e6281 | 2016-10-20 14:51:01 -0700 | [diff] [blame] | 2973 | av1_copy(cpi->common.counts.eob_branch[tx_size], this_eob_counts_copy); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2974 | av1_copy(cpi->td.rd_counts.coef_counts[tx_size], coef_counts_copy); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2975 | |
| 2976 | update_coef_probs_subframe(w, cpi, tx_size, cpi->branch_ct_buf, |
| 2977 | frame_coef_probs); |
Alex Converse | 1e4e29f | 2016-11-08 14:12:14 -0800 | [diff] [blame] | 2978 | #if CONFIG_EC_MULTISYMBOL |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2979 | update = 1; |
Alex Converse | 1e4e29f | 2016-11-08 14:12:14 -0800 | [diff] [blame] | 2980 | #endif // CONFIG_EC_MULTISYMBOL |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2981 | } else { |
| 2982 | #endif // CONFIG_ENTROPY |
| 2983 | build_tree_distribution(cpi, tx_size, frame_branch_ct, |
| 2984 | frame_coef_probs); |
| 2985 | update_coef_probs_common(w, cpi, tx_size, frame_branch_ct, |
| 2986 | frame_coef_probs); |
Alex Converse | 1e4e29f | 2016-11-08 14:12:14 -0800 | [diff] [blame] | 2987 | #if CONFIG_EC_MULTISYMBOL |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2988 | update = 1; |
Alex Converse | 1e4e29f | 2016-11-08 14:12:14 -0800 | [diff] [blame] | 2989 | #endif // CONFIG_EC_MULTISYMBOL |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2990 | #if CONFIG_ENTROPY |
| 2991 | } |
| 2992 | #endif // CONFIG_ENTROPY |
| 2993 | } |
| 2994 | } |
| 2995 | |
| 2996 | #if CONFIG_ENTROPY |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 2997 | av1_copy(cm->starting_coef_probs, cm->fc->coef_probs); |
| 2998 | av1_copy(subframe_stats->coef_probs_buf[0], cm->fc->coef_probs); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 2999 | if (cm->do_subframe_update && |
| 3000 | cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_BACKWARD) { |
Urvang Joshi | 43e6281 | 2016-10-20 14:51:01 -0700 | [diff] [blame] | 3001 | unsigned int eob_counts_copy[TX_SIZES][PLANE_TYPES][REF_TYPES][COEF_BANDS] |
| 3002 | [COEFF_CONTEXTS]; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3003 | av1_copy(eob_counts_copy, cm->counts.eob_branch); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3004 | for (i = 1; i <= cpi->common.coef_probs_update_idx; ++i) { |
| 3005 | for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3006 | av1_full_to_model_counts(cm->counts.coef[tx_size], |
| 3007 | subframe_stats->coef_counts_buf[i][tx_size]); |
| 3008 | av1_copy(cm->counts.eob_branch, subframe_stats->eob_counts_buf[i]); |
| 3009 | av1_partial_adapt_probs(cm, 0, 0); |
| 3010 | av1_copy(subframe_stats->coef_probs_buf[i], cm->fc->coef_probs); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3011 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3012 | av1_copy(cm->fc->coef_probs, subframe_stats->coef_probs_buf[0]); |
| 3013 | av1_copy(cm->counts.eob_branch, eob_counts_copy); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3014 | } |
| 3015 | #endif // CONFIG_ENTROPY |
Alex Converse | 1e4e29f | 2016-11-08 14:12:14 -0800 | [diff] [blame] | 3016 | #if CONFIG_EC_MULTISYMBOL |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3017 | if (update) av1_coef_pareto_cdfs(cpi->common.fc); |
Alex Converse | 1e4e29f | 2016-11-08 14:12:14 -0800 | [diff] [blame] | 3018 | #endif // CONFIG_EC_MULTISYMBOL |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3019 | } |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 3020 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3021 | |
| 3022 | #if CONFIG_LOOP_RESTORATION |
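// Signals the frame-level loop-restoration type in the uncompressed header
// using a short bit code:
//   RESTORE_NONE         -> 0 0
//   RESTORE_SWITCHABLE   -> 0 1
//   RESTORE_WIENER       -> 1 0
//   RESTORE_SGRPROJ      -> 1 1 0
//   RESTORE_DOMAINTXFMRF -> 1 1 1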
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 3023 | static void encode_restoration_mode(AV1_COMMON *cm, |
| 3024 | struct aom_write_bit_buffer *wb) { |
| 3025 | RestorationInfo *rst = &cm->rst_info; |
| 3026 | switch (rst->frame_restoration_type) { |
| 3027 | case RESTORE_NONE: |
| 3028 | aom_wb_write_bit(wb, 0); |
| 3029 | aom_wb_write_bit(wb, 0); |
| 3030 | break; |
Debargha Mukherjee | 0e67b25 | 2016-12-08 09:22:44 -0800 | [diff] [blame] | 3031 | case RESTORE_WIENER: |
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 3032 | aom_wb_write_bit(wb, 1); |
Debargha Mukherjee | 0e67b25 | 2016-12-08 09:22:44 -0800 | [diff] [blame] | 3033 | aom_wb_write_bit(wb, 0); |
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 3034 | break; |
Debargha Mukherjee | 8f209a8 | 2016-10-12 10:47:01 -0700 | [diff] [blame] | 3035 | case RESTORE_SGRPROJ: |
| 3036 | aom_wb_write_bit(wb, 1); |
Debargha Mukherjee | 0e67b25 | 2016-12-08 09:22:44 -0800 | [diff] [blame] | 3037 | aom_wb_write_bit(wb, 1); |
Debargha Mukherjee | 3981be9 | 2016-11-21 09:35:44 -0800 | [diff] [blame] | 3038 | aom_wb_write_bit(wb, 0); |
| 3039 | break; |
| 3040 | case RESTORE_DOMAINTXFMRF: |
| 3041 | aom_wb_write_bit(wb, 1); |
Debargha Mukherjee | 0e67b25 | 2016-12-08 09:22:44 -0800 | [diff] [blame] | 3042 | aom_wb_write_bit(wb, 1); |
Debargha Mukherjee | 3981be9 | 2016-11-21 09:35:44 -0800 | [diff] [blame] | 3043 | aom_wb_write_bit(wb, 1); |
Debargha Mukherjee | 8f209a8 | 2016-10-12 10:47:01 -0700 | [diff] [blame] | 3044 | break; |
Debargha Mukherjee | 0e67b25 | 2016-12-08 09:22:44 -0800 | [diff] [blame] | 3045 | case RESTORE_SWITCHABLE: |
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 3046 | aom_wb_write_bit(wb, 0); |
Debargha Mukherjee | 8f209a8 | 2016-10-12 10:47:01 -0700 | [diff] [blame] | 3047 | aom_wb_write_bit(wb, 1); |
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 3048 | break; |
| 3049 | default: assert(0); |
| 3050 | } |
| 3051 | } |
| 3052 | |
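// Writes the first three vertical and horizontal Wiener filter taps as
// fixed-width literals, each offset by its per-tap minimum value.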
Debargha Mukherjee | 8f209a8 | 2016-10-12 10:47:01 -0700 | [diff] [blame] | 3053 | static void write_wiener_filter(WienerInfo *wiener_info, aom_writer *wb) { |
| 3054 | aom_write_literal(wb, wiener_info->vfilter[0] - WIENER_FILT_TAP0_MINV, |
| 3055 | WIENER_FILT_TAP0_BITS); |
| 3056 | aom_write_literal(wb, wiener_info->vfilter[1] - WIENER_FILT_TAP1_MINV, |
| 3057 | WIENER_FILT_TAP1_BITS); |
| 3058 | aom_write_literal(wb, wiener_info->vfilter[2] - WIENER_FILT_TAP2_MINV, |
| 3059 | WIENER_FILT_TAP2_BITS); |
| 3060 | aom_write_literal(wb, wiener_info->hfilter[0] - WIENER_FILT_TAP0_MINV, |
| 3061 | WIENER_FILT_TAP0_BITS); |
| 3062 | aom_write_literal(wb, wiener_info->hfilter[1] - WIENER_FILT_TAP1_MINV, |
| 3063 | WIENER_FILT_TAP1_BITS); |
| 3064 | aom_write_literal(wb, wiener_info->hfilter[2] - WIENER_FILT_TAP2_MINV, |
| 3065 | WIENER_FILT_TAP2_BITS); |
| 3066 | } |
| 3067 | |
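// Writes the self-guided projection filter parameters: the ep parameter index
// and the two projection coefficients xqd[0] and xqd[1], offset by their
// minimum values.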
| 3068 | static void write_sgrproj_filter(SgrprojInfo *sgrproj_info, aom_writer *wb) { |
| 3069 | aom_write_literal(wb, sgrproj_info->ep, SGRPROJ_PARAMS_BITS); |
| 3070 | aom_write_literal(wb, sgrproj_info->xqd[0] - SGRPROJ_PRJ_MIN0, |
| 3071 | SGRPROJ_PRJ_BITS); |
| 3072 | aom_write_literal(wb, sgrproj_info->xqd[1] - SGRPROJ_PRJ_MIN1, |
| 3073 | SGRPROJ_PRJ_BITS); |
| 3074 | } |
| 3075 | |
Debargha Mukherjee | 3981be9 | 2016-11-21 09:35:44 -0800 | [diff] [blame] | 3076 | static void write_domaintxfmrf_filter(DomaintxfmrfInfo *domaintxfmrf_info, |
| 3077 | aom_writer *wb) { |
| 3078 | aom_write_literal(wb, domaintxfmrf_info->sigma_r, DOMAINTXFMRF_PARAMS_BITS); |
| 3079 | } |
| 3080 | |
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 3081 | static void encode_restoration(AV1_COMMON *cm, aom_writer *wb) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3082 | int i; |
Debargha Mukherjee | 5d89a63 | 2016-09-17 13:16:58 -0700 | [diff] [blame] | 3083 | RestorationInfo *rsi = &cm->rst_info; |
| 3084 | if (rsi->frame_restoration_type != RESTORE_NONE) { |
| 3085 | if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) { |
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 3086 | // RESTORE_SWITCHABLE |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3087 | for (i = 0; i < cm->rst_internal.ntiles; ++i) { |
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 3088 | av1_write_token( |
clang-format | bda8d61 | 2016-09-19 15:55:46 -0700 | [diff] [blame] | 3089 | wb, av1_switchable_restore_tree, cm->fc->switchable_restore_prob, |
Debargha Mukherjee | 5d89a63 | 2016-09-17 13:16:58 -0700 | [diff] [blame] | 3090 | &switchable_restore_encodings[rsi->restoration_type[i]]); |
Debargha Mukherjee | 0e67b25 | 2016-12-08 09:22:44 -0800 | [diff] [blame] | 3091 | if (rsi->restoration_type[i] == RESTORE_WIENER) { |
Debargha Mukherjee | 8f209a8 | 2016-10-12 10:47:01 -0700 | [diff] [blame] | 3092 | write_wiener_filter(&rsi->wiener_info[i], wb); |
| 3093 | } else if (rsi->restoration_type[i] == RESTORE_SGRPROJ) { |
| 3094 | write_sgrproj_filter(&rsi->sgrproj_info[i], wb); |
Debargha Mukherjee | 3981be9 | 2016-11-21 09:35:44 -0800 | [diff] [blame] | 3095 | } else if (rsi->restoration_type[i] == RESTORE_DOMAINTXFMRF) { |
| 3096 | write_domaintxfmrf_filter(&rsi->domaintxfmrf_info[i], wb); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3097 | } |
| 3098 | } |
Debargha Mukherjee | 5d89a63 | 2016-09-17 13:16:58 -0700 | [diff] [blame] | 3099 | } else if (rsi->frame_restoration_type == RESTORE_WIENER) { |
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 3100 | for (i = 0; i < cm->rst_internal.ntiles; ++i) { |
Debargha Mukherjee | 5d89a63 | 2016-09-17 13:16:58 -0700 | [diff] [blame] | 3101 | aom_write(wb, rsi->wiener_info[i].level != 0, RESTORE_NONE_WIENER_PROB); |
| 3102 | if (rsi->wiener_info[i].level) { |
Debargha Mukherjee | 8f209a8 | 2016-10-12 10:47:01 -0700 | [diff] [blame] | 3103 | write_wiener_filter(&rsi->wiener_info[i], wb); |
| 3104 | } |
| 3105 | } |
| 3106 | } else if (rsi->frame_restoration_type == RESTORE_SGRPROJ) { |
| 3107 | for (i = 0; i < cm->rst_internal.ntiles; ++i) { |
| 3108 | aom_write(wb, rsi->sgrproj_info[i].level != 0, |
| 3109 | RESTORE_NONE_SGRPROJ_PROB); |
| 3110 | if (rsi->sgrproj_info[i].level) { |
| 3111 | write_sgrproj_filter(&rsi->sgrproj_info[i], wb); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3112 | } |
| 3113 | } |
Debargha Mukherjee | 3981be9 | 2016-11-21 09:35:44 -0800 | [diff] [blame] | 3114 | } else if (rsi->frame_restoration_type == RESTORE_DOMAINTXFMRF) { |
| 3115 | for (i = 0; i < cm->rst_internal.ntiles; ++i) { |
| 3116 | aom_write(wb, rsi->domaintxfmrf_info[i].level != 0, |
| 3117 | RESTORE_NONE_DOMAINTXFMRF_PROB); |
| 3118 | if (rsi->domaintxfmrf_info[i].level) { |
| 3119 | write_domaintxfmrf_filter(&rsi->domaintxfmrf_info[i], wb); |
| 3120 | } |
| 3121 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3122 | } |
| 3123 | } |
| 3124 | } |
| 3125 | #endif // CONFIG_LOOP_RESTORATION |
| 3126 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3127 | static void encode_loopfilter(AV1_COMMON *cm, struct aom_write_bit_buffer *wb) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3128 | int i; |
| 3129 | struct loopfilter *lf = &cm->lf; |
| 3130 | |
| 3131 | // Encode the loop filter level and type |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3132 | aom_wb_write_literal(wb, lf->filter_level, 6); |
| 3133 | aom_wb_write_literal(wb, lf->sharpness_level, 3); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3134 | |
| 3135 | // Write out loop filter deltas applied at the MB level based on mode or |
| 3136 | // ref frame (if they are enabled). |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3137 | aom_wb_write_bit(wb, lf->mode_ref_delta_enabled); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3138 | |
| 3139 | if (lf->mode_ref_delta_enabled) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3140 | aom_wb_write_bit(wb, lf->mode_ref_delta_update); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3141 | if (lf->mode_ref_delta_update) { |
| 3142 | for (i = 0; i < TOTAL_REFS_PER_FRAME; i++) { |
| 3143 | const int delta = lf->ref_deltas[i]; |
| 3144 | const int changed = delta != lf->last_ref_deltas[i]; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3145 | aom_wb_write_bit(wb, changed); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3146 | if (changed) { |
| 3147 | lf->last_ref_deltas[i] = delta; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3148 | aom_wb_write_inv_signed_literal(wb, delta, 6); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3149 | } |
| 3150 | } |
| 3151 | |
| 3152 | for (i = 0; i < MAX_MODE_LF_DELTAS; i++) { |
| 3153 | const int delta = lf->mode_deltas[i]; |
| 3154 | const int changed = delta != lf->last_mode_deltas[i]; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3155 | aom_wb_write_bit(wb, changed); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3156 | if (changed) { |
| 3157 | lf->last_mode_deltas[i] = delta; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3158 | aom_wb_write_inv_signed_literal(wb, delta, 6); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3159 | } |
| 3160 | } |
| 3161 | } |
| 3162 | } |
| 3163 | } |
| 3164 | |
| 3165 | #if CONFIG_CLPF |
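// Writes the 2-bit CLPF strengths for the Y, U and V planes, plus the 2-bit
// clpf_size field when the luma strength is nonzero.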
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3166 | static void encode_clpf(const AV1_COMMON *cm, struct aom_write_bit_buffer *wb) { |
Steinar Midtskogen | ecf9a0c | 2016-09-13 16:37:13 +0200 | [diff] [blame] | 3167 | aom_wb_write_literal(wb, cm->clpf_strength_y, 2); |
| 3168 | aom_wb_write_literal(wb, cm->clpf_strength_u, 2); |
| 3169 | aom_wb_write_literal(wb, cm->clpf_strength_v, 2); |
| 3170 | if (cm->clpf_strength_y) { |
Steinar Midtskogen | d06588a | 2016-05-06 13:48:20 +0200 | [diff] [blame] | 3171 | aom_wb_write_literal(wb, cm->clpf_size, 2); |
Steinar Midtskogen | d06588a | 2016-05-06 13:48:20 +0200 | [diff] [blame] | 3172 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3173 | } |
| 3174 | #endif |
| 3175 | |
| 3176 | #if CONFIG_DERING |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3177 | static void encode_dering(int level, struct aom_write_bit_buffer *wb) { |
| 3178 | aom_wb_write_literal(wb, level, DERING_LEVEL_BITS); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3179 | } |
| 3180 | #endif // CONFIG_DERING |
| 3181 | |
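// Writes an optional delta-Q value: a flag bit, followed by a 6-bit
// inverse-signed literal when the delta is nonzero.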
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3182 | static void write_delta_q(struct aom_write_bit_buffer *wb, int delta_q) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3183 | if (delta_q != 0) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3184 | aom_wb_write_bit(wb, 1); |
| 3185 | aom_wb_write_inv_signed_literal(wb, delta_q, 6); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3186 | } else { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3187 | aom_wb_write_bit(wb, 0); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3188 | } |
| 3189 | } |
| 3190 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3191 | static void encode_quantization(const AV1_COMMON *const cm, |
| 3192 | struct aom_write_bit_buffer *wb) { |
| 3193 | aom_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3194 | write_delta_q(wb, cm->y_dc_delta_q); |
| 3195 | write_delta_q(wb, cm->uv_dc_delta_q); |
| 3196 | write_delta_q(wb, cm->uv_ac_delta_q); |
| 3197 | #if CONFIG_AOM_QM |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3198 | aom_wb_write_bit(wb, cm->using_qmatrix); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3199 | if (cm->using_qmatrix) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3200 | aom_wb_write_literal(wb, cm->min_qmlevel, QM_LEVEL_BITS); |
| 3201 | aom_wb_write_literal(wb, cm->max_qmlevel, QM_LEVEL_BITS); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3202 | } |
| 3203 | #endif |
| 3204 | } |
| 3205 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3206 | static void encode_segmentation(AV1_COMMON *cm, MACROBLOCKD *xd, |
| 3207 | struct aom_write_bit_buffer *wb) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3208 | int i, j; |
| 3209 | const struct segmentation *seg = &cm->seg; |
| 3210 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3211 | aom_wb_write_bit(wb, seg->enabled); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3212 | if (!seg->enabled) return; |
| 3213 | |
| 3214 | // Segmentation map |
| 3215 | if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3216 | aom_wb_write_bit(wb, seg->update_map); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3217 | } else { |
| 3218 | assert(seg->update_map == 1); |
| 3219 | } |
| 3220 | if (seg->update_map) { |
| 3221 | // Select the coding strategy (temporal or spatial) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3222 | av1_choose_segmap_coding_method(cm, xd); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3223 | |
| 3224 | // Write out the chosen coding method. |
| 3225 | if (!frame_is_intra_only(cm) && !cm->error_resilient_mode) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3226 | aom_wb_write_bit(wb, seg->temporal_update); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3227 | } else { |
| 3228 | assert(seg->temporal_update == 0); |
| 3229 | } |
| 3230 | } |
| 3231 | |
| 3232 | // Segmentation data |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3233 | aom_wb_write_bit(wb, seg->update_data); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3234 | if (seg->update_data) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3235 | aom_wb_write_bit(wb, seg->abs_delta); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3236 | |
| 3237 | for (i = 0; i < MAX_SEGMENTS; i++) { |
| 3238 | for (j = 0; j < SEG_LVL_MAX; j++) { |
| 3239 | const int active = segfeature_active(seg, i, j); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3240 | aom_wb_write_bit(wb, active); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3241 | if (active) { |
| 3242 | const int data = get_segdata(seg, i, j); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3243 | const int data_max = av1_seg_feature_data_max(j); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3244 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3245 | if (av1_is_segfeature_signed(j)) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3246 | encode_unsigned_max(wb, abs(data), data_max); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3247 | aom_wb_write_bit(wb, data < 0); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3248 | } else { |
| 3249 | encode_unsigned_max(wb, data, data_max); |
| 3250 | } |
| 3251 | } |
| 3252 | } |
| 3253 | } |
| 3254 | } |
| 3255 | } |
| 3256 | |
Nathan E. Egge | baaaa16 | 2016-10-24 09:50:52 -0400 | [diff] [blame] | 3257 | #if !CONFIG_EC_ADAPT |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3258 | static void update_seg_probs(AV1_COMP *cpi, aom_writer *w) { |
| 3259 | AV1_COMMON *cm = &cpi->common; |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3260 | #if CONFIG_TILE_GROUPS |
| 3261 | const int probwt = cm->num_tg; |
| 3262 | #else |
| 3263 | const int probwt = 1; |
| 3264 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3265 | |
| 3266 | if (!cm->seg.enabled || !cm->seg.update_map) return; |
| 3267 | |
| 3268 | if (cm->seg.temporal_update) { |
| 3269 | int i; |
| 3270 | |
| 3271 | for (i = 0; i < PREDICTION_PROBS; i++) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3272 | av1_cond_prob_diff_update(w, &cm->fc->seg.pred_probs[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3273 | cm->counts.seg.pred[i], probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3274 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3275 | prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs, |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3276 | cm->counts.seg.tree_mispred, MAX_SEGMENTS, probwt, w); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3277 | } else { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3278 | prob_diff_update(av1_segment_tree, cm->fc->seg.tree_probs, |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3279 | cm->counts.seg.tree_total, MAX_SEGMENTS, probwt, w); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3280 | } |
| 3281 | } |
Thomas Davies | 6519beb | 2016-10-19 14:46:07 +0100 | [diff] [blame] | 3282 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3283 | |
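// Signals the frame transform mode: a single bit for TX_MODE_SELECT;
// otherwise the mode is coded as a 2-bit literal (with an extra bit to
// distinguish ALLOW_64X64 when CONFIG_TX64X64 is enabled).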
Debargha Mukherjee | 18d38f6 | 2016-11-17 20:30:16 -0800 | [diff] [blame] | 3284 | static void write_tx_mode(TX_MODE mode, struct aom_write_bit_buffer *wb) { |
| 3285 | #if CONFIG_TX64X64 |
| 3286 | aom_wb_write_bit(wb, mode == TX_MODE_SELECT); |
| 3287 | if (mode != TX_MODE_SELECT) { |
| 3288 | aom_wb_write_literal(wb, AOMMIN(mode, ALLOW_32X32), 2); |
| 3289 | if (mode >= ALLOW_32X32) aom_wb_write_bit(wb, mode == ALLOW_64X64); |
| 3290 | } |
| 3291 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3292 | aom_wb_write_bit(wb, mode == TX_MODE_SELECT); |
| 3293 | if (mode != TX_MODE_SELECT) aom_wb_write_literal(wb, mode, 2); |
Debargha Mukherjee | 18d38f6 | 2016-11-17 20:30:16 -0800 | [diff] [blame] | 3294 | #endif // CONFIG_TX64X64 |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3295 | } |
| 3296 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3297 | static void update_txfm_probs(AV1_COMMON *cm, aom_writer *w, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3298 | FRAME_COUNTS *counts) { |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3299 | #if CONFIG_TILE_GROUPS |
| 3300 | const int probwt = cm->num_tg; |
| 3301 | #else |
| 3302 | const int probwt = 1; |
| 3303 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3304 | if (cm->tx_mode == TX_MODE_SELECT) { |
| 3305 | int i, j; |
Jingning Han | aae72a6 | 2016-10-25 15:35:29 -0700 | [diff] [blame] | 3306 | for (i = 0; i < MAX_TX_DEPTH; ++i) |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3307 | for (j = 0; j < TX_SIZE_CONTEXTS; ++j) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3308 | prob_diff_update(av1_tx_size_tree[i], cm->fc->tx_size_probs[i][j], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3309 | counts->tx_size[i][j], i + 2, probwt, w); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3310 | } |
| 3311 | } |
| 3312 | |
Angie Chiang | 5678ad9 | 2016-11-21 09:38:40 -0800 | [diff] [blame] | 3313 | static void write_frame_interp_filter(InterpFilter filter, |
| 3314 | struct aom_write_bit_buffer *wb) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3315 | aom_wb_write_bit(wb, filter == SWITCHABLE); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3316 | if (filter != SWITCHABLE) |
Angie Chiang | 6305abe | 2016-10-24 12:24:44 -0700 | [diff] [blame] | 3317 | aom_wb_write_literal(wb, filter, LOG_SWITCHABLE_FILTERS); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3318 | } |
| 3319 | |
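// If the switchable-filter counts show that only one interpolation filter was
// actually used, replace SWITCHABLE with that filter so it can be signalled
// once at the frame level.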
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3320 | static void fix_interp_filter(AV1_COMMON *cm, FRAME_COUNTS *counts) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3321 | if (cm->interp_filter == SWITCHABLE) { |
| 3322 | // Check to see if only one of the filters is actually used |
| 3323 | int count[SWITCHABLE_FILTERS]; |
| 3324 | int i, j, c = 0; |
| 3325 | for (i = 0; i < SWITCHABLE_FILTERS; ++i) { |
| 3326 | count[i] = 0; |
| 3327 | for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) |
| 3328 | count[i] += counts->switchable_interp[j][i]; |
| 3329 | c += (count[i] > 0); |
| 3330 | } |
| 3331 | if (c == 1) { |
| 3332 | // Only one filter is used. So set the filter at frame level |
| 3333 | for (i = 0; i < SWITCHABLE_FILTERS; ++i) { |
| 3334 | if (count[i]) { |
| 3335 | cm->interp_filter = i; |
| 3336 | break; |
| 3337 | } |
| 3338 | } |
| 3339 | } |
| 3340 | } |
| 3341 | } |
| 3342 | |
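// Writes the tile configuration: with CONFIG_EXT_TILE the tile width and
// height in superblocks are coded explicitly; otherwise the log2 tile column
// and row counts are coded. In either case the loop_filter_across_tiles flag
// is written when CONFIG_DEBLOCKING_ACROSS_TILES is enabled.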
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3343 | static void write_tile_info(const AV1_COMMON *const cm, |
| 3344 | struct aom_write_bit_buffer *wb) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3345 | #if CONFIG_EXT_TILE |
| 3346 | const int tile_width = |
| 3347 | ALIGN_POWER_OF_TWO(cm->tile_width, cm->mib_size_log2) >> |
| 3348 | cm->mib_size_log2; |
| 3349 | const int tile_height = |
| 3350 | ALIGN_POWER_OF_TWO(cm->tile_height, cm->mib_size_log2) >> |
| 3351 | cm->mib_size_log2; |
| 3352 | |
| 3353 | assert(tile_width > 0); |
| 3354 | assert(tile_height > 0); |
| 3355 | |
| 3356 | // Write the tile sizes |
| 3357 | #if CONFIG_EXT_PARTITION |
| 3358 | if (cm->sb_size == BLOCK_128X128) { |
| 3359 | assert(tile_width <= 32); |
| 3360 | assert(tile_height <= 32); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3361 | aom_wb_write_literal(wb, tile_width - 1, 5); |
| 3362 | aom_wb_write_literal(wb, tile_height - 1, 5); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3363 | } else |
| 3364 | #endif // CONFIG_EXT_PARTITION |
| 3365 | { |
| 3366 | assert(tile_width <= 64); |
| 3367 | assert(tile_height <= 64); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3368 | aom_wb_write_literal(wb, tile_width - 1, 6); |
| 3369 | aom_wb_write_literal(wb, tile_height - 1, 6); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3370 | } |
| 3371 | #else |
| 3372 | int min_log2_tile_cols, max_log2_tile_cols, ones; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3373 | av1_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3374 | |
| 3375 | // columns |
| 3376 | ones = cm->log2_tile_cols - min_log2_tile_cols; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3377 | while (ones--) aom_wb_write_bit(wb, 1); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3378 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3379 | if (cm->log2_tile_cols < max_log2_tile_cols) aom_wb_write_bit(wb, 0); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3380 | |
| 3381 | // rows |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3382 | aom_wb_write_bit(wb, cm->log2_tile_rows != 0); |
| 3383 | if (cm->log2_tile_rows != 0) aom_wb_write_bit(wb, cm->log2_tile_rows != 1); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3384 | #endif // CONFIG_EXT_TILE |
Ryan Lei | 7386eda | 2016-12-08 21:08:31 -0800 | [diff] [blame] | 3385 | |
| 3386 | #if CONFIG_DEBLOCKING_ACROSS_TILES |
| 3387 | aom_wb_write_bit(wb, cm->loop_filter_across_tiles_enabled); |
| 3388 | #endif // CONFIG_DEBLOCKING_ACROSS_TILES |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3389 | } |
| 3390 | |
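// Builds the bitmask of reference frame buffers that the current coded frame
// will refresh, taking into account the LAST-frame virtual index rotation and
// the golden/ARF swap described below.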
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3391 | static int get_refresh_mask(AV1_COMP *cpi) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3392 | int refresh_mask = 0; |
| 3393 | |
| 3394 | #if CONFIG_EXT_REFS |
| 3395 | // NOTE(zoeliu): When LAST_FRAME is to get refreshed, the decoder will be |
| 3396 | // notified to get LAST3_FRAME refreshed and then the virtual indexes for all |
| 3397 | // the 3 LAST reference frames will be updated accordingly, i.e.: |
| 3398 | // (1) The original virtual index for LAST3_FRAME will become the new virtual |
| 3399 | // index for LAST_FRAME; and |
| 3400 | // (2) The original virtual indexes for LAST_FRAME and LAST2_FRAME will be |
| 3401 | // shifted and become the new virtual indexes for LAST2_FRAME and |
| 3402 | // LAST3_FRAME. |
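// For example, if the virtual indexes were (LAST, LAST2, LAST3) = (0, 1, 2)
// before the refresh, they become (2, 0, 1) afterwards.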
| 3403 | refresh_mask |= |
| 3404 | (cpi->refresh_last_frame << cpi->lst_fb_idxes[LAST_REF_FRAMES - 1]); |
| 3405 | if (cpi->rc.is_bwd_ref_frame && cpi->num_extra_arfs) { |
| 3406 | // We have swapped the virtual indices |
| 3407 | refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->arf_map[0]); |
| 3408 | } else { |
| 3409 | refresh_mask |= (cpi->refresh_bwd_ref_frame << cpi->bwd_fb_idx); |
| 3410 | } |
| 3411 | #else |
| 3412 | refresh_mask |= (cpi->refresh_last_frame << cpi->lst_fb_idx); |
| 3413 | #endif // CONFIG_EXT_REFS |
| 3414 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3415 | if (av1_preserve_existing_gf(cpi)) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3416 | // We have decided to preserve the previously existing golden frame as our |
| 3417 | // new ARF frame. However, in the short term we leave it in the GF slot and, |
| 3418 | // if we're updating the GF with the current decoded frame, we save it |
| 3419 | // instead to the ARF slot. |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3420 | // Later, in the function av1_encoder.c:av1_update_reference_frames() we |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3421 | // will swap gld_fb_idx and alt_fb_idx to achieve our objective. We do it |
| 3422 | // there so that it can be done outside of the recode loop. |
| 3423 | // Note: This is highly specific to the use of ARF as a forward reference, |
| 3424 | // and this needs to be generalized as other uses are implemented |
| 3425 | // (like RTC/temporal scalability). |
| 3426 | return refresh_mask | (cpi->refresh_golden_frame << cpi->alt_fb_idx); |
| 3427 | } else { |
| 3428 | int arf_idx = cpi->alt_fb_idx; |
| 3429 | #if CONFIG_EXT_REFS |
| 3430 | const GF_GROUP *const gf_group = &cpi->twopass.gf_group; |
| 3431 | arf_idx = cpi->arf_map[gf_group->arf_update_idx[gf_group->index]]; |
| 3432 | #else |
| 3433 | if ((cpi->oxcf.pass == 2) && cpi->multi_arf_allowed) { |
| 3434 | const GF_GROUP *const gf_group = &cpi->twopass.gf_group; |
| 3435 | arf_idx = gf_group->arf_update_idx[gf_group->index]; |
| 3436 | } |
| 3437 | #endif // CONFIG_EXT_REFS |
| 3438 | return refresh_mask | (cpi->refresh_golden_frame << cpi->gld_fb_idx) | |
| 3439 | (cpi->refresh_alt_ref_frame << arf_idx); |
| 3440 | } |
| 3441 | } |
| 3442 | |
| 3443 | #if CONFIG_EXT_TILE |
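// Searches previously coded tiles (currently only the tile directly above)
// for one whose coded data is byte-identical to the current tile. Returns the
// row offset of the match so the current tile can be coded as a copy tile, or
// 0 if no identical tile is found.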
| 3444 | static INLINE int find_identical_tile( |
| 3445 | const int tile_row, const int tile_col, |
| 3446 | TileBufferEnc (*const tile_buffers)[1024]) { |
| 3447 | const MV32 candidate_offset[1] = { { 1, 0 } }; |
| 3448 | const uint8_t *const cur_tile_data = |
| 3449 | tile_buffers[tile_row][tile_col].data + 4; |
| 3450 | const unsigned int cur_tile_size = tile_buffers[tile_row][tile_col].size; |
| 3451 | |
| 3452 | int i; |
| 3453 | |
| 3454 | if (tile_row == 0) return 0; |
| 3455 | |
| 3456 |   // TODO(yunqingwang): For now, only the above tile is checked and used.
| 3457 |   // More candidates, such as the left tile, can be added later.
| 3458 | for (i = 0; i < 1; i++) { |
| 3459 | int row_offset = candidate_offset[0].row; |
| 3460 | int col_offset = candidate_offset[0].col; |
| 3461 | int row = tile_row - row_offset; |
| 3462 | int col = tile_col - col_offset; |
| 3463 | uint8_t tile_hdr; |
| 3464 | const uint8_t *tile_data; |
| 3465 | TileBufferEnc *candidate; |
| 3466 | |
| 3467 | if (row < 0 || col < 0) continue; |
| 3468 | |
| 3469 | tile_hdr = *(tile_buffers[row][col].data); |
| 3470 | |
| 3471 | // Read out tcm bit |
| 3472 | if ((tile_hdr >> 7) == 1) { |
| 3473 | // The candidate is a copy tile itself |
| 3474 | row_offset += tile_hdr & 0x7f; |
| 3475 | row = tile_row - row_offset; |
| 3476 | } |
| 3477 | |
| 3478 | candidate = &tile_buffers[row][col]; |
| 3479 | |
| 3480 | if (row_offset >= 128 || candidate->size != cur_tile_size) continue; |
| 3481 | |
| 3482 | tile_data = candidate->data + 4; |
| 3483 | |
| 3484 | if (memcmp(tile_data, cur_tile_data, cur_tile_size) != 0) continue; |
| 3485 | |
| 3486 | // Identical tile found |
| 3487 | assert(row_offset > 0); |
| 3488 | return row_offset; |
| 3489 | } |
| 3490 | |
| 3491 | // No identical tile found |
| 3492 | return 0; |
| 3493 | } |
| 3494 | #endif // CONFIG_EXT_TILE |
| 3495 | |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3496 | #if CONFIG_TILE_GROUPS |
| 3497 | static uint32_t write_tiles(AV1_COMP *const cpi, |
| 3498 | struct aom_write_bit_buffer *wb, |
| 3499 | unsigned int *max_tile_size, |
| 3500 | unsigned int *max_tile_col_size) { |
| 3501 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3502 | static uint32_t write_tiles(AV1_COMP *const cpi, uint8_t *const dst, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3503 | unsigned int *max_tile_size, |
| 3504 | unsigned int *max_tile_col_size) { |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3505 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3506 | const AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3507 | #if CONFIG_ANS |
Alex Converse | 2a1b3af | 2016-10-26 13:11:26 -0700 | [diff] [blame] | 3508 | struct BufAnsCoder *buf_ans = &cpi->buf_ans; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3509 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3510 | aom_writer mode_bc; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3511 | #endif // CONFIG_ANS |
| 3512 | int tile_row, tile_col; |
| 3513 | TOKENEXTRA *(*const tok_buffers)[MAX_TILE_COLS] = cpi->tile_tok; |
clang-format | 67948d3 | 2016-09-07 22:40:40 -0700 | [diff] [blame] | 3514 | TileBufferEnc(*const tile_buffers)[MAX_TILE_COLS] = cpi->tile_buffers; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3515 | size_t total_size = 0; |
| 3516 | const int tile_cols = cm->tile_cols; |
| 3517 | const int tile_rows = cm->tile_rows; |
Thomas Davies | af6df17 | 2016-11-09 14:04:18 +0000 | [diff] [blame] | 3518 | unsigned int tile_size = 0; |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3519 | #if CONFIG_TILE_GROUPS |
| 3520 | const int n_log2_tiles = cm->log2_tile_rows + cm->log2_tile_cols; |
| 3521 | const int have_tiles = n_log2_tiles > 0; |
| 3522 | size_t comp_hdr_size; |
| 3523 | // Fixed size tile groups for the moment |
| 3524 | const int num_tg_hdrs = cm->num_tg; |
| 3525 | const int tg_size = (tile_rows * tile_cols + num_tg_hdrs - 1) / num_tg_hdrs; |
| 3526 | int tile_count = 0; |
| 3527 | int uncompressed_hdr_size = 0; |
| 3528 | uint8_t *dst = NULL; |
| 3529 | struct aom_write_bit_buffer comp_hdr_len_wb; |
| 3530 | struct aom_write_bit_buffer tg_params_wb; |
| 3531 | int saved_offset; |
Thomas Davies | af6df17 | 2016-11-09 14:04:18 +0000 | [diff] [blame] | 3532 | int mtu_size = cpi->oxcf.mtu; |
| 3533 | int curr_tg_data_size = 0; |
| 3534 | int hdr_size; |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3535 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3536 | #if CONFIG_EXT_TILE |
| 3537 | const int have_tiles = tile_cols * tile_rows > 1; |
| 3538 | #endif // CONFIG_EXT_TILE |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3539 | |
| 3540 | *max_tile_size = 0; |
| 3541 | *max_tile_col_size = 0; |
| 3542 | |
| 3543 | // All tile size fields are output on 4 bytes. A call to remux_tiles will |
| 3544 | // later compact the data if smaller headers are adequate. |
| 3545 | |
| 3546 | #if CONFIG_EXT_TILE |
| 3547 | for (tile_col = 0; tile_col < tile_cols; tile_col++) { |
| 3548 | TileInfo tile_info; |
| 3549 | const int is_last_col = (tile_col == tile_cols - 1); |
| 3550 | const size_t col_offset = total_size; |
| 3551 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3552 | av1_tile_set_col(&tile_info, cm, tile_col); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3553 | |
| 3554 | // The last column does not have a column header |
| 3555 | if (!is_last_col) total_size += 4; |
| 3556 | |
| 3557 | for (tile_row = 0; tile_row < tile_rows; tile_row++) { |
| 3558 | TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col]; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3559 | const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col]; |
| 3560 | const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col]; |
| 3561 | const int data_offset = have_tiles ? 4 : 0; |
| 3562 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3563 | av1_tile_set_row(&tile_info, cm, tile_row); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3564 | |
| 3565 | buf->data = dst + total_size; |
| 3566 | |
| 3567 |       // If CONFIG_EXT_TILE = 1, every tile in the row has a header,
| 3568 |       // even the last one, unless no tiling is used at all.
| 3569 | total_size += data_offset; |
| 3570 | #if !CONFIG_ANS |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3571 | aom_start_encode(&mode_bc, buf->data + data_offset); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3572 | write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end); |
| 3573 | assert(tok == tok_end); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3574 | aom_stop_encode(&mode_bc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3575 | tile_size = mode_bc.pos; |
| 3576 | #else |
Alex Converse | 2a1b3af | 2016-10-26 13:11:26 -0700 | [diff] [blame] | 3577 | buf_ans_write_init(buf_ans, buf->data + data_offset); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3578 | write_modes(cpi, &tile_info, buf_ans, &tok, tok_end); |
| 3579 | assert(tok == tok_end); |
Alex Converse | 1ecdf2b | 2016-11-30 15:51:12 -0800 | [diff] [blame] | 3580 | aom_buf_ans_flush(buf_ans); |
Alex Converse | 2a1b3af | 2016-10-26 13:11:26 -0700 | [diff] [blame] | 3581 | tile_size = buf_ans_write_end(buf_ans); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3582 | #endif // !CONFIG_ANS |
| 3583 | |
| 3584 | buf->size = tile_size; |
| 3585 | |
| 3586 | // Record the maximum tile size we see, so we can compact headers later. |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3587 | *max_tile_size = AOMMAX(*max_tile_size, tile_size); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3588 | |
| 3589 | if (have_tiles) { |
| 3590 | // tile header: size of this tile, or copy offset |
| 3591 | uint32_t tile_header = tile_size; |
| 3592 | |
| 3593 | // Check if this tile is a copy tile. |
| 3594 | // Copy tiles are very unlikely on key frames, so skip the search |
| 3595 | // there to avoid unnecessary work. |
| 3596 | if (cm->frame_type != KEY_FRAME) { |
| 3597 | const int identical_tile_offset = |
| 3598 | find_identical_tile(tile_row, tile_col, tile_buffers); |
| 3599 |  |
| 3600 | if (identical_tile_offset > 0) { |
| 3601 | tile_size = 0; |
| 3602 | tile_header = identical_tile_offset | 0x80; |
| 3603 | tile_header <<= 24; |
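| | // The copy offset now occupies the top byte, with bit 31 set as the copy flag; the low 24 bits remain zero. |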
| 3604 | } |
| 3605 | } |
| 3606 | |
| 3607 | mem_put_le32(buf->data, tile_header); |
| 3608 | } |
| 3609 | |
| 3610 | total_size += tile_size; |
| 3611 | } |
| 3612 | |
| 3613 | if (!is_last_col) { |
| 3614 | size_t col_size = total_size - col_offset - 4; |
| 3615 | mem_put_le32(dst + col_offset, col_size); |
| 3616 | |
| 3617 | // If this is not the final packing, record the maximum tile column size |
| 3618 | // we see; otherwise, check whether the tile size is out of range. |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3619 | *max_tile_col_size = AOMMAX(*max_tile_col_size, col_size); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3620 | } |
| 3621 | } |
| 3622 | #else |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3623 | #if CONFIG_TILE_GROUPS |
| 3624 | write_uncompressed_header(cpi, wb); |
| 3625 | |
| 3626 | // Write the tile length code. Use full 32-bit length fields for the moment. |
Thomas Davies | 4974e52 | 2016-11-07 17:44:05 +0000 | [diff] [blame] | 3627 | aom_wb_write_literal(wb, 3, 2); |
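| | // The literal 3 is presumably the size code selecting 4-byte tile length fields, matching the note above. |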
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3628 | |
| 3629 | /* Write a placeholder for the number of tiles in each tile group */ |
| 3630 | tg_params_wb = *wb; |
| 3631 | saved_offset = wb->bit_offset; |
Thomas Davies | 4974e52 | 2016-11-07 17:44:05 +0000 | [diff] [blame] | 3632 | if (have_tiles) aom_wb_write_literal(wb, 0, n_log2_tiles * 2); |
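| | // The placeholder reserves 2 * n_log2_tiles bits: the first tile index and (tile count - 1) of a tile group. |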
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3633 | |
| 3634 | /* Write a placeholder for the compressed header length */ |
| 3635 | comp_hdr_len_wb = *wb; |
| 3636 | aom_wb_write_literal(wb, 0, 16); |
| 3637 | |
| 3638 | uncompressed_hdr_size = aom_wb_bytes_written(wb); |
| 3639 | dst = wb->bit_buffer; |
| 3640 | comp_hdr_size = write_compressed_header(cpi, dst + uncompressed_hdr_size); |
Thomas Davies | faa7fcf | 2016-11-14 11:59:43 +0000 | [diff] [blame] | 3641 | aom_wb_overwrite_literal(&comp_hdr_len_wb, (int)(comp_hdr_size), 16); |
Thomas Davies | af6df17 | 2016-11-09 14:04:18 +0000 | [diff] [blame] | 3642 | hdr_size = uncompressed_hdr_size + comp_hdr_size; |
| 3643 | total_size += hdr_size; |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3644 | #endif |
| 3645 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3646 | for (tile_row = 0; tile_row < tile_rows; tile_row++) { |
| 3647 | TileInfo tile_info; |
Yaowu Xu | 8acaa55 | 2016-11-21 09:50:22 -0800 | [diff] [blame] | 3648 | #if !CONFIG_TILE_GROUPS |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3649 | const int is_last_row = (tile_row == tile_rows - 1); |
Yaowu Xu | 8acaa55 | 2016-11-21 09:50:22 -0800 | [diff] [blame] | 3650 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3651 | av1_tile_set_row(&tile_info, cm, tile_row); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3652 | |
| 3653 | for (tile_col = 0; tile_col < tile_cols; tile_col++) { |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 3654 | const int tile_idx = tile_row * tile_cols + tile_col; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3655 | TileBufferEnc *const buf = &tile_buffers[tile_row][tile_col]; |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 3656 | #if CONFIG_PVQ |
| 3657 | TileDataEnc *this_tile = &cpi->tile_data[tile_idx]; |
| 3658 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3659 | const TOKENEXTRA *tok = tok_buffers[tile_row][tile_col]; |
| 3660 | const TOKENEXTRA *tok_end = tok + cpi->tok_count[tile_row][tile_col]; |
Thomas Davies | 8fe64a3 | 2016-10-04 13:19:31 +0100 | [diff] [blame] | 3661 | #if !CONFIG_TILE_GROUPS |
Yaowu Xu | 8acaa55 | 2016-11-21 09:50:22 -0800 | [diff] [blame] | 3662 | const int is_last_col = (tile_col == tile_cols - 1); |
Thomas Davies | 8fe64a3 | 2016-10-04 13:19:31 +0100 | [diff] [blame] | 3663 | const int is_last_tile = is_last_col && is_last_row; |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 3664 | (void)tile_idx; |
Thomas Davies | 8fe64a3 | 2016-10-04 13:19:31 +0100 | [diff] [blame] | 3665 | #else |
Thomas Davies | 8fe64a3 | 2016-10-04 13:19:31 +0100 | [diff] [blame] | 3666 | // All tiles in a tile group have a length |
| 3667 | const int is_last_tile = 0; |
Thomas Davies | af6df17 | 2016-11-09 14:04:18 +0000 | [diff] [blame] | 3668 | |
| 3669 | if ((!mtu_size && tile_count > tg_size) || |
| 3670 | (mtu_size && tile_count && curr_tg_data_size >= mtu_size)) { |
| 3671 | // We've exceeded the packet size |
| 3672 | if (tile_count > 1) { |
| 3673 | /* The last tile exceeded the packet size. The tile group size |
| 3674 | should therefore be tile_count-1. |
| 3675 | Move the last tile and insert headers before it |
| 3676 | */ |
| 3677 | int old_total_size = total_size - tile_size - 4; |
| 3678 | memmove(dst + old_total_size + hdr_size, dst + old_total_size, |
| 3679 | (tile_size + 4) * sizeof(uint8_t)); |
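| | // The last tile and its 4-byte size field now start hdr_size bytes later, leaving room for a new header pair. |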
| 3680 | // Copy uncompressed header |
| 3681 | memmove(dst + old_total_size, dst, |
| 3682 | uncompressed_hdr_size * sizeof(uint8_t)); |
| 3683 | // Write the number of tiles in the group into the last uncompressed |
| 3684 | // header before the one we've just inserted |
Thomas Davies | faa7fcf | 2016-11-14 11:59:43 +0000 | [diff] [blame] | 3685 | aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count, |
| 3686 | n_log2_tiles); |
| 3687 | aom_wb_overwrite_literal(&tg_params_wb, tile_count - 2, n_log2_tiles); |
Thomas Davies | af6df17 | 2016-11-09 14:04:18 +0000 | [diff] [blame] | 3688 | // Update the pointer to the last TG params |
| 3689 | tg_params_wb.bit_offset = saved_offset + 8 * old_total_size; |
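| | // (saved_offset is the placeholder's bit position within the header, which now starts at byte old_total_size.) |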
| 3690 | // Copy compressed header |
| 3691 | memmove(dst + old_total_size + uncompressed_hdr_size, |
| 3692 | dst + uncompressed_hdr_size, comp_hdr_size * sizeof(uint8_t)); |
| 3693 | total_size += hdr_size; |
| 3694 | tile_count = 1; |
| 3695 | curr_tg_data_size = hdr_size + tile_size + 4; |
| 3696 | |
| 3697 | } else { |
| 3698 | // We exceeded the packet size in just one tile |
| 3699 | // Copy uncompressed header |
| 3700 | memmove(dst + total_size, dst, |
| 3701 | uncompressed_hdr_size * sizeof(uint8_t)); |
| 3702 | // Write the number of tiles in the group into the last uncompressed |
| 3703 | // header |
Thomas Davies | faa7fcf | 2016-11-14 11:59:43 +0000 | [diff] [blame] | 3704 | aom_wb_overwrite_literal(&tg_params_wb, tile_idx - tile_count, |
| 3705 | n_log2_tiles); |
| 3706 | aom_wb_overwrite_literal(&tg_params_wb, tile_count - 1, n_log2_tiles); |
Thomas Davies | af6df17 | 2016-11-09 14:04:18 +0000 | [diff] [blame] | 3707 | tg_params_wb.bit_offset = saved_offset + 8 * total_size; |
| 3708 | // Copy compressed header |
| 3709 | memmove(dst + total_size + uncompressed_hdr_size, |
| 3710 | dst + uncompressed_hdr_size, comp_hdr_size * sizeof(uint8_t)); |
| 3711 | total_size += hdr_size; |
| 3712 | tile_count = 0; |
| 3713 | curr_tg_data_size = hdr_size; |
| 3714 | } |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3715 | } |
| 3716 | tile_count++; |
| 3717 | #endif |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3718 | av1_tile_set_col(&tile_info, cm, tile_col); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3719 | |
| 3720 | buf->data = dst + total_size; |
| 3721 | |
| 3722 | // The last tile does not have a header. |
| 3723 | if (!is_last_tile) total_size += 4; |
| 3724 | |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3725 | #if CONFIG_ANS |
Alex Converse | 2a1b3af | 2016-10-26 13:11:26 -0700 | [diff] [blame] | 3726 | buf_ans_write_init(buf_ans, dst + total_size); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3727 | write_modes(cpi, &tile_info, buf_ans, &tok, tok_end); |
| 3728 | assert(tok == tok_end); |
Alex Converse | 1ecdf2b | 2016-11-30 15:51:12 -0800 | [diff] [blame] | 3729 | aom_buf_ans_flush(buf_ans); |
Alex Converse | 2a1b3af | 2016-10-26 13:11:26 -0700 | [diff] [blame] | 3730 | tile_size = buf_ans_write_end(buf_ans); |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3731 | #else |
| 3732 | aom_start_encode(&mode_bc, dst + total_size); |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 3733 | #if CONFIG_PVQ |
| 3734 | // NOTE: This will not work with CONFIG_ANS turned on. |
| 3735 | od_adapt_ctx_reset(&cpi->td.mb.daala_enc.state.adapt, 0); |
| 3736 | cpi->td.mb.pvq_q = &this_tile->pvq_q; |
| 3737 | #endif |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3738 | write_modes(cpi, &tile_info, &mode_bc, &tok, tok_end); |
| 3739 | assert(tok == tok_end); |
| 3740 | aom_stop_encode(&mode_bc); |
| 3741 | tile_size = mode_bc.pos; |
Alex Converse | 2a1b3af | 2016-10-26 13:11:26 -0700 | [diff] [blame] | 3742 | #endif // CONFIG_ANS |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 3743 | #if CONFIG_PVQ |
| 3744 | cpi->td.mb.pvq_q = NULL; |
Alex Converse | 2a1b3af | 2016-10-26 13:11:26 -0700 | [diff] [blame] | 3745 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3746 | |
| 3747 | assert(tile_size > 0); |
| 3748 | |
Thomas Davies | af6df17 | 2016-11-09 14:04:18 +0000 | [diff] [blame] | 3749 | #if CONFIG_TILE_GROUPS |
| 3750 | curr_tg_data_size += tile_size + 4; |
| 3751 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3752 | buf->size = tile_size; |
| 3753 | |
| 3754 | if (!is_last_tile) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3755 | *max_tile_size = AOMMAX(*max_tile_size, tile_size); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3756 | // size of this tile |
| 3757 | mem_put_le32(buf->data, tile_size); |
| 3758 | } |
| 3759 | |
| 3760 | total_size += tile_size; |
| 3761 | } |
| 3762 | } |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 3763 | #if CONFIG_TILE_GROUPS |
| 3764 | // Write the final tile group size |
| 3765 | if (n_log2_tiles) { |
| 3766 | aom_wb_write_literal(&tg_params_wb, (1 << n_log2_tiles) - tile_count, |
| 3767 | n_log2_tiles); |
| 3768 | aom_wb_write_literal(&tg_params_wb, tile_count - 1, n_log2_tiles); |
| 3769 | } |
| 3770 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3771 | #endif // CONFIG_EXT_TILE |
| 3772 | return (uint32_t)total_size; |
| 3773 | } |
| 3774 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3775 | static void write_render_size(const AV1_COMMON *cm, |
| 3776 | struct aom_write_bit_buffer *wb) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3777 | const int scaling_active = |
| 3778 | cm->width != cm->render_width || cm->height != cm->render_height; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3779 | aom_wb_write_bit(wb, scaling_active); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3780 | if (scaling_active) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3781 | aom_wb_write_literal(wb, cm->render_width - 1, 16); |
| 3782 | aom_wb_write_literal(wb, cm->render_height - 1, 16); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3783 | } |
| 3784 | } |
| 3785 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3786 | static void write_frame_size(const AV1_COMMON *cm, |
| 3787 | struct aom_write_bit_buffer *wb) { |
| 3788 | aom_wb_write_literal(wb, cm->width - 1, 16); |
| 3789 | aom_wb_write_literal(wb, cm->height - 1, 16); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3790 | |
| 3791 | write_render_size(cm, wb); |
| 3792 | } |
| 3793 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3794 | static void write_frame_size_with_refs(AV1_COMP *cpi, |
| 3795 | struct aom_write_bit_buffer *wb) { |
| 3796 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3797 | int found = 0; |
| 3798 | |
| 3799 | MV_REFERENCE_FRAME ref_frame; |
| 3800 | for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) { |
| 3801 | YV12_BUFFER_CONFIG *cfg = get_ref_frame_buffer(cpi, ref_frame); |
| 3802 | |
| 3803 | if (cfg != NULL) { |
| 3804 | found = |
| 3805 | cm->width == cfg->y_crop_width && cm->height == cfg->y_crop_height; |
| 3806 | found &= cm->render_width == cfg->render_width && |
| 3807 | cm->render_height == cfg->render_height; |
| 3808 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3809 | aom_wb_write_bit(wb, found); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3810 | if (found) { |
| 3811 | break; |
| 3812 | } |
| 3813 | } |
| 3814 | |
| 3815 | if (!found) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3816 | aom_wb_write_literal(wb, cm->width - 1, 16); |
| 3817 | aom_wb_write_literal(wb, cm->height - 1, 16); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3818 | write_render_size(cm, wb); |
| 3819 | } |
| 3820 | } |
| 3821 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3822 | static void write_sync_code(struct aom_write_bit_buffer *wb) { |
| 3823 | aom_wb_write_literal(wb, AV1_SYNC_CODE_0, 8); |
| 3824 | aom_wb_write_literal(wb, AV1_SYNC_CODE_1, 8); |
| 3825 | aom_wb_write_literal(wb, AV1_SYNC_CODE_2, 8); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3826 | } |
| 3827 | |
| 3828 | static void write_profile(BITSTREAM_PROFILE profile, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3829 | struct aom_write_bit_buffer *wb) { |
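| | // Profiles 0-2 map to the 2-bit codes 00, 10 and 01; profile 3 uses the 3-bit pattern 110. |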
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3830 | switch (profile) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3831 | case PROFILE_0: aom_wb_write_literal(wb, 0, 2); break; |
| 3832 | case PROFILE_1: aom_wb_write_literal(wb, 2, 2); break; |
| 3833 | case PROFILE_2: aom_wb_write_literal(wb, 1, 2); break; |
| 3834 | case PROFILE_3: aom_wb_write_literal(wb, 6, 3); break; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3835 | default: assert(0); |
| 3836 | } |
| 3837 | } |
| 3838 | |
| 3839 | static void write_bitdepth_colorspace_sampling( |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3840 | AV1_COMMON *const cm, struct aom_write_bit_buffer *wb) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3841 | if (cm->profile >= PROFILE_2) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3842 | assert(cm->bit_depth > AOM_BITS_8); |
| 3843 | aom_wb_write_bit(wb, cm->bit_depth == AOM_BITS_10 ? 0 : 1); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3844 | } |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3845 | aom_wb_write_literal(wb, cm->color_space, 3); |
| 3846 | if (cm->color_space != AOM_CS_SRGB) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3847 | // 0: [16, 235] (i.e. xvYCC), 1: [0, 255] |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3848 | aom_wb_write_bit(wb, cm->color_range); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3849 | if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) { |
| 3850 | assert(cm->subsampling_x != 1 || cm->subsampling_y != 1); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3851 | aom_wb_write_bit(wb, cm->subsampling_x); |
| 3852 | aom_wb_write_bit(wb, cm->subsampling_y); |
| 3853 | aom_wb_write_bit(wb, 0); // unused |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3854 | } else { |
| 3855 | assert(cm->subsampling_x == 1 && cm->subsampling_y == 1); |
| 3856 | } |
| 3857 | } else { |
| 3858 | assert(cm->profile == PROFILE_1 || cm->profile == PROFILE_3); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3859 | aom_wb_write_bit(wb, 0); // unused |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3860 | } |
| 3861 | } |
| 3862 | |
Arild Fuldseth (arilfuld) | 5114b7b | 2016-11-09 13:32:54 +0100 | [diff] [blame] | 3863 | #if CONFIG_REFERENCE_BUFFER |
| 3864 | void write_sequence_header(SequenceHeader *seq_params) { |
| 3865 | /* Placeholder for actually writing to the bitstream */ |
| 3866 | seq_params->frame_id_numbers_present_flag = FRAME_ID_NUMBERS_PRESENT_FLAG; |
| 3867 | seq_params->frame_id_length_minus7 = FRAME_ID_LENGTH_MINUS7; |
| 3868 | seq_params->delta_frame_id_length_minus2 = DELTA_FRAME_ID_LENGTH_MINUS2; |
| 3869 | } |
| 3870 | #endif |
| 3871 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3872 | static void write_uncompressed_header(AV1_COMP *cpi, |
| 3873 | struct aom_write_bit_buffer *wb) { |
| 3874 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3875 | MACROBLOCKD *const xd = &cpi->td.mb.e_mbd; |
| 3876 | |
Arild Fuldseth (arilfuld) | 5114b7b | 2016-11-09 13:32:54 +0100 | [diff] [blame] | 3877 | #if CONFIG_REFERENCE_BUFFER |
| 3878 | /* TODO: Move outside frame loop or inside key-frame branch */ |
| 3879 | write_sequence_header(&cpi->seq_params); |
| 3880 | #endif |
| 3881 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3882 | aom_wb_write_literal(wb, AOM_FRAME_MARKER, 2); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3883 | |
| 3884 | write_profile(cm->profile, wb); |
| 3885 | |
| 3886 | #if CONFIG_EXT_REFS |
| 3887 | // NOTE: By default, all coded frames are to be used as a reference |
| 3888 | cm->is_reference_frame = 1; |
| 3889 | |
| 3890 | if (cm->show_existing_frame) { |
| 3891 | RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs; |
| 3892 | const int frame_to_show = cm->ref_frame_map[cpi->existing_fb_idx_to_show]; |
| 3893 | |
| 3894 | if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3895 | aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3896 | "Buffer %d does not contain a reconstructed frame", |
| 3897 | frame_to_show); |
| 3898 | } |
| 3899 | ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show); |
| 3900 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3901 | aom_wb_write_bit(wb, 1); // show_existing_frame |
| 3902 | aom_wb_write_literal(wb, cpi->existing_fb_idx_to_show, 3); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3903 | |
Arild Fuldseth (arilfuld) | 788dc23 | 2016-12-20 17:55:52 +0100 | [diff] [blame] | 3904 | #if CONFIG_REFERENCE_BUFFER |
| 3905 | if (cpi->seq_params.frame_id_numbers_present_flag) { |
| 3906 | int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7; |
| 3907 | int display_frame_id = cm->ref_frame_id[cpi->existing_fb_idx_to_show]; |
| 3908 | aom_wb_write_literal(wb, display_frame_id, frame_id_len); |
| 3909 | /* Add a zero byte to prevent emulation of superframe marker */ |
| 3910 | /* Same logic as when terminating the entropy coder */ |
| 3911 | /* Consider keeping this logic in only one place */ |
| 3912 | aom_wb_write_literal(wb, 0, 8); |
| 3913 | } |
| 3914 | #endif |
| 3915 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3916 | return; |
| 3917 | } else { |
| 3918 | #endif // CONFIG_EXT_REFS |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3919 | aom_wb_write_bit(wb, 0); // show_existing_frame |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3920 | #if CONFIG_EXT_REFS |
| 3921 | } |
| 3922 | #endif // CONFIG_EXT_REFS |
| 3923 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3924 | aom_wb_write_bit(wb, cm->frame_type); |
| 3925 | aom_wb_write_bit(wb, cm->show_frame); |
| 3926 | aom_wb_write_bit(wb, cm->error_resilient_mode); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3927 | |
Arild Fuldseth (arilfuld) | 5114b7b | 2016-11-09 13:32:54 +0100 | [diff] [blame] | 3928 | #if CONFIG_REFERENCE_BUFFER |
| 3929 | cm->invalid_delta_frame_id_minus1 = 0; |
| 3930 | if (cpi->seq_params.frame_id_numbers_present_flag) { |
Arild Fuldseth (arilfuld) | 788dc23 | 2016-12-20 17:55:52 +0100 | [diff] [blame] | 3931 | int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7; |
| 3932 | aom_wb_write_literal(wb, cm->current_frame_id, frame_id_len); |
Arild Fuldseth (arilfuld) | 5114b7b | 2016-11-09 13:32:54 +0100 | [diff] [blame] | 3933 | } |
| 3934 | #endif |
| 3935 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3936 | if (cm->frame_type == KEY_FRAME) { |
| 3937 | write_sync_code(wb); |
| 3938 | write_bitdepth_colorspace_sampling(cm, wb); |
| 3939 | write_frame_size(cm, wb); |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 3940 | #if CONFIG_PALETTE |
hui su | 24f7b07 | 2016-10-12 11:36:24 -0700 | [diff] [blame] | 3941 | aom_wb_write_bit(wb, cm->allow_screen_content_tools); |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 3942 | #endif // CONFIG_PALETTE |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3943 | } else { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3944 | if (!cm->show_frame) aom_wb_write_bit(wb, cm->intra_only); |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 3945 | #if CONFIG_PALETTE |
hui su | 24f7b07 | 2016-10-12 11:36:24 -0700 | [diff] [blame] | 3946 | if (cm->intra_only) aom_wb_write_bit(wb, cm->allow_screen_content_tools); |
Urvang Joshi | b100db7 | 2016-10-12 16:28:56 -0700 | [diff] [blame] | 3947 | #endif // CONFIG_PALETTE |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3948 | if (!cm->error_resilient_mode) { |
| 3949 | if (cm->intra_only) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3950 | aom_wb_write_bit(wb, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3951 | cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL); |
| 3952 | } else { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3953 | aom_wb_write_bit(wb, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3954 | cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE); |
| 3955 | if (cm->reset_frame_context != RESET_FRAME_CONTEXT_NONE) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3956 | aom_wb_write_bit(wb, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3957 | cm->reset_frame_context == RESET_FRAME_CONTEXT_ALL); |
| 3958 | } |
| 3959 | } |
| 3960 | |
| 3961 | #if CONFIG_EXT_REFS |
| 3962 | cpi->refresh_frame_mask = get_refresh_mask(cpi); |
| 3963 | #endif // CONFIG_EXT_REFS |
| 3964 | |
| 3965 | if (cm->intra_only) { |
| 3966 | write_sync_code(wb); |
| 3967 | write_bitdepth_colorspace_sampling(cm, wb); |
| 3968 | |
| 3969 | #if CONFIG_EXT_REFS |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3970 | aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3971 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3972 | aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3973 | #endif // CONFIG_EXT_REFS |
| 3974 | write_frame_size(cm, wb); |
| 3975 | } else { |
| 3976 | MV_REFERENCE_FRAME ref_frame; |
| 3977 | |
| 3978 | #if CONFIG_EXT_REFS |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3979 | aom_wb_write_literal(wb, cpi->refresh_frame_mask, REF_FRAMES); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3980 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3981 | aom_wb_write_literal(wb, get_refresh_mask(cpi), REF_FRAMES); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3982 | #endif // CONFIG_EXT_REFS |
| 3983 | |
| 3984 | #if CONFIG_EXT_REFS |
| 3985 | if (!cpi->refresh_frame_mask) { |
| 3986 | // NOTE: "cpi->refresh_frame_mask == 0" indicates that the coded frame |
| 3987 | // will not be used as a reference |
| 3988 | cm->is_reference_frame = 0; |
| 3989 | } |
| 3990 | #endif // CONFIG_EXT_REFS |
| 3991 | |
| 3992 | for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) { |
| 3993 | assert(get_ref_frame_map_idx(cpi, ref_frame) != INVALID_IDX); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3994 | aom_wb_write_literal(wb, get_ref_frame_map_idx(cpi, ref_frame), |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 3995 | REF_FRAMES_LOG2); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 3996 | aom_wb_write_bit(wb, cm->ref_frame_sign_bias[ref_frame]); |
Arild Fuldseth (arilfuld) | 5114b7b | 2016-11-09 13:32:54 +0100 | [diff] [blame] | 3997 | #if CONFIG_REFERENCE_BUFFER |
| 3998 | if (cpi->seq_params.frame_id_numbers_present_flag) { |
| 3999 | int i = get_ref_frame_map_idx(cpi, ref_frame); |
Arild Fuldseth (arilfuld) | 788dc23 | 2016-12-20 17:55:52 +0100 | [diff] [blame] | 4000 | int frame_id_len = cpi->seq_params.frame_id_length_minus7 + 7; |
| 4001 | int diff_len = cpi->seq_params.delta_frame_id_length_minus2 + 2; |
Arild Fuldseth (arilfuld) | 5114b7b | 2016-11-09 13:32:54 +0100 | [diff] [blame] | 4002 | int delta_frame_id_minus1 = |
Arild Fuldseth (arilfuld) | 788dc23 | 2016-12-20 17:55:52 +0100 | [diff] [blame] | 4003 | ((cm->current_frame_id - cm->ref_frame_id[i] + |
| 4004 | (1 << frame_id_len)) % |
| 4005 | (1 << frame_id_len)) - |
Arild Fuldseth (arilfuld) | 5114b7b | 2016-11-09 13:32:54 +0100 | [diff] [blame] | 4006 | 1; |
| 4007 | if (delta_frame_id_minus1 < 0 || |
Arild Fuldseth (arilfuld) | 788dc23 | 2016-12-20 17:55:52 +0100 | [diff] [blame] | 4008 | delta_frame_id_minus1 >= (1 << diff_len)) |
Arild Fuldseth (arilfuld) | 5114b7b | 2016-11-09 13:32:54 +0100 | [diff] [blame] | 4009 | cm->invalid_delta_frame_id_minus1 = 1; |
Arild Fuldseth (arilfuld) | 788dc23 | 2016-12-20 17:55:52 +0100 | [diff] [blame] | 4010 | aom_wb_write_literal(wb, delta_frame_id_minus1, diff_len); |
Arild Fuldseth (arilfuld) | 5114b7b | 2016-11-09 13:32:54 +0100 | [diff] [blame] | 4011 | } |
| 4012 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4013 | } |
| 4014 | |
Arild Fuldseth | 842e9b0 | 2016-09-02 13:00:05 +0200 | [diff] [blame] | 4015 | #if CONFIG_FRAME_SIZE |
| 4016 | if (cm->error_resilient_mode == 0) { |
| 4017 | write_frame_size_with_refs(cpi, wb); |
| 4018 | } else { |
| 4019 | write_frame_size(cm, wb); |
| 4020 | } |
| 4021 | #else |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4022 | write_frame_size_with_refs(cpi, wb); |
Arild Fuldseth | 842e9b0 | 2016-09-02 13:00:05 +0200 | [diff] [blame] | 4023 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4024 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4025 | aom_wb_write_bit(wb, cm->allow_high_precision_mv); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4026 | |
| 4027 | fix_interp_filter(cm, cpi->td.counts); |
Angie Chiang | 5678ad9 | 2016-11-21 09:38:40 -0800 | [diff] [blame] | 4028 | write_frame_interp_filter(cm->interp_filter, wb); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4029 | } |
| 4030 | } |
| 4031 | |
Arild Fuldseth (arilfuld) | 5114b7b | 2016-11-09 13:32:54 +0100 | [diff] [blame] | 4032 | #if CONFIG_REFERENCE_BUFFER |
| 4033 | cm->refresh_mask = cm->frame_type == KEY_FRAME ? 0xFF : get_refresh_mask(cpi); |
| 4034 | #endif |
| 4035 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4036 | if (!cm->error_resilient_mode) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4037 | aom_wb_write_bit( |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4038 | wb, cm->refresh_frame_context == REFRESH_FRAME_CONTEXT_FORWARD); |
| 4039 | } |
| 4040 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4041 | aom_wb_write_literal(wb, cm->frame_context_idx, FRAME_CONTEXTS_LOG2); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4042 | |
Jingning Han | c709e1f | 2016-12-06 14:48:09 -0800 | [diff] [blame] | 4043 | assert(cm->mib_size == mi_size_wide[cm->sb_size]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4044 | assert(cm->mib_size == 1 << cm->mib_size_log2); |
| 4045 | #if CONFIG_EXT_PARTITION |
| 4046 | assert(cm->sb_size == BLOCK_128X128 || cm->sb_size == BLOCK_64X64); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4047 | aom_wb_write_bit(wb, cm->sb_size == BLOCK_128X128 ? 1 : 0); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4048 | #else |
| 4049 | assert(cm->sb_size == BLOCK_64X64); |
| 4050 | #endif // CONFIG_EXT_PARTITION |
| 4051 | |
| 4052 | encode_loopfilter(cm, wb); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4053 | #if CONFIG_DERING |
| 4054 | encode_dering(cm->dering_level, wb); |
| 4055 | #endif // CONFIG_DERING |
Steinar Midtskogen | 5d56f4d | 2016-09-25 09:23:16 +0200 | [diff] [blame] | 4056 | #if CONFIG_CLPF |
| 4057 | encode_clpf(cm, wb); |
| 4058 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4059 | #if CONFIG_LOOP_RESTORATION |
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 4060 | encode_restoration_mode(cm, wb); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4061 | #endif // CONFIG_LOOP_RESTORATION |
| 4062 | encode_quantization(cm, wb); |
| 4063 | encode_segmentation(cm, xd, wb); |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 4064 | #if CONFIG_DELTA_Q |
| 4065 | { |
| 4066 | int i; |
| 4067 | struct segmentation *const seg = &cm->seg; |
| 4068 | int segment_quantizer_active = 0; |
| 4069 | for (i = 0; i < MAX_SEGMENTS; i++) { |
| 4070 | if (segfeature_active(seg, i, SEG_LVL_ALT_Q)) { |
| 4071 | segment_quantizer_active = 1; |
| 4072 | } |
| 4073 | } |
| 4074 | if (segment_quantizer_active == 0) { |
| 4075 | cm->delta_q_present_flag = cpi->oxcf.aq_mode == DELTA_AQ; |
| 4076 | aom_wb_write_bit(wb, cm->delta_q_present_flag); |
| 4077 | if (cm->delta_q_present_flag) { |
Thomas Davies | f693610 | 2016-09-05 16:51:31 +0100 | [diff] [blame] | 4078 | aom_wb_write_literal(wb, OD_ILOG_NZ(cm->delta_q_res) - 1, 2); |
Arild Fuldseth | 0744116 | 2016-08-15 15:07:52 +0200 | [diff] [blame] | 4079 | xd->prev_qindex = cm->base_qindex; |
| 4080 | } |
| 4081 | } |
| 4082 | } |
| 4083 | #endif |
| 4084 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4085 | if (!cm->seg.enabled && xd->lossless[0]) |
Urvang Joshi | cb586f3 | 2016-09-20 11:36:33 -0700 | [diff] [blame] | 4086 | cm->tx_mode = ONLY_4X4; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4087 | else |
Debargha Mukherjee | 18d38f6 | 2016-11-17 20:30:16 -0800 | [diff] [blame] | 4088 | write_tx_mode(cm->tx_mode, wb); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4089 | |
| 4090 | if (cpi->allow_comp_inter_inter) { |
| 4091 | const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT; |
| 4092 | const int use_compound_pred = cm->reference_mode != SINGLE_REFERENCE; |
| 4093 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4094 | aom_wb_write_bit(wb, use_hybrid_pred); |
| 4095 | if (!use_hybrid_pred) aom_wb_write_bit(wb, use_compound_pred); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4096 | } |
| 4097 | |
| 4098 | write_tile_info(cm, wb); |
| 4099 | } |
| 4100 | |
| 4101 | #if CONFIG_GLOBAL_MOTION |
David Barker | cf3d0b0 | 2016-11-10 10:14:49 +0000 | [diff] [blame] | 4102 | static void write_global_motion_params(WarpedMotionParams *params, |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4103 | aom_prob *probs, aom_writer *w) { |
David Barker | cf3d0b0 | 2016-11-10 10:14:49 +0000 | [diff] [blame] | 4104 | TransformationType type = params->wmtype; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4105 | av1_write_token(w, av1_global_motion_types_tree, probs, |
David Barker | cf3d0b0 | 2016-11-10 10:14:49 +0000 | [diff] [blame] | 4106 | &global_motion_types_encodings[type]); |
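| | // The cases fall through so each model writes its extra parameters on top of the simpler models below it. |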
| 4107 | switch (type) { |
Debargha Mukherjee | 3fb33f0 | 2016-11-12 10:43:50 -0800 | [diff] [blame] | 4108 | case HOMOGRAPHY: |
Debargha Mukherjee | 949097c | 2016-11-15 17:27:38 -0800 | [diff] [blame] | 4109 | aom_write_primitive_symmetric( |
| 4110 | w, (params->wmmat[6] >> GM_ROW3HOMO_PREC_DIFF), GM_ABS_ROW3HOMO_BITS); |
| 4111 | aom_write_primitive_symmetric( |
| 4112 | w, (params->wmmat[7] >> GM_ROW3HOMO_PREC_DIFF), GM_ABS_ROW3HOMO_BITS); |
Debargha Mukherjee | 3fb33f0 | 2016-11-12 10:43:50 -0800 | [diff] [blame] | 4113 | // fallthrough intended |
David Barker | cf3d0b0 | 2016-11-10 10:14:49 +0000 | [diff] [blame] | 4114 | case AFFINE: |
| 4115 | case ROTZOOM: |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4116 | aom_write_primitive_symmetric( |
David Barker | cf3d0b0 | 2016-11-10 10:14:49 +0000 | [diff] [blame] | 4117 | w, |
| 4118 | (params->wmmat[2] >> GM_ALPHA_PREC_DIFF) - (1 << GM_ALPHA_PREC_BITS), |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4119 | GM_ABS_ALPHA_BITS); |
David Barker | cf3d0b0 | 2016-11-10 10:14:49 +0000 | [diff] [blame] | 4120 | aom_write_primitive_symmetric(w, (params->wmmat[3] >> GM_ALPHA_PREC_DIFF), |
| 4121 | GM_ABS_ALPHA_BITS); |
Debargha Mukherjee | 3fb33f0 | 2016-11-12 10:43:50 -0800 | [diff] [blame] | 4122 | if (type == AFFINE || type == HOMOGRAPHY) { |
Debargha Mukherjee | 8db4c77 | 2016-11-07 12:54:21 -0800 | [diff] [blame] | 4123 | aom_write_primitive_symmetric( |
David Barker | cf3d0b0 | 2016-11-10 10:14:49 +0000 | [diff] [blame] | 4124 | w, (params->wmmat[4] >> GM_ALPHA_PREC_DIFF), GM_ABS_ALPHA_BITS); |
| 4125 | aom_write_primitive_symmetric(w, |
| 4126 | (params->wmmat[5] >> GM_ALPHA_PREC_DIFF) - |
| 4127 | (1 << GM_ALPHA_PREC_BITS), |
| 4128 | GM_ABS_ALPHA_BITS); |
Debargha Mukherjee | 8db4c77 | 2016-11-07 12:54:21 -0800 | [diff] [blame] | 4129 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4130 | // fallthrough intended |
David Barker | cf3d0b0 | 2016-11-10 10:14:49 +0000 | [diff] [blame] | 4131 | case TRANSLATION: |
| 4132 | aom_write_primitive_symmetric(w, (params->wmmat[0] >> GM_TRANS_PREC_DIFF), |
| 4133 | GM_ABS_TRANS_BITS); |
| 4134 | aom_write_primitive_symmetric(w, (params->wmmat[1] >> GM_TRANS_PREC_DIFF), |
| 4135 | GM_ABS_TRANS_BITS); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4136 | break; |
Debargha Mukherjee | 3fb33f0 | 2016-11-12 10:43:50 -0800 | [diff] [blame] | 4137 | case IDENTITY: break; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4138 | default: assert(0); |
| 4139 | } |
| 4140 | } |
| 4141 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4142 | static void write_global_motion(AV1_COMP *cpi, aom_writer *w) { |
| 4143 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4144 | int frame; |
| 4145 | for (frame = LAST_FRAME; frame <= ALTREF_FRAME; ++frame) { |
David Barker | 43479c6 | 2016-11-30 10:34:20 +0000 | [diff] [blame] | 4146 | #if !CONFIG_REF_MV |
| 4147 | // With ref-mv, clearing unused global motion models here is |
| 4148 | // unsafe, and we need to rely on the recode loop to do it |
| 4149 | // instead. See av1_find_mv_refs for details. |
Debargha Mukherjee | 705544c | 2016-11-22 08:55:49 -0800 | [diff] [blame] | 4150 | if (!cpi->global_motion_used[frame][0]) { |
Debargha Mukherjee | 8db4c77 | 2016-11-07 12:54:21 -0800 | [diff] [blame] | 4151 | set_default_gmparams(&cm->global_motion[frame]); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4152 | } |
David Barker | 43479c6 | 2016-11-30 10:34:20 +0000 | [diff] [blame] | 4153 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4154 | write_global_motion_params(&cm->global_motion[frame], |
| 4155 | cm->fc->global_motion_types_prob, w); |
Sarah Parker | e529986 | 2016-08-16 14:57:37 -0700 | [diff] [blame] | 4156 | /* |
Debargha Mukherjee | 705544c | 2016-11-22 08:55:49 -0800 | [diff] [blame] | 4157 | printf("Frame %d/%d: Enc Ref %d (used %d/%d): %d %d %d %d\n", |
Debargha Mukherjee | b98a702 | 2016-11-15 16:07:12 -0800 | [diff] [blame] | 4158 | cm->current_video_frame, cm->show_frame, frame, |
Debargha Mukherjee | 705544c | 2016-11-22 08:55:49 -0800 | [diff] [blame] | 4159 | cpi->global_motion_used[frame][0], cpi->global_motion_used[frame][1], |
| 4160 | cm->global_motion[frame].wmmat[0], cm->global_motion[frame].wmmat[1], |
| 4161 | cm->global_motion[frame].wmmat[2], |
| 4162 | cm->global_motion[frame].wmmat[3]); |
Debargha Mukherjee | 8db4c77 | 2016-11-07 12:54:21 -0800 | [diff] [blame] | 4163 | */ |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4164 | } |
| 4165 | } |
| 4166 | #endif |
| 4167 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4168 | static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data) { |
| 4169 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4170 | #if CONFIG_SUPERTX |
| 4171 | MACROBLOCKD *const xd = &cpi->td.mb.e_mbd; |
| 4172 | #endif // CONFIG_SUPERTX |
| 4173 | FRAME_CONTEXT *const fc = cm->fc; |
| 4174 | FRAME_COUNTS *counts = cpi->td.counts; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4175 | aom_writer *header_bc; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4176 | int i, j; |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 4177 | |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4178 | #if CONFIG_TILE_GROUPS |
| 4179 | const int probwt = cm->num_tg; |
| 4180 | #else |
| 4181 | const int probwt = 1; |
| 4182 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4183 | |
| 4184 | #if CONFIG_ANS |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4185 | int header_size; |
| 4186 | header_bc = &cpi->buf_ans; |
Alex Converse | 2a1b3af | 2016-10-26 13:11:26 -0700 | [diff] [blame] | 4187 | buf_ans_write_init(header_bc, data); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4188 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4189 | aom_writer real_header_bc; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4190 | header_bc = &real_header_bc; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4191 | aom_start_encode(header_bc, data); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4192 | #endif |
Debargha Mukherjee | 5cd2ab9 | 2016-09-08 15:15:17 -0700 | [diff] [blame] | 4193 | |
| 4194 | #if CONFIG_LOOP_RESTORATION |
| 4195 | encode_restoration(cm, header_bc); |
| 4196 | #endif // CONFIG_LOOP_RESTORATION |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4197 | update_txfm_probs(cm, header_bc, counts); |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 4198 | #if !CONFIG_PVQ |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4199 | update_coef_probs(cpi, header_bc); |
Yushin Cho | 77bba8d | 2016-11-04 16:36:56 -0700 | [diff] [blame] | 4200 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4201 | #if CONFIG_VAR_TX |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4202 | update_txfm_partition_probs(cm, header_bc, counts, probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4203 | #endif |
| 4204 | |
| 4205 | update_skip_probs(cm, header_bc, counts); |
Thomas Davies | f693610 | 2016-09-05 16:51:31 +0100 | [diff] [blame] | 4206 | #if CONFIG_DELTA_Q |
| 4207 | update_delta_q_probs(cm, header_bc, counts); |
| 4208 | #endif |
Nathan E. Egge | baaaa16 | 2016-10-24 09:50:52 -0400 | [diff] [blame] | 4209 | #if !CONFIG_EC_ADAPT |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4210 | update_seg_probs(cpi, header_bc); |
| 4211 | |
Nathan E. Egge | 380cb1a | 2016-09-08 10:13:42 -0400 | [diff] [blame] | 4212 | for (i = 0; i < INTRA_MODES; ++i) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4213 | prob_diff_update(av1_intra_mode_tree, fc->uv_mode_prob[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4214 | counts->uv_mode[i], INTRA_MODES, probwt, header_bc); |
Nathan E. Egge | 380cb1a | 2016-09-08 10:13:42 -0400 | [diff] [blame] | 4215 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4216 | |
| 4217 | #if CONFIG_EXT_PARTITION_TYPES |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4218 | prob_diff_update(av1_partition_tree, fc->partition_prob[0], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4219 | counts->partition[0], PARTITION_TYPES, probwt, header_bc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4220 | for (i = 1; i < PARTITION_CONTEXTS; ++i) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4221 | prob_diff_update(av1_ext_partition_tree, fc->partition_prob[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4222 | counts->partition[i], EXT_PARTITION_TYPES, probwt, |
| 4223 | header_bc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4224 | #else |
Nathan E. Egge | fba2be6 | 2016-05-03 09:48:54 -0400 | [diff] [blame] | 4225 | for (i = 0; i < PARTITION_CONTEXTS; ++i) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4226 | prob_diff_update(av1_partition_tree, fc->partition_prob[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4227 | counts->partition[i], PARTITION_TYPES, probwt, header_bc); |
Nathan E. Egge | fba2be6 | 2016-05-03 09:48:54 -0400 | [diff] [blame] | 4228 | } |
Thomas Davies | 6519beb | 2016-10-19 14:46:07 +0100 | [diff] [blame] | 4229 | #endif // CONFIG_EXT_PARTITION_TYPES |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4230 | |
| 4231 | #if CONFIG_EXT_INTRA |
hui su | eda3d76 | 2016-12-06 16:58:23 -0800 | [diff] [blame] | 4232 | #if CONFIG_INTRA_INTERP |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4233 | for (i = 0; i < INTRA_FILTERS + 1; ++i) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4234 | prob_diff_update(av1_intra_filter_tree, fc->intra_filter_probs[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4235 | counts->intra_filter[i], INTRA_FILTERS, probwt, header_bc); |
hui su | eda3d76 | 2016-12-06 16:58:23 -0800 | [diff] [blame] | 4236 | #endif // CONFIG_INTRA_INTERP |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4237 | #endif // CONFIG_EXT_INTRA |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 4238 | #endif // !CONFIG_EC_ADAPT |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4239 | if (frame_is_intra_only(cm)) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4240 | av1_copy(cm->kf_y_prob, av1_kf_y_mode_prob); |
Nathan E. Egge | 10ba2be | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 4241 | #if CONFIG_EC_MULTISYMBOL |
Nathan E. Egge | 3ef926e | 2016-09-07 18:20:41 -0400 | [diff] [blame] | 4242 | av1_copy(cm->kf_y_cdf, av1_kf_y_mode_cdf); |
| 4243 | #endif |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 4244 | |
Nathan E. Egge | baaaa16 | 2016-10-24 09:50:52 -0400 | [diff] [blame] | 4245 | #if !CONFIG_EC_ADAPT |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4246 | for (i = 0; i < INTRA_MODES; ++i) |
Thomas Davies | 6519beb | 2016-10-19 14:46:07 +0100 | [diff] [blame] | 4247 | for (j = 0; j < INTRA_MODES; ++j) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4248 | prob_diff_update(av1_intra_mode_tree, cm->kf_y_prob[i][j], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4249 | counts->kf_y_mode[i][j], INTRA_MODES, probwt, |
| 4250 | header_bc); |
Nathan E. Egge | baaaa16 | 2016-10-24 09:50:52 -0400 | [diff] [blame] | 4251 | #endif // CONFIG_EC_ADAPT |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4252 | } else { |
| 4253 | #if CONFIG_REF_MV |
| 4254 | update_inter_mode_probs(cm, header_bc, counts); |
| 4255 | #else |
Nathan E. Egge | baaaa16 | 2016-10-24 09:50:52 -0400 | [diff] [blame] | 4256 | #if !CONFIG_EC_ADAPT |
Nathan E. Egge | 6ec4d10 | 2016-09-08 10:41:20 -0400 | [diff] [blame] | 4257 | for (i = 0; i < INTER_MODE_CONTEXTS; ++i) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4258 | prob_diff_update(av1_inter_mode_tree, cm->fc->inter_mode_probs[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4259 | counts->inter_mode[i], INTER_MODES, probwt, header_bc); |
Nathan E. Egge | 6ec4d10 | 2016-09-08 10:41:20 -0400 | [diff] [blame] | 4260 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4261 | #endif |
Thomas Davies | 6519beb | 2016-10-19 14:46:07 +0100 | [diff] [blame] | 4262 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4263 | #if CONFIG_EXT_INTER |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4264 | update_inter_compound_mode_probs(cm, probwt, header_bc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4265 | |
| 4266 | if (cm->reference_mode != COMPOUND_REFERENCE) { |
| 4267 | for (i = 0; i < BLOCK_SIZE_GROUPS; i++) { |
| 4268 | if (is_interintra_allowed_bsize_group(i)) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4269 | av1_cond_prob_diff_update(header_bc, &fc->interintra_prob[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4270 | cm->counts.interintra[i], probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4271 | } |
| 4272 | } |
| 4273 | for (i = 0; i < BLOCK_SIZE_GROUPS; i++) { |
| 4274 | prob_diff_update( |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4275 | av1_interintra_mode_tree, cm->fc->interintra_mode_prob[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4276 | counts->interintra_mode[i], INTERINTRA_MODES, probwt, header_bc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4277 | } |
| 4278 | for (i = 0; i < BLOCK_SIZES; i++) { |
| 4279 | if (is_interintra_allowed_bsize(i) && is_interintra_wedge_used(i)) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4280 | av1_cond_prob_diff_update(header_bc, &fc->wedge_interintra_prob[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4281 | cm->counts.wedge_interintra[i], probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4282 | } |
| 4283 | } |
| 4284 | if (cm->reference_mode != SINGLE_REFERENCE) { |
| 4285 | for (i = 0; i < BLOCK_SIZES; i++) |
Sarah Parker | 6fdc853 | 2016-11-16 17:47:13 -0800 | [diff] [blame] | 4286 | prob_diff_update(av1_compound_type_tree, fc->compound_type_prob[i], |
| 4287 | cm->counts.compound_interinter[i], COMPOUND_TYPES, |
| 4288 | probwt, header_bc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4289 | } |
| 4290 | #endif // CONFIG_EXT_INTER |
| 4291 | |
Yue Chen | cb60b18 | 2016-10-13 15:18:22 -0700 | [diff] [blame] | 4292 | #if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4293 | for (i = BLOCK_8X8; i < BLOCK_SIZES; ++i) |
Yue Chen | cb60b18 | 2016-10-13 15:18:22 -0700 | [diff] [blame] | 4294 | prob_diff_update(av1_motion_mode_tree, fc->motion_mode_prob[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4295 | counts->motion_mode[i], MOTION_MODES, probwt, header_bc); |
Yue Chen | cb60b18 | 2016-10-13 15:18:22 -0700 | [diff] [blame] | 4296 | #endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION |
Nathan E. Egge | baaaa16 | 2016-10-24 09:50:52 -0400 | [diff] [blame] | 4297 | #if !CONFIG_EC_ADAPT |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4298 | if (cm->interp_filter == SWITCHABLE) |
| 4299 | update_switchable_interp_probs(cm, header_bc, counts); |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 4300 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4301 | |
| 4302 | for (i = 0; i < INTRA_INTER_CONTEXTS; i++) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4303 | av1_cond_prob_diff_update(header_bc, &fc->intra_inter_prob[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4304 | counts->intra_inter[i], probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4305 | |
| 4306 | if (cpi->allow_comp_inter_inter) { |
| 4307 | const int use_hybrid_pred = cm->reference_mode == REFERENCE_MODE_SELECT; |
| 4308 | if (use_hybrid_pred) |
| 4309 | for (i = 0; i < COMP_INTER_CONTEXTS; i++) |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4310 | av1_cond_prob_diff_update(header_bc, &fc->comp_inter_prob[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4311 | counts->comp_inter[i], probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4312 | } |
| 4313 | |
| 4314 | if (cm->reference_mode != COMPOUND_REFERENCE) { |
| 4315 | for (i = 0; i < REF_CONTEXTS; i++) { |
| 4316 | for (j = 0; j < (SINGLE_REFS - 1); j++) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4317 | av1_cond_prob_diff_update(header_bc, &fc->single_ref_prob[i][j], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4318 | counts->single_ref[i][j], probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4319 | } |
| 4320 | } |
| 4321 | } |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4322 | if (cm->reference_mode != SINGLE_REFERENCE) { |
| 4323 | for (i = 0; i < REF_CONTEXTS; i++) { |
| 4324 | #if CONFIG_EXT_REFS |
| 4325 | for (j = 0; j < (FWD_REFS - 1); j++) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4326 | av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4327 | counts->comp_ref[i][j], probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4328 | } |
| 4329 | for (j = 0; j < (BWD_REFS - 1); j++) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4330 | av1_cond_prob_diff_update(header_bc, &fc->comp_bwdref_prob[i][j], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4331 | counts->comp_bwdref[i][j], probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4332 | } |
| 4333 | #else |
| 4334 | for (j = 0; j < (COMP_REFS - 1); j++) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4335 | av1_cond_prob_diff_update(header_bc, &fc->comp_ref_prob[i][j], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4336 | counts->comp_ref[i][j], probwt); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4337 | } |
| 4338 | #endif // CONFIG_EXT_REFS |
| 4339 | } |
| 4340 | } |
| 4341 | |
Nathan E. Egge | baaaa16 | 2016-10-24 09:50:52 -0400 | [diff] [blame] | 4342 | #if !CONFIG_EC_ADAPT |
Nathan E. Egge | 5710c72 | 2016-09-08 10:01:16 -0400 | [diff] [blame] | 4343 | for (i = 0; i < BLOCK_SIZE_GROUPS; ++i) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4344 | prob_diff_update(av1_intra_mode_tree, cm->fc->y_mode_prob[i], |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4345 | counts->y_mode[i], INTRA_MODES, probwt, header_bc); |
Nathan E. Egge | 5710c72 | 2016-09-08 10:01:16 -0400 | [diff] [blame] | 4346 | } |
Thomas Davies | 6519beb | 2016-10-19 14:46:07 +0100 | [diff] [blame] | 4347 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4348 | |
Jingning Han | fd0cf16 | 2016-09-30 10:33:50 -0700 | [diff] [blame] | 4349 | av1_write_nmv_probs(cm, cm->allow_high_precision_mv, header_bc, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4350 | #if CONFIG_REF_MV |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4351 | counts->mv); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4352 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4353 | &counts->mv); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4354 | #endif |
Nathan E. Egge | baaaa16 | 2016-10-24 09:50:52 -0400 | [diff] [blame] | 4355 | #if !CONFIG_EC_ADAPT |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4356 | update_ext_tx_probs(cm, header_bc); |
Thomas | 9ac5508 | 2016-09-23 18:04:17 +0100 | [diff] [blame] | 4357 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4358 | #if CONFIG_SUPERTX |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4359 | if (!xd->lossless[0]) update_supertx_probs(cm, probwt, header_bc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4360 | #endif // CONFIG_SUPERTX |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4361 | #if CONFIG_GLOBAL_MOTION |
Sarah Parker | 689b0ca | 2016-10-11 12:06:33 -0700 | [diff] [blame] | 4362 | write_global_motion(cpi, header_bc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4363 | #endif // CONFIG_GLOBAL_MOTION |
Sarah Parker | 689b0ca | 2016-10-11 12:06:33 -0700 | [diff] [blame] | 4364 | } |
Thomas Davies | 6519beb | 2016-10-19 14:46:07 +0100 | [diff] [blame] | 4365 | #if CONFIG_EC_MULTISYMBOL |
| 4366 | av1_coef_pareto_cdfs(fc); |
David Barker | 599dfd0 | 2016-11-10 13:20:12 +0000 | [diff] [blame] | 4367 | #if CONFIG_REF_MV |
| 4368 | for (i = 0; i < NMV_CONTEXTS; ++i) av1_set_mv_cdfs(&fc->nmvc[i]); |
| 4369 | #else |
Thomas Davies | 6519beb | 2016-10-19 14:46:07 +0100 | [diff] [blame] | 4370 | av1_set_mv_cdfs(&fc->nmvc); |
David Barker | 599dfd0 | 2016-11-10 13:20:12 +0000 | [diff] [blame] | 4371 | #endif |
Nathan E. Egge | 3129606 | 2016-11-16 09:44:26 -0500 | [diff] [blame] | 4372 | #if CONFIG_EC_MULTISYMBOL |
Thomas Davies | 6519beb | 2016-10-19 14:46:07 +0100 | [diff] [blame] | 4373 | av1_set_mode_cdfs(cm); |
| 4374 | #endif |
| 4375 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4376 | #if CONFIG_ANS |
Alex Converse | 1ecdf2b | 2016-11-30 15:51:12 -0800 | [diff] [blame] | 4377 | aom_buf_ans_flush(header_bc); |
Alex Converse | 2a1b3af | 2016-10-26 13:11:26 -0700 | [diff] [blame] | 4378 | header_size = buf_ans_write_end(header_bc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4379 | assert(header_size <= 0xffff); |
| 4380 | return header_size; |
| 4381 | #else |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4382 | aom_stop_encode(header_bc); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4383 | assert(header_bc->pos <= 0xffff); |
| 4384 | return header_bc->pos; |
| 4385 | #endif // CONFIG_ANS |
| 4386 | } |
| 4387 | |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4388 | #if !CONFIG_TILE_GROUPS |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4389 | static int choose_size_bytes(uint32_t size, int spare_msbs) { |
| 4390 | // Choose the number of bytes required to represent size, without |
| 4391 | // using the 'spare_msbs' number of most significant bits. |
| 4392 | |
| 4393 | // Make sure the size will fit in 4 bytes to start with. |
| 4394 | if (spare_msbs > 0 && size >> (32 - spare_msbs) != 0) return -1; |
| 4395 | |
| 4396 | // Normalise to 32 bits |
| 4397 | size <<= spare_msbs; |
| 4398 | |
| 4399 | if (size >> 24 != 0) |
| 4400 | return 4; |
| 4401 | else if (size >> 16 != 0) |
| 4402 | return 3; |
| 4403 | else if (size >> 8 != 0) |
| 4404 | return 2; |
| 4405 | else |
| 4406 | return 1; |
| 4407 | } |
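// Worked examples (illustrative):
//   choose_size_bytes(0x000000ff, 0) == 1
//   choose_size_bytes(0x0000ffff, 0) == 2
//   choose_size_bytes(0x00ffffff, 1) == 4   (the reserved MSB widens the field)
//   choose_size_bytes(0x80000000, 1) == -1  (the value needs the spare bit)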
| 4408 | |
| 4409 | static void mem_put_varsize(uint8_t *const dst, const int sz, const int val) { |
| 4410 | switch (sz) { |
| 4411 | case 1: dst[0] = (uint8_t)(val & 0xff); break; |
| 4412 | case 2: mem_put_le16(dst, val); break; |
| 4413 | case 3: mem_put_le24(dst, val); break; |
| 4414 | case 4: mem_put_le32(dst, val); break; |
| 4415 | default: assert("Invalid size" && 0); break; |
| 4416 | } |
| 4417 | } |
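// For example, mem_put_varsize(dst, 3, 0x123456) stores the little-endian
// bytes 0x56, 0x34, 0x12 at dst.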
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4418 | static int remux_tiles(const AV1_COMMON *const cm, uint8_t *dst, |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4419 | const uint32_t data_size, const uint32_t max_tile_size, |
| 4420 | const uint32_t max_tile_col_size, |
| 4421 | int *const tile_size_bytes, |
| 4422 | int *const tile_col_size_bytes) { |
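// Informal sketch for the non-EXT_TILE case with tsb == 2: the fixed 4-byte
// tile size fields written during encoding,
//   [le32 size0][tile0 data][le32 size1][tile1 data][tile2 data]
// are rewritten with the minimal width chosen below, and the tile payloads
// are moved down in place to close the gaps,
//   [le16 size0][tile0 data][le16 size1][tile1 data][tile2 data]
// The return value is the compacted total size; the last tile has no size
// field because its length is implied by the total data size.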
| 4423 | // Choose the tile size bytes (tsb) and tile column size bytes (tcsb) |
| 4424 | #if CONFIG_EXT_TILE |
| 4425 | // The top bit of the tile size field indicates tile copy mode, so we |
| 4426 | // have one fewer bit available to code the tile size. |
| 4427 | const int tsb = choose_size_bytes(max_tile_size, 1); |
| 4428 | const int tcsb = choose_size_bytes(max_tile_col_size, 0); |
| 4429 | #else |
| 4430 | const int tsb = choose_size_bytes(max_tile_size, 0); |
| 4431 | const int tcsb = 4; // This is ignored |
| 4432 | (void)max_tile_col_size; |
| 4433 | #endif // CONFIG_EXT_TILE |
| 4434 | |
| 4435 | assert(tsb > 0); |
| 4436 | assert(tcsb > 0); |
| 4437 | |
| 4438 | *tile_size_bytes = tsb; |
| 4439 | *tile_col_size_bytes = tcsb; |
| 4440 | |
| 4441 | if (tsb == 4 && tcsb == 4) { |
| 4442 | return data_size; |
| 4443 | } else { |
| 4444 | uint32_t wpos = 0; |
| 4445 | uint32_t rpos = 0; |
| 4446 | |
| 4447 | #if CONFIG_EXT_TILE |
| 4448 | int tile_row; |
| 4449 | int tile_col; |
| 4450 | |
| 4451 | for (tile_col = 0; tile_col < cm->tile_cols; tile_col++) { |
| 4452 | // Every tile column except the last has a column size header. |
| 4453 | if (tile_col < cm->tile_cols - 1) { |
| 4454 | uint32_t tile_col_size = mem_get_le32(dst + rpos); |
| 4455 | rpos += 4; |
| 4456 | |
| 4457 | // Adjust the tile column size by the number of bytes removed |
| 4458 | // from the tile size fields. |
| 4459 | tile_col_size -= (4 - tsb) * cm->tile_rows; |
| 4460 | |
| 4461 | mem_put_varsize(dst + wpos, tcsb, tile_col_size); |
| 4462 | wpos += tcsb; |
| 4463 | } |
| 4464 | |
| 4465 | for (tile_row = 0; tile_row < cm->tile_rows; tile_row++) { |
| 4466 | // Every tile row, including the last, has a tile size header. |
| 4467 | uint32_t tile_header = mem_get_le32(dst + rpos); |
| 4468 | rpos += 4; |
| 4469 | |
| 4470 | // If this is a copy tile, shift the MSB (the copy flag) down to the |
| 4471 | // top bit of the narrower size field; a copy tile carries no data. |
| 4472 | if (tile_header >> 31 != 0) { |
| 4473 | if (tsb < 4) tile_header >>= 32 - 8 * tsb; |
| 4474 | mem_put_varsize(dst + wpos, tsb, tile_header); |
| 4475 | wpos += tsb; |
| 4476 | } else { |
| 4477 | mem_put_varsize(dst + wpos, tsb, tile_header); |
| 4478 | wpos += tsb; |
| 4479 | |
| 4480 | memmove(dst + wpos, dst + rpos, tile_header); |
| 4481 | rpos += tile_header; |
| 4482 | wpos += tile_header; |
| 4483 | } |
| 4484 | } |
| 4485 | } |
| 4486 | #else |
| 4487 | const int n_tiles = cm->tile_cols * cm->tile_rows; |
| 4488 | int n; |
| 4489 | |
| 4490 | for (n = 0; n < n_tiles; n++) { |
| 4491 | int tile_size; |
| 4492 | |
| 4493 | if (n == n_tiles - 1) { |
| 4494 | tile_size = data_size - rpos; |
| 4495 | } else { |
| 4496 | tile_size = mem_get_le32(dst + rpos); |
| 4497 | rpos += 4; |
| 4498 | mem_put_varsize(dst + wpos, tsb, tile_size); |
| 4499 | wpos += tsb; |
| 4500 | } |
| 4501 | |
| 4502 | memmove(dst + wpos, dst + rpos, tile_size); |
| 4503 | |
| 4504 | rpos += tile_size; |
| 4505 | wpos += tile_size; |
| 4506 | } |
| 4507 | #endif // CONFIG_EXT_TILE |
| 4508 | |
| 4509 | assert(rpos > wpos); |
| 4510 | assert(rpos == data_size); |
| 4511 | |
| 4512 | return wpos; |
| 4513 | } |
| 4514 | } |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4515 | #endif // CONFIG_TILE_GROUPS |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4516 | |
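// Informal summary: on the !CONFIG_TILE_GROUPS path this writes, in order,
// the uncompressed header (with placeholder fields for the tile size
// magnitudes and the 16-bit compressed header size), the compressed header,
// and the tile data, then back-fills the placeholders via 'saved_wb'. With
// CONFIG_TILE_GROUPS the headers are instead emitted from write_tiles()
// (note the &wb argument below).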
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4517 | void av1_pack_bitstream(AV1_COMP *const cpi, uint8_t *dst, size_t *size) { |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4518 | uint8_t *data = dst; |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4519 | #if !CONFIG_TILE_GROUPS |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4520 | uint32_t compressed_header_size; |
| 4521 | uint32_t uncompressed_header_size; |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4522 | struct aom_write_bit_buffer saved_wb; |
| 4523 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4524 | uint32_t data_size; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4525 | struct aom_write_bit_buffer wb = { data, 0 }; |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4526 | |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4527 | unsigned int max_tile_size; |
| 4528 | unsigned int max_tile_col_size; |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4529 | |
| 4530 | #if !CONFIG_TILE_GROUPS |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4531 | int tile_size_bytes; |
| 4532 | int tile_col_size_bytes; |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4533 | AV1_COMMON *const cm = &cpi->common; |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4534 | const int have_tiles = cm->tile_cols * cm->tile_rows > 1; |
| 4535 | |
| 4536 | #if CONFIG_BITSTREAM_DEBUG |
| 4537 | bitstream_queue_reset_write(); |
| 4538 | #endif |
| 4539 | |
| 4540 | // Write the uncompressed header |
| 4541 | write_uncompressed_header(cpi, &wb); |
| 4542 | |
| 4543 | #if CONFIG_EXT_REFS |
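// When merely signalling that an existing reference frame should be shown,
// the uncompressed header written above is the entire packet.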
| 4544 | if (cm->show_existing_frame) { |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4545 | *size = aom_wb_bytes_written(&wb); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4546 | return; |
| 4547 | } |
| 4548 | #endif // CONFIG_EXT_REFS |
| 4549 | |
| 4550 | // We do not know these sizes in advance. Output placeholder bits. |
| 4551 | saved_wb = wb; |
| 4552 | // Write tile size magnitudes |
| 4553 | if (have_tiles) { |
| 4554 | // Note that the last item in the uncompressed header is the data |
| 4555 | // describing tile configuration. |
| 4556 | #if CONFIG_EXT_TILE |
| 4557 | // Number of bytes in tile column size - 1 |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4558 | aom_wb_write_literal(&wb, 0, 2); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4559 | #endif // CONFIG_EXT_TILE |
| 4560 | // Number of bytes in tile size - 1 |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4561 | aom_wb_write_literal(&wb, 0, 2); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4562 | } |
| 4563 | // Size of compressed header |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4564 | aom_wb_write_literal(&wb, 0, 16); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4565 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4566 | uncompressed_header_size = (uint32_t)aom_wb_bytes_written(&wb); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4567 | data += uncompressed_header_size; |
| 4568 | |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4569 | aom_clear_system_state(); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4570 | |
| 4571 | // Write the compressed header |
| 4572 | compressed_header_size = write_compressed_header(cpi, data); |
| 4573 | data += compressed_header_size; |
| 4574 | |
| 4575 | // Write the encoded tile data |
| 4576 | data_size = write_tiles(cpi, data, &max_tile_size, &max_tile_col_size); |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4577 | #else |
| 4578 | data_size = write_tiles(cpi, &wb, &max_tile_size, &max_tile_col_size); |
| 4579 | #endif |
| 4580 | #if !CONFIG_TILE_GROUPS |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4581 | if (have_tiles) { |
| 4582 | data_size = |
| 4583 | remux_tiles(cm, data, data_size, max_tile_size, max_tile_col_size, |
| 4584 | &tile_size_bytes, &tile_col_size_bytes); |
| 4585 | } |
| 4586 | |
| 4587 | data += data_size; |
| 4588 | |
| 4589 | // Now fill in the gaps in the uncompressed header. |
| 4590 | if (have_tiles) { |
| 4591 | #if CONFIG_EXT_TILE |
| 4592 | assert(tile_col_size_bytes >= 1 && tile_col_size_bytes <= 4); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4593 | aom_wb_write_literal(&saved_wb, tile_col_size_bytes - 1, 2); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4594 | #endif // CONFIG_EXT_TILE |
| 4595 | assert(tile_size_bytes >= 1 && tile_size_bytes <= 4); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4596 | aom_wb_write_literal(&saved_wb, tile_size_bytes - 1, 2); |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4597 | } |
| 4598 | // TODO(jbb): Figure out what to do if compressed_header_size > 16 bits. |
| 4599 | assert(compressed_header_size <= 0xffff); |
Yaowu Xu | f883b42 | 2016-08-30 14:01:10 -0700 | [diff] [blame] | 4600 | aom_wb_write_literal(&saved_wb, compressed_header_size, 16); |
Thomas Davies | 80188d1 | 2016-10-26 16:08:35 -0700 | [diff] [blame] | 4601 | #else |
| 4602 | data += data_size; |
| 4603 | #endif |
Alex Converse | b0bbd60 | 2016-10-21 14:15:06 -0700 | [diff] [blame] | 4604 | #if CONFIG_ANS && ANS_REVERSE |
| 4605 | // Avoid aliasing the superframe index |
| 4606 | *data++ = 0; |
| 4607 | #endif |
Yaowu Xu | c27fc14 | 2016-08-22 16:08:15 -0700 | [diff] [blame] | 4608 | *size = data - dst; |
| 4609 | } |