/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./vp9_rtcd.h"
#include "./vpx_dsp_rtcd.h"
#include "./vpx_scale_rtcd.h"

#include "vpx_dsp/bitreader_buffer.h"
#include "vpx_dsp/bitreader.h"
#include "vpx_dsp/vpx_dsp_common.h"
#include "vpx_mem/vpx_mem.h"
#include "vpx_ports/mem.h"
#include "vpx_ports/mem_ops.h"
#include "vpx_scale/vpx_scale.h"
#include "vpx_util/vpx_thread.h"

#include "vp9/common/vp9_alloccommon.h"
#include "vp9/common/vp9_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_idct.h"
#include "vp9/common/vp9_thread_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_quant_common.h"
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_tile_common.h"

#include "vp9/decoder/vp9_decodeframe.h"
#include "vp9/decoder/vp9_detokenize.h"
#include "vp9/decoder/vp9_decodemv.h"
#include "vp9/decoder/vp9_decoder.h"
#include "vp9/decoder/vp9_dsubexp.h"

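// Assumed upper bound, in bytes, on the size of the uncompressed frame
// header; used to size the temporary buffer that holds a decrypted copy of
// the header when a decrypt callback is in use.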
#define MAX_VP9_HEADER_SIZE 80

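// Compound (two-reference) prediction requires references that point in
// opposite temporal directions, i.e. at least one reference whose sign bias
// differs from LAST_FRAME's.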
static int is_compound_reference_allowed(const VP9_COMMON *cm) {
  int i;
  for (i = 1; i < REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1])
      return 1;

  return 0;
}

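// Choose the compound reference configuration: the two references sharing a
// sign bias become the variable references and the remaining one is the
// fixed reference.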
static void setup_compound_reference_mode(VP9_COMMON *cm) {
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
          cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
                 cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
}

static int read_is_valid(const uint8_t *start, size_t len,
                         const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct vpx_read_bit_buffer *rb, int max) {
  const int data = vpx_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

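// The transform mode is coded as a 2-bit literal; the largest value
// (ALLOW_32X32) is extended by one more bit to distinguish TX_MODE_SELECT.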
static TX_MODE read_tx_mode(vpx_reader *r) {
  TX_MODE tx_mode = vpx_read_literal(r, 2);
  if (tx_mode == ALLOW_32X32)
    tx_mode += vpx_read_bit(r);
  return tx_mode;
}

static void read_tx_mode_probs(struct tx_probs *tx_probs, vpx_reader *r) {
  int i, j;

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 3; ++j)
      vp9_diff_update_prob(r, &tx_probs->p8x8[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 2; ++j)
      vp9_diff_update_prob(r, &tx_probs->p16x16[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 1; ++j)
      vp9_diff_update_prob(r, &tx_probs->p32x32[i][j]);
}

static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vpx_reader *r) {
  int i, j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
      vp9_diff_update_prob(r, &fc->switchable_interp_prob[j][i]);
}

static void read_inter_mode_probs(FRAME_CONTEXT *fc, vpx_reader *r) {
  int i, j;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
    for (j = 0; j < INTER_MODES - 1; ++j)
      vp9_diff_update_prob(r, &fc->inter_mode_probs[i][j]);
}

static REFERENCE_MODE read_frame_reference_mode(const VP9_COMMON *cm,
                                                vpx_reader *r) {
  if (is_compound_reference_allowed(cm)) {
    return vpx_read_bit(r) ? (vpx_read_bit(r) ? REFERENCE_MODE_SELECT
                                              : COMPOUND_REFERENCE)
                           : SINGLE_REFERENCE;
  } else {
    return SINGLE_REFERENCE;
  }
}

static void read_frame_reference_mode_probs(VP9_COMMON *cm, vpx_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_inter_prob[i]);

  if (cm->reference_mode != COMPOUND_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i) {
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][0]);
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][1]);
    }

  if (cm->reference_mode != SINGLE_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_ref_prob[i]);
}

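// MV probabilities are updated conditionally: an update flag is coded with
// probability MV_UPDATE_PROB, and the new value is a 7-bit literal mapped to
// the odd probability 2 * value + 1 so it can never be zero.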
static void update_mv_probs(vpx_prob *p, int n, vpx_reader *r) {
  int i;
  for (i = 0; i < n; ++i)
    if (vpx_read(r, MV_UPDATE_PROB))
      p[i] = (vpx_read_literal(r, 7) << 1) | 1;
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, vpx_reader *r) {
  int i, j;

  update_mv_probs(ctx->joints, MV_JOINTS - 1, r);

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    update_mv_probs(&comp_ctx->sign, 1, r);
    update_mv_probs(comp_ctx->classes, MV_CLASSES - 1, r);
    update_mv_probs(comp_ctx->class0, CLASS0_SIZE - 1, r);
    update_mv_probs(comp_ctx->bits, MV_OFFSET_BITS, r);
  }

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    for (j = 0; j < CLASS0_SIZE; ++j)
      update_mv_probs(comp_ctx->class0_fp[j], MV_FP_SIZE - 1, r);
    update_mv_probs(comp_ctx->fp, 3, r);
  }

  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}

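// Inverse transform for inter-coded blocks: the 4x4 inverse Walsh-Hadamard
// transform in lossless mode, otherwise the inverse DCT selected by tx_size.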
static void inverse_transform_block_inter(MACROBLOCKD *xd, int plane,
                                          const TX_SIZE tx_size,
                                          uint8_t *dst, int stride,
                                          int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  if (eob > 0) {
    tran_low_t *const dqcoeff = pd->dqcoeff;
#if CONFIG_VP9_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      if (xd->lossless) {
        vp9_highbd_iwht4x4_add(dqcoeff, dst, stride, eob, xd->bd);
      } else {
        switch (tx_size) {
          case TX_4X4:
            vp9_highbd_idct4x4_add(dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_8X8:
            vp9_highbd_idct8x8_add(dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_16X16:
            vp9_highbd_idct16x16_add(dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_32X32:
            vp9_highbd_idct32x32_add(dqcoeff, dst, stride, eob, xd->bd);
            break;
          default:
            assert(0 && "Invalid transform size");
        }
      }
    } else {
      if (xd->lossless) {
        vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
      } else {
        switch (tx_size) {
          case TX_4X4:
            vp9_idct4x4_add(dqcoeff, dst, stride, eob);
            break;
          case TX_8X8:
            vp9_idct8x8_add(dqcoeff, dst, stride, eob);
            break;
          case TX_16X16:
            vp9_idct16x16_add(dqcoeff, dst, stride, eob);
            break;
          case TX_32X32:
            vp9_idct32x32_add(dqcoeff, dst, stride, eob);
            break;
          default:
            assert(0 && "Invalid transform size");
            return;
        }
      }
    }
#else
    if (xd->lossless) {
      vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
    } else {
      switch (tx_size) {
        case TX_4X4:
          vp9_idct4x4_add(dqcoeff, dst, stride, eob);
          break;
        case TX_8X8:
          vp9_idct8x8_add(dqcoeff, dst, stride, eob);
          break;
        case TX_16X16:
          vp9_idct16x16_add(dqcoeff, dst, stride, eob);
          break;
        case TX_32X32:
          vp9_idct32x32_add(dqcoeff, dst, stride, eob);
          break;
        default:
          assert(0 && "Invalid transform size");
          return;
      }
    }
#endif  // CONFIG_VP9_HIGHBITDEPTH

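    // Clear only the dequantized coefficients the scan could have touched,
    // using eob as a conservative, scan-order-based bound: a lone DC
    // coefficient, a small leading region for short scans, or the whole
    // block otherwise.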
    if (eob == 1) {
      dqcoeff[0] = 0;
    } else {
      if (tx_size <= TX_16X16 && eob <= 10)
        memset(dqcoeff, 0, 4 * (4 << tx_size) * sizeof(dqcoeff[0]));
      else if (tx_size == TX_32X32 && eob <= 34)
        memset(dqcoeff, 0, 256 * sizeof(dqcoeff[0]));
      else
        memset(dqcoeff, 0, (16 << (tx_size << 1)) * sizeof(dqcoeff[0]));
    }
  }
}

static void inverse_transform_block_intra(MACROBLOCKD *xd, int plane,
                                          const TX_TYPE tx_type,
                                          const TX_SIZE tx_size,
                                          uint8_t *dst, int stride,
                                          int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  if (eob > 0) {
    tran_low_t *const dqcoeff = pd->dqcoeff;
#if CONFIG_VP9_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      if (xd->lossless) {
        vp9_highbd_iwht4x4_add(dqcoeff, dst, stride, eob, xd->bd);
      } else {
        switch (tx_size) {
          case TX_4X4:
            vp9_highbd_iht4x4_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_8X8:
            vp9_highbd_iht8x8_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_16X16:
            vp9_highbd_iht16x16_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_32X32:
            vp9_highbd_idct32x32_add(dqcoeff, dst, stride, eob, xd->bd);
            break;
          default:
            assert(0 && "Invalid transform size");
        }
      }
    } else {
      if (xd->lossless) {
        vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
      } else {
        switch (tx_size) {
          case TX_4X4:
            vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_8X8:
            vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_16X16:
            vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_32X32:
            vp9_idct32x32_add(dqcoeff, dst, stride, eob);
            break;
          default:
            assert(0 && "Invalid transform size");
            return;
        }
      }
    }
#else
    if (xd->lossless) {
      vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
    } else {
      switch (tx_size) {
        case TX_4X4:
          vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_8X8:
          vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_16X16:
          vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_32X32:
          vp9_idct32x32_add(dqcoeff, dst, stride, eob);
          break;
        default:
          assert(0 && "Invalid transform size");
          return;
      }
    }
#endif  // CONFIG_VP9_HIGHBITDEPTH

    if (eob == 1) {
      dqcoeff[0] = 0;
    } else {
      if (tx_type == DCT_DCT && tx_size <= TX_16X16 && eob <= 10)
        memset(dqcoeff, 0, 4 * (4 << tx_size) * sizeof(dqcoeff[0]));
      else if (tx_size == TX_32X32 && eob <= 34)
        memset(dqcoeff, 0, 256 * sizeof(dqcoeff[0]));
      else
        memset(dqcoeff, 0, (16 << (tx_size << 1)) * sizeof(dqcoeff[0]));
    }
  }
}

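// For intra blocks the transform type is derived from the prediction mode
// (luma only); chroma planes and lossless blocks always use DCT_DCT and the
// default scan order.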
static void predict_and_reconstruct_intra_block(MACROBLOCKD *const xd,
                                                vpx_reader *r,
                                                MODE_INFO *const mi,
                                                int plane,
                                                int row, int col,
                                                TX_SIZE tx_size) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  PREDICTION_MODE mode = (plane == 0) ? mi->mode : mi->uv_mode;
  uint8_t *dst;
  dst = &pd->dst.buf[4 * row * pd->dst.stride + 4 * col];

  if (mi->sb_type < BLOCK_8X8)
    if (plane == 0)
      mode = xd->mi[0]->bmi[(row << 1) + col].as_mode;

  vp9_predict_intra_block(xd, pd->n4_wl, tx_size, mode,
                          dst, pd->dst.stride, dst, pd->dst.stride,
                          col, row, plane);

  if (!mi->skip) {
    const TX_TYPE tx_type = (plane || xd->lossless) ?
        DCT_DCT : intra_mode_to_tx_type_lookup[mode];
    const scan_order *sc = (plane || xd->lossless) ?
        &vp9_default_scan_orders[tx_size] : &vp9_scan_orders[tx_size][tx_type];
    const int eob = vp9_decode_block_tokens(xd, plane, sc, col, row, tx_size,
                                            r, mi->segment_id);
    inverse_transform_block_intra(xd, plane, tx_type, tx_size,
                                  dst, pd->dst.stride, eob);
  }
}

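// Inter-coded blocks always use the default (DCT) scan order. Returns the
// block's eob so the caller can detect an all-zero residual.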
static int reconstruct_inter_block(MACROBLOCKD *const xd, vpx_reader *r,
                                   MODE_INFO *const mi, int plane,
                                   int row, int col, TX_SIZE tx_size) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  const scan_order *sc = &vp9_default_scan_orders[tx_size];
  const int eob = vp9_decode_block_tokens(xd, plane, sc, col, row, tx_size, r,
                                          mi->segment_id);

  inverse_transform_block_inter(xd, plane, tx_size,
                                &pd->dst.buf[4 * row * pd->dst.stride + 4 * col],
                                pd->dst.stride, eob);
  return eob;
}

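// Copy a b_w x b_h reference region starting at (x, y), which may lie partly
// outside the w x h reference frame, into dst, replicating the nearest edge
// pixels to fill the out-of-frame portion.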
static void build_mc_border(const uint8_t *src, int src_stride,
                            uint8_t *dst, int dst_stride,
                            int x, int y, int b_w, int b_h, int w, int h) {
  // Get a pointer to the start of the real data for this row.
  const uint8_t *ref_row = src - x - y * src_stride;

  if (y >= h)
    ref_row += (h - 1) * src_stride;
  else if (y > 0)
    ref_row += y * src_stride;

  do {
    int right = 0, copy;
    int left = x < 0 ? -x : 0;

    if (left > b_w)
      left = b_w;

    if (x + b_w > w)
      right = x + b_w - w;

    if (right > b_w)
      right = b_w;

    copy = b_w - left - right;

    if (left)
      memset(dst, ref_row[0], left);

    if (copy)
      memcpy(dst + left, ref_row + x + left, copy);

    if (right)
      memset(dst + left + copy, ref_row[w - 1], right);

    dst += dst_stride;
    ++y;

    if (y > 0 && y < h)
      ref_row += src_stride;
  } while (--b_h);
}

#if CONFIG_VP9_HIGHBITDEPTH
static void high_build_mc_border(const uint8_t *src8, int src_stride,
                                 uint16_t *dst, int dst_stride,
                                 int x, int y, int b_w, int b_h,
                                 int w, int h) {
  // Get a pointer to the start of the real data for this row.
  const uint16_t *src = CONVERT_TO_SHORTPTR(src8);
  const uint16_t *ref_row = src - x - y * src_stride;

  if (y >= h)
    ref_row += (h - 1) * src_stride;
  else if (y > 0)
    ref_row += y * src_stride;

  do {
    int right = 0, copy;
    int left = x < 0 ? -x : 0;

    if (left > b_w)
      left = b_w;

    if (x + b_w > w)
      right = x + b_w - w;

    if (right > b_w)
      right = b_w;

    copy = b_w - left - right;

    if (left)
      vpx_memset16(dst, ref_row[0], left);

    if (copy)
      memcpy(dst + left, ref_row + x + left, copy * sizeof(uint16_t));

    if (right)
      vpx_memset16(dst + left + copy, ref_row[w - 1], right);

    dst += dst_stride;
    ++y;

    if (y > 0 && y < h)
      ref_row += src_stride;
  } while (--b_h);
}
#endif  // CONFIG_VP9_HIGHBITDEPTH

#if CONFIG_VP9_HIGHBITDEPTH
static void extend_and_predict(const uint8_t *buf_ptr1, int pre_buf_stride,
                               int x0, int y0, int b_w, int b_h,
                               int frame_width, int frame_height,
                               int border_offset,
                               uint8_t *const dst, int dst_buf_stride,
                               int subpel_x, int subpel_y,
                               const InterpKernel *kernel,
                               const struct scale_factors *sf,
                               MACROBLOCKD *xd,
                               int w, int h, int ref, int xs, int ys) {
  DECLARE_ALIGNED(16, uint16_t, mc_buf_high[80 * 2 * 80 * 2]);
  const uint8_t *buf_ptr;

  if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    high_build_mc_border(buf_ptr1, pre_buf_stride, mc_buf_high, b_w,
                         x0, y0, b_w, b_h, frame_width, frame_height);
    buf_ptr = CONVERT_TO_BYTEPTR(mc_buf_high) + border_offset;
  } else {
    build_mc_border(buf_ptr1, pre_buf_stride, (uint8_t *)mc_buf_high, b_w,
                    x0, y0, b_w, b_h, frame_width, frame_height);
    buf_ptr = ((uint8_t *)mc_buf_high) + border_offset;
  }

  if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    high_inter_predictor(buf_ptr, b_w, dst, dst_buf_stride, subpel_x,
                         subpel_y, sf, w, h, ref, kernel, xs, ys, xd->bd);
  } else {
    inter_predictor(buf_ptr, b_w, dst, dst_buf_stride, subpel_x,
                    subpel_y, sf, w, h, ref, kernel, xs, ys);
  }
}
#else
static void extend_and_predict(const uint8_t *buf_ptr1, int pre_buf_stride,
                               int x0, int y0, int b_w, int b_h,
                               int frame_width, int frame_height,
                               int border_offset,
                               uint8_t *const dst, int dst_buf_stride,
                               int subpel_x, int subpel_y,
                               const InterpKernel *kernel,
                               const struct scale_factors *sf,
                               int w, int h, int ref, int xs, int ys) {
  DECLARE_ALIGNED(16, uint8_t, mc_buf[80 * 2 * 80 * 2]);
  const uint8_t *buf_ptr;

  build_mc_border(buf_ptr1, pre_buf_stride, mc_buf, b_w,
                  x0, y0, b_w, b_h, frame_width, frame_height);
  buf_ptr = mc_buf + border_offset;

  inter_predictor(buf_ptr, b_w, dst, dst_buf_stride, subpel_x,
                  subpel_y, sf, w, h, ref, kernel, xs, ys);
}
#endif  // CONFIG_VP9_HIGHBITDEPTH

static void dec_build_inter_predictors(VPxWorker *const worker,
                                       MACROBLOCKD *xd,
                                       int plane, int bw, int bh, int x,
                                       int y, int w, int h, int mi_x, int mi_y,
                                       const InterpKernel *kernel,
                                       const struct scale_factors *sf,
                                       struct buf_2d *pre_buf,
                                       struct buf_2d *dst_buf, const MV *mv,
                                       RefCntBuffer *ref_frame_buf,
                                       int is_scaled, int ref) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  uint8_t *const dst = dst_buf->buf + dst_buf->stride * y + x;
  MV32 scaled_mv;
  int xs, ys, x0, y0, x0_16, y0_16, frame_width, frame_height,
      buf_stride, subpel_x, subpel_y;
  uint8_t *ref_frame, *buf_ptr;

  // Get reference frame pointer, width and height.
  if (plane == 0) {
    frame_width = ref_frame_buf->buf.y_crop_width;
    frame_height = ref_frame_buf->buf.y_crop_height;
    ref_frame = ref_frame_buf->buf.y_buffer;
  } else {
    frame_width = ref_frame_buf->buf.uv_crop_width;
    frame_height = ref_frame_buf->buf.uv_crop_height;
    ref_frame = plane == 1 ? ref_frame_buf->buf.u_buffer
                           : ref_frame_buf->buf.v_buffer;
  }

  if (is_scaled) {
    const MV mv_q4 = clamp_mv_to_umv_border_sb(xd, mv, bw, bh,
                                               pd->subsampling_x,
                                               pd->subsampling_y);
    // Co-ordinate of containing block to pixel precision.
    int x_start = (-xd->mb_to_left_edge >> (3 + pd->subsampling_x));
    int y_start = (-xd->mb_to_top_edge >> (3 + pd->subsampling_y));
#if CONFIG_BETTER_HW_COMPATIBILITY
    assert(xd->mi[0]->sb_type != BLOCK_4X8 &&
           xd->mi[0]->sb_type != BLOCK_8X4);
    assert(mv_q4.row == mv->row * (1 << (1 - pd->subsampling_y)) &&
           mv_q4.col == mv->col * (1 << (1 - pd->subsampling_x)));
#endif
    // Co-ordinate of the block to 1/16th pixel precision.
    x0_16 = (x_start + x) << SUBPEL_BITS;
    y0_16 = (y_start + y) << SUBPEL_BITS;

    // Co-ordinate of current block in reference frame
    // to 1/16th pixel precision.
    x0_16 = sf->scale_value_x(x0_16, sf);
    y0_16 = sf->scale_value_y(y0_16, sf);

    // Map the top left corner of the block into the reference frame.
    x0 = sf->scale_value_x(x_start + x, sf);
    y0 = sf->scale_value_y(y_start + y, sf);

    // Scale the MV and incorporate the sub-pixel offset of the block
    // in the reference frame.
    scaled_mv = vp9_scale_mv(&mv_q4, mi_x + x, mi_y + y, sf);
    xs = sf->x_step_q4;
    ys = sf->y_step_q4;
  } else {
    // Co-ordinate of containing block to pixel precision.
    x0 = (-xd->mb_to_left_edge >> (3 + pd->subsampling_x)) + x;
    y0 = (-xd->mb_to_top_edge >> (3 + pd->subsampling_y)) + y;

    // Co-ordinate of the block to 1/16th pixel precision.
    x0_16 = x0 << SUBPEL_BITS;
    y0_16 = y0 << SUBPEL_BITS;

    scaled_mv.row = mv->row * (1 << (1 - pd->subsampling_y));
    scaled_mv.col = mv->col * (1 << (1 - pd->subsampling_x));
    xs = ys = 16;
  }
  subpel_x = scaled_mv.col & SUBPEL_MASK;
  subpel_y = scaled_mv.row & SUBPEL_MASK;

  // Calculate the top left corner of the best matching block in the
  // reference frame.
  x0 += scaled_mv.col >> SUBPEL_BITS;
  y0 += scaled_mv.row >> SUBPEL_BITS;
  x0_16 += scaled_mv.col;
  y0_16 += scaled_mv.row;

  // Get reference block pointer.
  buf_ptr = ref_frame + y0 * pre_buf->stride + x0;
  buf_stride = pre_buf->stride;

  // Do border extension if there is motion or the
  // width/height is not a multiple of 8 pixels.
  if (is_scaled || scaled_mv.col || scaled_mv.row ||
      (frame_width & 0x7) || (frame_height & 0x7)) {
    int y1 = ((y0_16 + (h - 1) * ys) >> SUBPEL_BITS) + 1;

    // Get reference block bottom right horizontal coordinate.
    int x1 = ((x0_16 + (w - 1) * xs) >> SUBPEL_BITS) + 1;
    int x_pad = 0, y_pad = 0;

    if (subpel_x || (sf->x_step_q4 != SUBPEL_SHIFTS)) {
      x0 -= VP9_INTERP_EXTEND - 1;
      x1 += VP9_INTERP_EXTEND;
      x_pad = 1;
    }

    if (subpel_y || (sf->y_step_q4 != SUBPEL_SHIFTS)) {
      y0 -= VP9_INTERP_EXTEND - 1;
      y1 += VP9_INTERP_EXTEND;
      y_pad = 1;
    }

    // Wait until reference block is ready. Pad 7 more pixels as last 7
    // pixels of each superblock row can be changed by next superblock row.
    if (worker != NULL)
      vp9_frameworker_wait(worker, ref_frame_buf,
                           VPXMAX(0, (y1 + 7)) << (plane == 0 ? 0 : 1));

    // Skip border extension if block is inside the frame.
    if (x0 < 0 || x0 > frame_width - 1 || x1 < 0 || x1 > frame_width - 1 ||
        y0 < 0 || y0 > frame_height - 1 || y1 < 0 || y1 > frame_height - 1) {
      // Extend the border.
      const uint8_t *const buf_ptr1 = ref_frame + y0 * buf_stride + x0;
      const int b_w = x1 - x0 + 1;
      const int b_h = y1 - y0 + 1;
      const int border_offset = y_pad * 3 * b_w + x_pad * 3;

      extend_and_predict(buf_ptr1, buf_stride, x0, y0, b_w, b_h,
                         frame_width, frame_height, border_offset,
                         dst, dst_buf->stride,
                         subpel_x, subpel_y,
                         kernel, sf,
#if CONFIG_VP9_HIGHBITDEPTH
                         xd,
#endif
                         w, h, ref, xs, ys);
      return;
    }
  } else {
    // Wait until reference block is ready. Pad 7 more pixels as last 7
    // pixels of each superblock row can be changed by next superblock row.
    if (worker != NULL) {
      const int y1 = (y0_16 + (h - 1) * ys) >> SUBPEL_BITS;
      vp9_frameworker_wait(worker, ref_frame_buf,
                           VPXMAX(0, (y1 + 7)) << (plane == 0 ? 0 : 1));
    }
  }
#if CONFIG_VP9_HIGHBITDEPTH
  if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
    high_inter_predictor(buf_ptr, buf_stride, dst, dst_buf->stride, subpel_x,
                         subpel_y, sf, w, h, ref, kernel, xs, ys, xd->bd);
  } else {
    inter_predictor(buf_ptr, buf_stride, dst, dst_buf->stride, subpel_x,
                    subpel_y, sf, w, h, ref, kernel, xs, ys);
  }
#else
  inter_predictor(buf_ptr, buf_stride, dst, dst_buf->stride, subpel_x,
                  subpel_y, sf, w, h, ref, kernel, xs, ys);
#endif  // CONFIG_VP9_HIGHBITDEPTH
}

static void dec_build_inter_predictors_sb(VP9Decoder *const pbi,
                                          MACROBLOCKD *xd,
                                          int mi_row, int mi_col) {
  int plane;
  const int mi_x = mi_col * MI_SIZE;
  const int mi_y = mi_row * MI_SIZE;
  const MODE_INFO *mi = xd->mi[0];
  const InterpKernel *kernel = vp9_filter_kernels[mi->interp_filter];
  const BLOCK_SIZE sb_type = mi->sb_type;
  const int is_compound = has_second_ref(mi);
  int ref;
  int is_scaled;
  VPxWorker *const fwo = pbi->frame_parallel_decode ?
      pbi->frame_worker_owner : NULL;

  for (ref = 0; ref < 1 + is_compound; ++ref) {
    const MV_REFERENCE_FRAME frame = mi->ref_frame[ref];
    RefBuffer *ref_buf = &pbi->common.frame_refs[frame - LAST_FRAME];
    const struct scale_factors *const sf = &ref_buf->sf;
    const int idx = ref_buf->idx;
    BufferPool *const pool = pbi->common.buffer_pool;
    RefCntBuffer *const ref_frame_buf = &pool->frame_bufs[idx];

    if (!vp9_is_valid_scale(sf))
      vpx_internal_error(xd->error_info, VPX_CODEC_UNSUP_BITSTREAM,
                         "Reference frame has invalid dimensions");

    is_scaled = vp9_is_scaled(sf);
    vp9_setup_pre_planes(xd, ref, ref_buf->buf, mi_row, mi_col,
                         is_scaled ? sf : NULL);
    xd->block_refs[ref] = ref_buf;

    if (sb_type < BLOCK_8X8) {
      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        struct macroblockd_plane *const pd = &xd->plane[plane];
        struct buf_2d *const dst_buf = &pd->dst;
        const int num_4x4_w = pd->n4_w;
        const int num_4x4_h = pd->n4_h;
        const int n4w_x4 = 4 * num_4x4_w;
        const int n4h_x4 = 4 * num_4x4_h;
        struct buf_2d *const pre_buf = &pd->pre[ref];
        int i = 0, x, y;
        for (y = 0; y < num_4x4_h; ++y) {
          for (x = 0; x < num_4x4_w; ++x) {
            const MV mv = average_split_mvs(pd, mi, ref, i++);
            dec_build_inter_predictors(fwo, xd, plane, n4w_x4, n4h_x4,
                                       4 * x, 4 * y, 4, 4, mi_x, mi_y, kernel,
                                       sf, pre_buf, dst_buf, &mv,
                                       ref_frame_buf, is_scaled, ref);
          }
        }
      }
    } else {
      const MV mv = mi->mv[ref].as_mv;
      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        struct macroblockd_plane *const pd = &xd->plane[plane];
        struct buf_2d *const dst_buf = &pd->dst;
        const int num_4x4_w = pd->n4_w;
        const int num_4x4_h = pd->n4_h;
        const int n4w_x4 = 4 * num_4x4_w;
        const int n4h_x4 = 4 * num_4x4_h;
        struct buf_2d *const pre_buf = &pd->pre[ref];
        dec_build_inter_predictors(fwo, xd, plane, n4w_x4, n4h_x4,
                                   0, 0, n4w_x4, n4h_x4, mi_x, mi_y, kernel,
                                   sf, pre_buf, dst_buf, &mv,
                                   ref_frame_buf, is_scaled, ref);
      }
    }
  }
}

static INLINE TX_SIZE dec_get_uv_tx_size(const MODE_INFO *mi,
                                         int n4_wl, int n4_hl) {
  // get minimum log2 num4x4s dimension
  const int x = VPXMIN(n4_wl, n4_hl);
  return VPXMIN(mi->tx_size, x);
}

static INLINE void dec_reset_skip_context(MACROBLOCKD *xd) {
  int i;
  for (i = 0; i < MAX_MB_PLANE; i++) {
    struct macroblockd_plane *const pd = &xd->plane[i];
    memset(pd->above_context, 0, sizeof(ENTROPY_CONTEXT) * pd->n4_w);
    memset(pd->left_context, 0, sizeof(ENTROPY_CONTEXT) * pd->n4_h);
  }
}

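// Record each plane's block geometry in units of 4x4 blocks (n4_w/n4_h) and
// as log2 values (n4_wl/n4_hl), derived from the luma block size and the
// plane's subsampling.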
static void set_plane_n4(MACROBLOCKD *const xd, int bw, int bh, int bwl,
                         int bhl) {
  int i;
  for (i = 0; i < MAX_MB_PLANE; i++) {
    xd->plane[i].n4_w = (bw << 1) >> xd->plane[i].subsampling_x;
    xd->plane[i].n4_h = (bh << 1) >> xd->plane[i].subsampling_y;
    xd->plane[i].n4_wl = bwl - xd->plane[i].subsampling_x;
    xd->plane[i].n4_hl = bhl - xd->plane[i].subsampling_y;
  }
}

static MODE_INFO *set_offsets(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                              BLOCK_SIZE bsize, int mi_row, int mi_col,
                              int bw, int bh, int x_mis, int y_mis,
                              int bwl, int bhl) {
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;
  const TileInfo *const tile = &xd->tile;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  // TODO(slavarnway): Generate sb_type based on bwl and bhl, instead of
  // passing bsize from decode_partition().
  xd->mi[0]->sb_type = bsize;
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) {
      xd->mi[y * cm->mi_stride + x] = xd->mi[0];
    }

  set_plane_n4(xd, bw, bh, bwl, bhl);

  set_skip_context(xd, mi_row, mi_col);

  // Distance of MB to the various image edges. These are specified to 8th
  // pel as they are always compared to values in 1/8th pel units.
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);

  vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col);
  return xd->mi[0];
}

static void decode_block(VP9Decoder *const pbi, MACROBLOCKD *const xd,
                         int mi_row, int mi_col,
                         vpx_reader *r, BLOCK_SIZE bsize,
                         int bwl, int bhl) {
  VP9_COMMON *const cm = &pbi->common;
  const int less8x8 = bsize < BLOCK_8X8;
  const int bw = 1 << (bwl - 1);
  const int bh = 1 << (bhl - 1);
  const int x_mis = VPXMIN(bw, cm->mi_cols - mi_col);
  const int y_mis = VPXMIN(bh, cm->mi_rows - mi_row);

  MODE_INFO *mi = set_offsets(cm, xd, bsize, mi_row, mi_col,
                              bw, bh, x_mis, y_mis, bwl, bhl);

  if (bsize >= BLOCK_8X8 && (cm->subsampling_x || cm->subsampling_y)) {
    const BLOCK_SIZE uv_subsize =
        ss_size_lookup[bsize][cm->subsampling_x][cm->subsampling_y];
    if (uv_subsize == BLOCK_INVALID)
      vpx_internal_error(xd->error_info,
                         VPX_CODEC_CORRUPT_FRAME, "Invalid block size.");
  }

  vp9_read_mode_info(pbi, xd, mi_row, mi_col, r, x_mis, y_mis);

  if (mi->skip) {
    dec_reset_skip_context(xd);
  }

  if (!is_inter_block(mi)) {
    int plane;
    for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      const TX_SIZE tx_size =
          plane ? dec_get_uv_tx_size(mi, pd->n4_wl, pd->n4_hl)
                : mi->tx_size;
      const int num_4x4_w = pd->n4_w;
      const int num_4x4_h = pd->n4_h;
      const int step = (1 << tx_size);
      int row, col;
      const int max_blocks_wide = num_4x4_w + (xd->mb_to_right_edge >= 0 ?
          0 : xd->mb_to_right_edge >> (5 + pd->subsampling_x));
      const int max_blocks_high = num_4x4_h + (xd->mb_to_bottom_edge >= 0 ?
          0 : xd->mb_to_bottom_edge >> (5 + pd->subsampling_y));

      for (row = 0; row < max_blocks_high; row += step)
        for (col = 0; col < max_blocks_wide; col += step)
          predict_and_reconstruct_intra_block(xd, r, mi, plane,
                                              row, col, tx_size);
    }
  } else {
    // Prediction
    dec_build_inter_predictors_sb(pbi, xd, mi_row, mi_col);

    // Reconstruction
    if (!mi->skip) {
      int eobtotal = 0;
      int plane;

      for (plane = 0; plane < MAX_MB_PLANE; ++plane) {
        const struct macroblockd_plane *const pd = &xd->plane[plane];
        const TX_SIZE tx_size =
            plane ? dec_get_uv_tx_size(mi, pd->n4_wl, pd->n4_hl)
                  : mi->tx_size;
        const int num_4x4_w = pd->n4_w;
        const int num_4x4_h = pd->n4_h;
        const int step = (1 << tx_size);
        int row, col;
        const int max_blocks_wide = num_4x4_w + (xd->mb_to_right_edge >= 0 ?
            0 : xd->mb_to_right_edge >> (5 + pd->subsampling_x));
        const int max_blocks_high = num_4x4_h + (xd->mb_to_bottom_edge >= 0 ?
            0 : xd->mb_to_bottom_edge >> (5 + pd->subsampling_y));

        for (row = 0; row < max_blocks_high; row += step)
          for (col = 0; col < max_blocks_wide; col += step)
            eobtotal += reconstruct_inter_block(xd, r, mi, plane, row, col,
                                                tx_size);
      }

      if (!less8x8 && eobtotal == 0)
        mi->skip = 1;  // skip loopfilter
    }
  }

  xd->corrupted |= vpx_reader_has_error(r);

  if (cm->lf.filter_level) {
    vp9_build_mask(cm, mi, mi_row, mi_col, bw, bh);
  }
}

Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 930 | static INLINE int dec_partition_plane_context(const MACROBLOCKD *xd, |
| 931 | int mi_row, int mi_col, |
| 932 | int bsl) { |
| 933 | const PARTITION_CONTEXT *above_ctx = xd->above_seg_context + mi_col; |
| 934 | const PARTITION_CONTEXT *left_ctx = xd->left_seg_context + (mi_row & MI_MASK); |
| 935 | int above = (*above_ctx >> bsl) & 1, left = (*left_ctx >> bsl) & 1; |
| 936 | |
| 937 | // assert(bsl >= 0); |
| 938 | |
| 939 | return (left * 2 + above) + bsl * PARTITION_PLOFFSET; |
| 940 | } |
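| | // For example, with bsl == 0 (an 8x8 block) whose above and left neighbors |
| | // were both coded with further splits, above == left == 1 and ctx == 3; |
| | // each larger block-size level adds PARTITION_PLOFFSET (4) to the context. |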
| 941 | |
| 942 | static INLINE void dec_update_partition_context(MACROBLOCKD *xd, |
| 943 | int mi_row, int mi_col, |
| 944 | BLOCK_SIZE subsize, |
| 945 | int bw) { |
| 946 | PARTITION_CONTEXT *const above_ctx = xd->above_seg_context + mi_col; |
| 947 | PARTITION_CONTEXT *const left_ctx = xd->left_seg_context + (mi_row & MI_MASK); |
| 948 | |
| 949 | // Update the partition context at the end. Set the partition bits |
| 950 | // of block sizes larger than the current one to one, and the partition |
| 951 | // bits of smaller block sizes to zero. |
| 952 | memset(above_ctx, partition_context_lookup[subsize].above, bw); |
| 953 | memset(left_ctx, partition_context_lookup[subsize].left, bw); |
| 954 | } |
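| | // E.g. a horizontal split of a 64x64 (subsize BLOCK_64X32) overwrites all |
| | // 8 covered above/left entries with the per-size bit patterns precomputed |
| | // in partition_context_lookup. |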
| 955 | |
Scott LaVarnway | ce6a6c5 | 2015-06-16 11:50:54 -0700 | [diff] [blame] | 956 | static PARTITION_TYPE read_partition(MACROBLOCKD *xd, int mi_row, int mi_col, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 957 | vpx_reader *r, |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 958 | int has_rows, int has_cols, int bsl) { |
| 959 | const int ctx = dec_partition_plane_context(xd, mi_row, mi_col, bsl); |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 960 | const vpx_prob *const probs = get_partition_probs(xd, ctx); |
Scott LaVarnway | f779dba | 2015-06-02 09:06:00 -0700 | [diff] [blame] | 961 | FRAME_COUNTS *counts = xd->counts; |
Dmitry Kovalev | dde8069 | 2013-11-01 18:23:06 -0700 | [diff] [blame] | 962 | PARTITION_TYPE p; |
Dmitry Kovalev | 19cf72e | 2013-10-28 15:14:45 -0700 | [diff] [blame] | 963 | |
| 964 | if (has_rows && has_cols) |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 965 | p = (PARTITION_TYPE)vpx_read_tree(r, vp9_partition_tree, probs); |
Dmitry Kovalev | 19cf72e | 2013-10-28 15:14:45 -0700 | [diff] [blame] | 966 | else if (!has_rows && has_cols) |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 967 | p = vpx_read(r, probs[1]) ? PARTITION_SPLIT : PARTITION_HORZ; |
Dmitry Kovalev | 19cf72e | 2013-10-28 15:14:45 -0700 | [diff] [blame] | 968 | else if (has_rows && !has_cols) |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 969 | p = vpx_read(r, probs[2]) ? PARTITION_SPLIT : PARTITION_VERT; |
Dmitry Kovalev | 19cf72e | 2013-10-28 15:14:45 -0700 | [diff] [blame] | 970 | else |
Dmitry Kovalev | dde8069 | 2013-11-01 18:23:06 -0700 | [diff] [blame] | 971 | p = PARTITION_SPLIT; |
| 972 | |
Scott LaVarnway | f779dba | 2015-06-02 09:06:00 -0700 | [diff] [blame] | 973 | if (counts) |
Yunqing Wang | 85a9bc0 | 2015-01-30 10:14:44 -0800 | [diff] [blame] | 974 | ++counts->partition[ctx][p]; |
Dmitry Kovalev | dde8069 | 2013-11-01 18:23:06 -0700 | [diff] [blame] | 975 | |
| 976 | return p; |
Dmitry Kovalev | 19cf72e | 2013-10-28 15:14:45 -0700 | [diff] [blame] | 977 | } |
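| | // At frame edges only the partitions that fit are coded: a block crossing |
| | // the bottom edge (has_rows == 0) narrows the choice to HORZ vs. SPLIT, |
| | // one crossing the right edge to VERT vs. SPLIT, and one crossing both |
| | // is an implicit SPLIT with no bits read. |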
| 978 | |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 979 | // TODO(slavarnway): eliminate bsize and subsize in future commits |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 980 | static void decode_partition(VP9Decoder *const pbi, MACROBLOCKD *const xd, |
Dmitry Kovalev | 90fed85 | 2014-03-12 15:11:07 -0700 | [diff] [blame] | 981 | int mi_row, int mi_col, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 982 | vpx_reader* r, BLOCK_SIZE bsize, int n4x4_l2) { |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 983 | VP9_COMMON *const cm = &pbi->common; |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 984 | const int n8x8_l2 = n4x4_l2 - 1; |
| 985 | const int num_8x8_wh = 1 << n8x8_l2; |
| 986 | const int hbs = num_8x8_wh >> 1; |
Dmitry Kovalev | 2901bf2 | 2013-10-30 12:17:05 -0700 | [diff] [blame] | 987 | PARTITION_TYPE partition; |
hkuang | 87c21a9 | 2015-05-06 16:31:13 -0700 | [diff] [blame] | 988 | BLOCK_SIZE subsize; |
Scott LaVarnway | ce6a6c5 | 2015-06-16 11:50:54 -0700 | [diff] [blame] | 989 | const int has_rows = (mi_row + hbs) < cm->mi_rows; |
| 990 | const int has_cols = (mi_col + hbs) < cm->mi_cols; |
Jingning Han | 90a91cc | 2013-04-16 00:18:02 -0700 | [diff] [blame] | 991 | |
James Zern | 924d745 | 2013-08-22 20:03:08 -0700 | [diff] [blame] | 992 | if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) |
Jingning Han | 90a91cc | 2013-04-16 00:18:02 -0700 | [diff] [blame] | 993 | return; |
| 994 | |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 995 | partition = read_partition(xd, mi_row, mi_col, r, has_rows, has_cols, |
| 996 | n8x8_l2); |
| 997 | subsize = subsize_lookup[partition][bsize]; // get_subsize(bsize, partition); |
| 998 | if (!hbs) { |
| 999 | // calculate bmode block dimensions (log 2) |
| 1000 | xd->bmode_blocks_wl = 1 >> !!(partition & PARTITION_VERT); |
| 1001 | xd->bmode_blocks_hl = 1 >> !!(partition & PARTITION_HORZ); |
| 1002 | decode_block(pbi, xd, mi_row, mi_col, r, subsize, 1, 1); |
Dmitry Kovalev | 2901bf2 | 2013-10-30 12:17:05 -0700 | [diff] [blame] | 1003 | } else { |
| 1004 | switch (partition) { |
| 1005 | case PARTITION_NONE: |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 1006 | decode_block(pbi, xd, mi_row, mi_col, r, subsize, n4x4_l2, n4x4_l2); |
Dmitry Kovalev | 2901bf2 | 2013-10-30 12:17:05 -0700 | [diff] [blame] | 1007 | break; |
| 1008 | case PARTITION_HORZ: |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 1009 | decode_block(pbi, xd, mi_row, mi_col, r, subsize, n4x4_l2, n8x8_l2); |
Scott LaVarnway | ce6a6c5 | 2015-06-16 11:50:54 -0700 | [diff] [blame] | 1010 | if (has_rows) |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 1011 | decode_block(pbi, xd, mi_row + hbs, mi_col, r, subsize, n4x4_l2, |
| 1012 | n8x8_l2); |
Dmitry Kovalev | 2901bf2 | 2013-10-30 12:17:05 -0700 | [diff] [blame] | 1013 | break; |
| 1014 | case PARTITION_VERT: |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 1015 | decode_block(pbi, xd, mi_row, mi_col, r, subsize, n8x8_l2, n4x4_l2); |
Scott LaVarnway | ce6a6c5 | 2015-06-16 11:50:54 -0700 | [diff] [blame] | 1016 | if (has_cols) |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 1017 | decode_block(pbi, xd, mi_row, mi_col + hbs, r, subsize, n8x8_l2, |
| 1018 | n4x4_l2); |
Dmitry Kovalev | 2901bf2 | 2013-10-30 12:17:05 -0700 | [diff] [blame] | 1019 | break; |
| 1020 | case PARTITION_SPLIT: |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 1021 | decode_partition(pbi, xd, mi_row, mi_col, r, subsize, n8x8_l2); |
| 1022 | decode_partition(pbi, xd, mi_row, mi_col + hbs, r, subsize, n8x8_l2); |
| 1023 | decode_partition(pbi, xd, mi_row + hbs, mi_col, r, subsize, n8x8_l2); |
| 1024 | decode_partition(pbi, xd, mi_row + hbs, mi_col + hbs, r, subsize, |
| 1025 | n8x8_l2); |
Dmitry Kovalev | 2901bf2 | 2013-10-30 12:17:05 -0700 | [diff] [blame] | 1026 | break; |
| 1027 | default: |
James Zern | 178db94 | 2013-12-12 19:44:08 -0800 | [diff] [blame] | 1028 | assert(0 && "Invalid partition type"); |
Dmitry Kovalev | 2901bf2 | 2013-10-30 12:17:05 -0700 | [diff] [blame] | 1029 | } |
Jingning Han | 90a91cc | 2013-04-16 00:18:02 -0700 | [diff] [blame] | 1030 | } |
Dmitry Kovalev | 2c31729 | 2013-07-15 14:47:25 -0700 | [diff] [blame] | 1031 | |
Jingning Han | ff2b8aa | 2013-04-23 10:12:18 -0700 | [diff] [blame] | 1032 | // update partition context |
Dmitry Kovalev | d007446 | 2013-08-02 11:45:21 -0700 | [diff] [blame] | 1033 | if (bsize >= BLOCK_8X8 && |
Dmitry Kovalev | a0be71c | 2013-10-21 12:02:19 -0700 | [diff] [blame] | 1034 | (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT)) |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 1035 | dec_update_partition_context(xd, mi_row, mi_col, subsize, num_8x8_wh); |
Jingning Han | 90a91cc | 2013-04-16 00:18:02 -0700 | [diff] [blame] | 1036 | } |
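| | // The recursion is seeded at BLOCK_64X64 with n4x4_l2 == 4 (see |
| | // decode_tiles below), so num_8x8_wh == 8 and hbs == 4 mi units (32 |
| | // pixels); each PARTITION_SPLIT halves these until hbs reaches 0 at 8x8. |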
| 1037 | |
James Zern | 0aee867 | 2013-10-07 12:38:06 +0200 | [diff] [blame] | 1038 | static void setup_token_decoder(const uint8_t *data, |
| 1039 | const uint8_t *data_end, |
| 1040 | size_t read_size, |
| 1041 | struct vpx_internal_error_info *error_info, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1042 | vpx_reader *r, |
Joey Parrish | 18c0860 | 2014-04-15 14:10:58 -0700 | [diff] [blame] | 1043 | vpx_decrypt_cb decrypt_cb, |
| 1044 | void *decrypt_state) { |
Dmitry Kovalev | 2891d70 | 2013-03-11 17:02:27 -0700 | [diff] [blame] | 1045 | // Validate the calculated partition length. If the buffer |
| 1046 | // described by the partition can't be fully read, then throw |
| 1047 | // an error; there is no error-concealment fallback here. |
John Koleszar | 2bcc473 | 2013-06-11 10:06:31 -0700 | [diff] [blame] | 1048 | if (!read_is_valid(data, read_size, data_end)) |
James Zern | 0aee867 | 2013-10-07 12:38:06 +0200 | [diff] [blame] | 1049 | vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME, |
John Koleszar | 2bcc473 | 2013-06-11 10:06:31 -0700 | [diff] [blame] | 1050 | "Truncated packet or corrupt tile length"); |
John Koleszar | 0ea50ce | 2010-05-18 11:58:33 -0400 | [diff] [blame] | 1051 | |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1052 | if (vpx_reader_init(r, data, read_size, decrypt_cb, decrypt_state)) |
James Zern | 0aee867 | 2013-10-07 12:38:06 +0200 | [diff] [blame] | 1053 | vpx_internal_error(error_info, VPX_CODEC_MEM_ERROR, |
John Koleszar | c6b9039 | 2012-07-13 15:21:29 -0700 | [diff] [blame] | 1054 | "Failed to allocate bool decoder %d", 1); |
John Koleszar | 0ea50ce | 2010-05-18 11:58:33 -0400 | [diff] [blame] | 1055 | } |
| 1056 | |
Dmitry Kovalev | 2c31729 | 2013-07-15 14:47:25 -0700 | [diff] [blame] | 1057 | static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1058 | vpx_reader *r) { |
Dmitry Kovalev | 704afd0 | 2013-07-08 11:54:36 -0700 | [diff] [blame] | 1059 | int i, j, k, l, m; |
Deb Mukherjee | fd18d5d | 2013-03-13 11:03:17 -0700 | [diff] [blame] | 1060 | |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1061 | if (vpx_read_bit(r)) |
Dmitry Kovalev | d6b159d | 2013-12-06 10:54:00 -0800 | [diff] [blame] | 1062 | for (i = 0; i < PLANE_TYPES; ++i) |
Dmitry Kovalev | 377fa8a | 2013-12-05 17:08:06 -0800 | [diff] [blame] | 1063 | for (j = 0; j < REF_TYPES; ++j) |
| 1064 | for (k = 0; k < COEF_BANDS; ++k) |
| 1065 | for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) |
| 1066 | for (m = 0; m < UNCONSTRAINED_NODES; ++m) |
| 1067 | vp9_diff_update_prob(r, &coef_probs[i][j][k][l][m]); |
John Koleszar | 226e55c | 2012-10-19 15:35:36 -0700 | [diff] [blame] | 1068 | } |
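| | // Only the first UNCONSTRAINED_NODES (3) probabilities of each coefficient |
| | // tree node set are coded explicitly; the remaining nodes are derived from |
| | // the model table (vp9_pareto8_full), keeping probability updates cheap. |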
| 1069 | |
Dmitry Kovalev | c0eb574 | 2013-07-19 11:37:13 -0700 | [diff] [blame] | 1070 | static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1071 | vpx_reader *r) { |
Yaowu Xu | a49e77a | 2013-10-30 18:06:42 -0700 | [diff] [blame] | 1072 | const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode]; |
| 1073 | TX_SIZE tx_size; |
| 1074 | for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size) |
| 1075 | read_coef_probs_common(fc->coef_probs[tx_size], r); |
John Koleszar | c6b9039 | 2012-07-13 15:21:29 -0700 | [diff] [blame] | 1076 | } |
| 1077 | |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1078 | static void setup_segmentation(struct segmentation *seg, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1079 | struct vpx_read_bit_buffer *rb) { |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1080 | int i, j; |
| 1081 | |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1082 | seg->update_map = 0; |
| 1083 | seg->update_data = 0; |
Dmitry Kovalev | a237576 | 2013-06-06 12:33:12 -0700 | [diff] [blame] | 1084 | |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1085 | seg->enabled = vpx_rb_read_bit(rb); |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1086 | if (!seg->enabled) |
Dmitry Kovalev | 144f49c | 2013-04-23 15:50:56 -0700 | [diff] [blame] | 1087 | return; |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1088 | |
Dmitry Kovalev | 144f49c | 2013-04-23 15:50:56 -0700 | [diff] [blame] | 1089 | // Segmentation map update |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1090 | seg->update_map = vpx_rb_read_bit(rb); |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1091 | if (seg->update_map) { |
Paul Wilkins | 32042af | 2013-07-23 12:09:04 +0100 | [diff] [blame] | 1092 | for (i = 0; i < SEG_TREE_PROBS; i++) |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1093 | seg->tree_probs[i] = vpx_rb_read_bit(rb) ? vpx_rb_read_literal(rb, 8) |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1094 | : MAX_PROB; |
Dmitry Kovalev | 2c42499 | 2013-04-05 11:55:52 -0700 | [diff] [blame] | 1095 | |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1096 | seg->temporal_update = vpx_rb_read_bit(rb); |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1097 | if (seg->temporal_update) { |
Dmitry Kovalev | 144f49c | 2013-04-23 15:50:56 -0700 | [diff] [blame] | 1098 | for (i = 0; i < PREDICTION_PROBS; i++) |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1099 | seg->pred_probs[i] = vpx_rb_read_bit(rb) ? vpx_rb_read_literal(rb, 8) |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1100 | : MAX_PROB; |
Dmitry Kovalev | 144f49c | 2013-04-23 15:50:56 -0700 | [diff] [blame] | 1101 | } else { |
| 1102 | for (i = 0; i < PREDICTION_PROBS; i++) |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1103 | seg->pred_probs[i] = MAX_PROB; |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1104 | } |
Dmitry Kovalev | 144f49c | 2013-04-23 15:50:56 -0700 | [diff] [blame] | 1105 | } |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1106 | |
Dmitry Kovalev | 144f49c | 2013-04-23 15:50:56 -0700 | [diff] [blame] | 1107 | // Segmentation data update |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1108 | seg->update_data = vpx_rb_read_bit(rb); |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1109 | if (seg->update_data) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1110 | seg->abs_delta = vpx_rb_read_bit(rb); |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1111 | |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1112 | vp9_clearall_segfeatures(seg); |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1113 | |
Paul Wilkins | 32042af | 2013-07-23 12:09:04 +0100 | [diff] [blame] | 1114 | for (i = 0; i < MAX_SEGMENTS; i++) { |
Dmitry Kovalev | 144f49c | 2013-04-23 15:50:56 -0700 | [diff] [blame] | 1115 | for (j = 0; j < SEG_LVL_MAX; j++) { |
| 1116 | int data = 0; |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1117 | const int feature_enabled = vpx_rb_read_bit(rb); |
Dmitry Kovalev | 144f49c | 2013-04-23 15:50:56 -0700 | [diff] [blame] | 1118 | if (feature_enabled) { |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1119 | vp9_enable_segfeature(seg, i, j); |
Dmitry Kovalev | a237576 | 2013-06-06 12:33:12 -0700 | [diff] [blame] | 1120 | data = decode_unsigned_max(rb, vp9_seg_feature_data_max(j)); |
Dmitry Kovalev | 144f49c | 2013-04-23 15:50:56 -0700 | [diff] [blame] | 1121 | if (vp9_is_segfeature_signed(j)) |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1122 | data = vpx_rb_read_bit(rb) ? -data : data; |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1123 | } |
Dmitry Kovalev | c4ad327 | 2013-07-10 12:29:43 -0700 | [diff] [blame] | 1124 | vp9_set_segdata(seg, i, j, data); |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1125 | } |
| 1126 | } |
| 1127 | } |
| 1128 | } |
| 1129 | |
Dmitry Kovalev | ee1771e | 2013-07-17 18:37:45 -0700 | [diff] [blame] | 1130 | static void setup_loopfilter(struct loopfilter *lf, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1131 | struct vpx_read_bit_buffer *rb) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1132 | lf->filter_level = vpx_rb_read_literal(rb, 6); |
| 1133 | lf->sharpness_level = vpx_rb_read_literal(rb, 3); |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1134 | |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1135 | // Read in loop filter deltas applied at the MB level based on mode or ref |
| 1136 | // frame. |
Dmitry Kovalev | ee1771e | 2013-07-17 18:37:45 -0700 | [diff] [blame] | 1137 | lf->mode_ref_delta_update = 0; |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1138 | |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1139 | lf->mode_ref_delta_enabled = vpx_rb_read_bit(rb); |
Dmitry Kovalev | ee1771e | 2013-07-17 18:37:45 -0700 | [diff] [blame] | 1140 | if (lf->mode_ref_delta_enabled) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1141 | lf->mode_ref_delta_update = vpx_rb_read_bit(rb); |
Dmitry Kovalev | ee1771e | 2013-07-17 18:37:45 -0700 | [diff] [blame] | 1142 | if (lf->mode_ref_delta_update) { |
Dmitry Kovalev | 59b2928 | 2013-04-03 12:18:15 -0700 | [diff] [blame] | 1143 | int i; |
| 1144 | |
Dmitry Kovalev | e5e15eb | 2013-07-01 02:09:36 -0700 | [diff] [blame] | 1145 | for (i = 0; i < MAX_REF_LF_DELTAS; i++) |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1146 | if (vpx_rb_read_bit(rb)) |
| 1147 | lf->ref_deltas[i] = vpx_rb_read_signed_literal(rb, 6); |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1148 | |
Dmitry Kovalev | e5e15eb | 2013-07-01 02:09:36 -0700 | [diff] [blame] | 1149 | for (i = 0; i < MAX_MODE_LF_DELTAS; i++) |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1150 | if (vpx_rb_read_bit(rb)) |
| 1151 | lf->mode_deltas[i] = vpx_rb_read_signed_literal(rb, 6); |
Dmitry Kovalev | 4a3d786 | 2013-03-26 11:04:25 -0700 | [diff] [blame] | 1152 | } |
| 1153 | } |
| 1154 | } |
| 1155 | |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1156 | static INLINE int read_delta_q(struct vpx_read_bit_buffer *rb) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1157 | return vpx_rb_read_bit(rb) ? vpx_rb_read_signed_literal(rb, 4) : 0; |
Dmitry Kovalev | b8b91b2 | 2013-06-03 10:50:57 -0700 | [diff] [blame] | 1158 | } |
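| | // Each delta-q is optional: one flag bit, then a 4-bit magnitude plus sign |
| | // when present, giving a range of [-15, 15] around base_qindex. |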
Dmitry Kovalev | f370db0 | 2013-04-09 18:24:08 -0700 | [diff] [blame] | 1159 | |
James Zern | f0eabfd | 2013-10-25 13:50:05 +0200 | [diff] [blame] | 1160 | static void setup_quantization(VP9_COMMON *const cm, MACROBLOCKD *const xd, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1161 | struct vpx_read_bit_buffer *rb) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1162 | cm->base_qindex = vpx_rb_read_literal(rb, QINDEX_BITS); |
Hangyu Kuang | bdd249b | 2015-02-18 15:32:24 -0800 | [diff] [blame] | 1163 | cm->y_dc_delta_q = read_delta_q(rb); |
| 1164 | cm->uv_dc_delta_q = read_delta_q(rb); |
| 1165 | cm->uv_ac_delta_q = read_delta_q(rb); |
| 1166 | cm->dequant_bit_depth = cm->bit_depth; |
Dmitry Kovalev | a237576 | 2013-06-06 12:33:12 -0700 | [diff] [blame] | 1167 | xd->lossless = cm->base_qindex == 0 && |
| 1168 | cm->y_dc_delta_q == 0 && |
| 1169 | cm->uv_dc_delta_q == 0 && |
| 1170 | cm->uv_ac_delta_q == 0; |
Scott LaVarnway | afcb62b | 2015-04-28 07:52:06 -0700 | [diff] [blame] | 1171 | |
Deb Mukherjee | 993d10a | 2014-09-24 06:36:34 -0700 | [diff] [blame] | 1172 | #if CONFIG_VP9_HIGHBITDEPTH |
| 1173 | xd->bd = (int)cm->bit_depth; |
| 1174 | #endif |
Dmitry Kovalev | f370db0 | 2013-04-09 18:24:08 -0700 | [diff] [blame] | 1175 | } |
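| | // Lossless mode is implied rather than signaled explicitly: a zero |
| | // base_qindex with all delta-q values zero selects the reversible 4x4 |
| | // Walsh-Hadamard transform path. |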
| 1176 | |
Scott LaVarnway | afcb62b | 2015-04-28 07:52:06 -0700 | [diff] [blame] | 1177 | static void setup_segmentation_dequant(VP9_COMMON *const cm) { |
| 1178 | // Build y/uv dequant values based on segmentation. |
| 1179 | if (cm->seg.enabled) { |
| 1180 | int i; |
| 1181 | for (i = 0; i < MAX_SEGMENTS; ++i) { |
| 1182 | const int qindex = vp9_get_qindex(&cm->seg, i, cm->base_qindex); |
| 1183 | cm->y_dequant[i][0] = vp9_dc_quant(qindex, cm->y_dc_delta_q, |
| 1184 | cm->bit_depth); |
| 1185 | cm->y_dequant[i][1] = vp9_ac_quant(qindex, 0, cm->bit_depth); |
| 1186 | cm->uv_dequant[i][0] = vp9_dc_quant(qindex, cm->uv_dc_delta_q, |
| 1187 | cm->bit_depth); |
| 1188 | cm->uv_dequant[i][1] = vp9_ac_quant(qindex, cm->uv_ac_delta_q, |
| 1189 | cm->bit_depth); |
| 1190 | } |
| 1191 | } else { |
| 1192 | const int qindex = cm->base_qindex; |
| 1193 | // When segmentation is disabled, only the first value is used; the |
| 1194 | // remaining entries are don't-cares. |
| 1195 | cm->y_dequant[0][0] = vp9_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth); |
| 1196 | cm->y_dequant[0][1] = vp9_ac_quant(qindex, 0, cm->bit_depth); |
| 1197 | cm->uv_dequant[0][0] = vp9_dc_quant(qindex, cm->uv_dc_delta_q, |
| 1198 | cm->bit_depth); |
| 1199 | cm->uv_dequant[0][1] = vp9_ac_quant(qindex, cm->uv_ac_delta_q, |
| 1200 | cm->bit_depth); |
| 1201 | } |
| 1202 | } |
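| | // Each dequant pair stores the DC quantizer step at index [0] and the AC |
| | // step at index [1], per segment when segmentation is enabled. |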
| 1203 | |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1204 | static INTERP_FILTER read_interp_filter(struct vpx_read_bit_buffer *rb) { |
Dmitry Kovalev | 4264c93 | 2014-01-24 12:26:57 -0800 | [diff] [blame] | 1205 | const INTERP_FILTER literal_to_filter[] = { EIGHTTAP_SMOOTH, |
| 1206 | EIGHTTAP, |
| 1207 | EIGHTTAP_SHARP, |
| 1208 | BILINEAR }; |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1209 | return vpx_rb_read_bit(rb) ? SWITCHABLE |
| 1210 | : literal_to_filter[vpx_rb_read_literal(rb, 2)]; |
Dmitry Kovalev | 144f49c | 2013-04-23 15:50:56 -0700 | [diff] [blame] | 1211 | } |
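| | // One bit selects per-block (SWITCHABLE) filter signaling; otherwise a |
| | // 2-bit literal indexes the reordered filter table above. |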
| 1212 | |
Ronald S. Bultje | 36ffe64 | 2015-09-25 21:34:29 -0400 | [diff] [blame] | 1213 | static void setup_render_size(VP9_COMMON *cm, struct vpx_read_bit_buffer *rb) { |
| 1214 | cm->render_width = cm->width; |
| 1215 | cm->render_height = cm->height; |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1216 | if (vpx_rb_read_bit(rb)) |
Ronald S. Bultje | 36ffe64 | 2015-09-25 21:34:29 -0400 | [diff] [blame] | 1217 | vp9_read_frame_size(rb, &cm->render_width, &cm->render_height); |
Dmitry Kovalev | 514b8ad | 2013-06-07 13:41:44 -0700 | [diff] [blame] | 1218 | } |
Dmitry Kovalev | 8c69c19 | 2013-03-27 14:04:35 -0700 | [diff] [blame] | 1219 | |
hkuang | 5557743 | 2014-10-27 16:19:04 -0700 | [diff] [blame] | 1220 | static void resize_mv_buffer(VP9_COMMON *cm) { |
| 1221 | vpx_free(cm->cur_frame->mvs); |
| 1222 | cm->cur_frame->mi_rows = cm->mi_rows; |
| 1223 | cm->cur_frame->mi_cols = cm->mi_cols; |
James Zern | ac4aeb5 | 2016-02-17 12:38:40 -0800 | [diff] [blame] | 1224 | CHECK_MEM_ERROR(cm, cm->cur_frame->mvs, |
| 1225 | (MV_REF *)vpx_calloc(cm->mi_rows * cm->mi_cols, |
| 1226 | sizeof(*cm->cur_frame->mvs))); |
hkuang | 5557743 | 2014-10-27 16:19:04 -0700 | [diff] [blame] | 1227 | } |
| 1228 | |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1229 | static void resize_context_buffers(VP9_COMMON *cm, int width, int height) { |
Adrian Grange | 18a7f69 | 2014-07-18 15:40:39 -0700 | [diff] [blame] | 1230 | #if CONFIG_SIZE_LIMIT |
| 1231 | if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT) |
| 1232 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
Johann | 1e4473b | 2015-05-27 15:27:34 -0700 | [diff] [blame] | 1233 | "Dimensions of %dx%d beyond allowed size of %dx%d.", |
| 1234 | width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT); |
Adrian Grange | 18a7f69 | 2014-07-18 15:40:39 -0700 | [diff] [blame] | 1235 | #endif |
Dmitry Kovalev | 514b8ad | 2013-06-07 13:41:44 -0700 | [diff] [blame] | 1236 | if (cm->width != width || cm->height != height) { |
Adrian Grange | 7b2177c | 2014-08-21 08:15:23 -0700 | [diff] [blame] | 1237 | const int new_mi_rows = |
James Zern | bb4950d | 2014-09-04 16:05:18 -0700 | [diff] [blame] | 1238 | ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2; |
Adrian Grange | 7b2177c | 2014-08-21 08:15:23 -0700 | [diff] [blame] | 1239 | const int new_mi_cols = |
James Zern | bb4950d | 2014-09-04 16:05:18 -0700 | [diff] [blame] | 1240 | ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2; |
| 1241 | |
| 1242 | // Allocations in vp9_alloc_context_buffers() depend on individual |
| 1243 | // dimensions as well as the overall size. |
| 1244 | if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) { |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1245 | if (vp9_alloc_context_buffers(cm, width, height)) |
Dmitry Kovalev | 514b8ad | 2013-06-07 13:41:44 -0700 | [diff] [blame] | 1246 | vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR, |
Adrian Grange | 4e30565 | 2014-08-12 11:24:24 -0700 | [diff] [blame] | 1247 | "Failed to allocate context buffers"); |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1248 | } else { |
| 1249 | vp9_set_mb_mi(cm, width, height); |
Dmitry Kovalev | 8c69c19 | 2013-03-27 14:04:35 -0700 | [diff] [blame] | 1250 | } |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1251 | vp9_init_context_buffers(cm); |
Dmitry Kovalev | 514b8ad | 2013-06-07 13:41:44 -0700 | [diff] [blame] | 1252 | cm->width = width; |
| 1253 | cm->height = height; |
Dmitry Kovalev | 8c69c19 | 2013-03-27 14:04:35 -0700 | [diff] [blame] | 1254 | } |
hkuang | 5557743 | 2014-10-27 16:19:04 -0700 | [diff] [blame] | 1255 | if (cm->cur_frame->mvs == NULL || cm->mi_rows > cm->cur_frame->mi_rows || |
| 1256 | cm->mi_cols > cm->cur_frame->mi_cols) { |
| 1257 | resize_mv_buffer(cm); |
| 1258 | } |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1259 | } |
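| | // Context buffers only ever grow: shrinking the frame keeps the existing |
| | // allocation and merely updates the mi dimensions via vp9_set_mb_mi(). |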
| 1260 | |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1261 | static void setup_frame_size(VP9_COMMON *cm, struct vpx_read_bit_buffer *rb) { |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1262 | int width, height; |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1263 | BufferPool *const pool = cm->buffer_pool; |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1264 | vp9_read_frame_size(rb, &width, &height); |
| 1265 | resize_context_buffers(cm, width, height); |
Ronald S. Bultje | 36ffe64 | 2015-09-25 21:34:29 -0400 | [diff] [blame] | 1266 | setup_render_size(cm, rb); |
Dmitry Kovalev | 12345cb | 2013-06-05 20:56:37 -0700 | [diff] [blame] | 1267 | |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1268 | lock_buffer_pool(pool); |
Jingning Han | 89af744 | 2015-08-14 15:27:02 -0700 | [diff] [blame] | 1269 | if (vpx_realloc_frame_buffer( |
Frank Galligan | e8e1527 | 2014-02-05 17:44:42 -0800 | [diff] [blame] | 1270 | get_frame_new_buffer(cm), cm->width, cm->height, |
Deb Mukherjee | 5acfafb | 2014-08-26 12:35:15 -0700 | [diff] [blame] | 1271 | cm->subsampling_x, cm->subsampling_y, |
| 1272 | #if CONFIG_VP9_HIGHBITDEPTH |
| 1273 | cm->use_highbitdepth, |
| 1274 | #endif |
| 1275 | VP9_DEC_BORDER_IN_PIXELS, |
Frank Galligan | c4f7079 | 2014-12-15 12:00:09 -0800 | [diff] [blame] | 1276 | cm->byte_alignment, |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1277 | &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb, |
| 1278 | pool->cb_priv)) { |
| 1279 | unlock_buffer_pool(pool); |
Frank Galligan | e8e1527 | 2014-02-05 17:44:42 -0800 | [diff] [blame] | 1280 | vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR, |
| 1281 | "Failed to allocate frame buffer"); |
| 1282 | } |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1283 | unlock_buffer_pool(pool); |
| 1284 | |
| 1285 | pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x; |
| 1286 | pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y; |
| 1287 | pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth; |
Yaowu Xu | 6cf3031 | 2015-02-23 13:01:14 -0800 | [diff] [blame] | 1288 | pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space; |
Ronald S. Bultje | eeb5ef0 | 2015-09-15 21:56:51 -0400 | [diff] [blame] | 1289 | pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range; |
Ronald S. Bultje | 812945a | 2015-09-25 21:51:55 -0400 | [diff] [blame] | 1290 | pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width; |
| 1291 | pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height; |
Alex Converse | b932c6c | 2014-09-02 12:25:44 -0700 | [diff] [blame] | 1292 | } |
| 1293 | |
| 1294 | static INLINE int valid_ref_frame_img_fmt(vpx_bit_depth_t ref_bit_depth, |
| 1295 | int ref_xss, int ref_yss, |
| 1296 | vpx_bit_depth_t this_bit_depth, |
| 1297 | int this_xss, int this_yss) { |
| 1298 | return ref_bit_depth == this_bit_depth && ref_xss == this_xss && |
| 1299 | ref_yss == this_yss; |
Dmitry Kovalev | 8c69c19 | 2013-03-27 14:04:35 -0700 | [diff] [blame] | 1300 | } |
| 1301 | |
Dmitry Kovalev | 9366624 | 2014-03-25 14:21:26 -0700 | [diff] [blame] | 1302 | static void setup_frame_size_with_refs(VP9_COMMON *cm, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1303 | struct vpx_read_bit_buffer *rb) { |
Dmitry Kovalev | 4053fde | 2013-06-07 18:09:29 -0700 | [diff] [blame] | 1304 | int width, height; |
| 1305 | int found = 0, i; |
Yaowu Xu | 9261e1a | 2014-07-24 09:12:46 -0700 | [diff] [blame] | 1306 | int has_valid_ref_frame = 0; |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1307 | BufferPool *const pool = cm->buffer_pool; |
Dmitry Kovalev | 0d4b8d7 | 2013-12-05 16:23:09 -0800 | [diff] [blame] | 1308 | for (i = 0; i < REFS_PER_FRAME; ++i) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1309 | if (vpx_rb_read_bit(rb)) { |
Dmitry Kovalev | ba41e9d | 2013-12-27 18:44:19 -0800 | [diff] [blame] | 1310 | YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf; |
| 1311 | width = buf->y_crop_width; |
| 1312 | height = buf->y_crop_height; |
Dmitry Kovalev | 4053fde | 2013-06-07 18:09:29 -0700 | [diff] [blame] | 1313 | found = 1; |
| 1314 | break; |
| 1315 | } |
| 1316 | } |
| 1317 | |
| 1318 | if (!found) |
Adrian Grange | 7c43fb6 | 2014-06-09 15:22:17 -0700 | [diff] [blame] | 1319 | vp9_read_frame_size(rb, &width, &height); |
Dmitry Kovalev | 4053fde | 2013-06-07 18:09:29 -0700 | [diff] [blame] | 1320 | |
hkuang | c1b0d0d | 2014-10-14 11:35:26 -0700 | [diff] [blame] | 1321 | if (width <= 0 || height <= 0) |
Yaowu Xu | 9261e1a | 2014-07-24 09:12:46 -0700 | [diff] [blame] | 1322 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 1323 | "Invalid frame size"); |
| 1324 | |
| 1325 | // Check to make sure at least one of the frames that this frame |
| 1326 | // references has valid dimensions. |
Jim Bankoski | 9f37d14 | 2014-06-27 10:03:15 -0700 | [diff] [blame] | 1327 | for (i = 0; i < REFS_PER_FRAME; ++i) { |
| 1328 | RefBuffer *const ref_frame = &cm->frame_refs[i]; |
Jim Bankoski | 899585e | 2014-07-28 08:37:25 -0700 | [diff] [blame] | 1329 | has_valid_ref_frame |= valid_ref_frame_size(ref_frame->buf->y_crop_width, |
| 1330 | ref_frame->buf->y_crop_height, |
Yaowu Xu | 9261e1a | 2014-07-24 09:12:46 -0700 | [diff] [blame] | 1331 | width, height); |
Jim Bankoski | 9f37d14 | 2014-06-27 10:03:15 -0700 | [diff] [blame] | 1332 | } |
Yaowu Xu | 9261e1a | 2014-07-24 09:12:46 -0700 | [diff] [blame] | 1333 | if (!has_valid_ref_frame) |
| 1334 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 1335 | "Referenced frame has invalid size"); |
Alex Converse | b932c6c | 2014-09-02 12:25:44 -0700 | [diff] [blame] | 1336 | for (i = 0; i < REFS_PER_FRAME; ++i) { |
| 1337 | RefBuffer *const ref_frame = &cm->frame_refs[i]; |
| 1338 | if (!valid_ref_frame_img_fmt( |
| 1339 | ref_frame->buf->bit_depth, |
Alex Converse | a0befb9 | 2014-10-01 11:23:57 -0700 | [diff] [blame] | 1340 | ref_frame->buf->subsampling_x, |
| 1341 | ref_frame->buf->subsampling_y, |
Alex Converse | b932c6c | 2014-09-02 12:25:44 -0700 | [diff] [blame] | 1342 | cm->bit_depth, |
| 1343 | cm->subsampling_x, |
| 1344 | cm->subsampling_y)) |
| 1345 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
Yaowu Xu | ecbca31 | 2015-01-09 10:36:43 -0800 | [diff] [blame] | 1346 | "Referenced frame has incompatible color format"); |
Alex Converse | b932c6c | 2014-09-02 12:25:44 -0700 | [diff] [blame] | 1347 | } |
John Koleszar | 76e0c95 | 2013-06-11 14:24:53 -0700 | [diff] [blame] | 1348 | |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1349 | resize_context_buffers(cm, width, height); |
Ronald S. Bultje | 36ffe64 | 2015-09-25 21:34:29 -0400 | [diff] [blame] | 1350 | setup_render_size(cm, rb); |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1351 | |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1352 | lock_buffer_pool(pool); |
Jingning Han | 89af744 | 2015-08-14 15:27:02 -0700 | [diff] [blame] | 1353 | if (vpx_realloc_frame_buffer( |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1354 | get_frame_new_buffer(cm), cm->width, cm->height, |
Deb Mukherjee | 5acfafb | 2014-08-26 12:35:15 -0700 | [diff] [blame] | 1355 | cm->subsampling_x, cm->subsampling_y, |
| 1356 | #if CONFIG_VP9_HIGHBITDEPTH |
| 1357 | cm->use_highbitdepth, |
| 1358 | #endif |
| 1359 | VP9_DEC_BORDER_IN_PIXELS, |
Frank Galligan | c4f7079 | 2014-12-15 12:00:09 -0800 | [diff] [blame] | 1360 | cm->byte_alignment, |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1361 | &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb, |
| 1362 | pool->cb_priv)) { |
| 1363 | unlock_buffer_pool(pool); |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1364 | vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR, |
| 1365 | "Failed to allocate frame buffer"); |
| 1366 | } |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1367 | unlock_buffer_pool(pool); |
| 1368 | |
| 1369 | pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x; |
| 1370 | pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y; |
| 1371 | pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth; |
Yaowu Xu | 6cf3031 | 2015-02-23 13:01:14 -0800 | [diff] [blame] | 1372 | pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space; |
Ronald S. Bultje | eeb5ef0 | 2015-09-15 21:56:51 -0400 | [diff] [blame] | 1373 | pool->frame_bufs[cm->new_fb_idx].buf.color_range = cm->color_range; |
Ronald S. Bultje | 812945a | 2015-09-25 21:51:55 -0400 | [diff] [blame] | 1374 | pool->frame_bufs[cm->new_fb_idx].buf.render_width = cm->render_width; |
| 1375 | pool->frame_bufs[cm->new_fb_idx].buf.render_height = cm->render_height; |
Dmitry Kovalev | 4053fde | 2013-06-07 18:09:29 -0700 | [diff] [blame] | 1376 | } |
| 1377 | |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1378 | static void setup_tile_info(VP9_COMMON *cm, struct vpx_read_bit_buffer *rb) { |
Dmitry Kovalev | 9482a0b | 2013-07-16 14:47:15 -0700 | [diff] [blame] | 1379 | int min_log2_tile_cols, max_log2_tile_cols, max_ones; |
| 1380 | vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols); |
Dmitry Kovalev | 72f9f10 | 2013-03-27 16:23:12 -0700 | [diff] [blame] | 1381 | |
Dmitry Kovalev | 9482a0b | 2013-07-16 14:47:15 -0700 | [diff] [blame] | 1382 | // columns |
| 1383 | max_ones = max_log2_tile_cols - min_log2_tile_cols; |
| 1384 | cm->log2_tile_cols = min_log2_tile_cols; |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1385 | while (max_ones-- && vpx_rb_read_bit(rb)) |
Dmitry Kovalev | 9482a0b | 2013-07-16 14:47:15 -0700 | [diff] [blame] | 1386 | cm->log2_tile_cols++; |
Dmitry Kovalev | a237576 | 2013-06-06 12:33:12 -0700 | [diff] [blame] | 1387 | |
James Zern | b2b0775 | 2014-06-21 16:55:29 -0700 | [diff] [blame] | 1388 | if (cm->log2_tile_cols > 6) |
| 1389 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 1390 | "Invalid number of tile columns"); |
| 1391 | |
Dmitry Kovalev | 9482a0b | 2013-07-16 14:47:15 -0700 | [diff] [blame] | 1392 | // rows |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1393 | cm->log2_tile_rows = vpx_rb_read_bit(rb); |
Dmitry Kovalev | a237576 | 2013-06-06 12:33:12 -0700 | [diff] [blame] | 1394 | if (cm->log2_tile_rows) |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1395 | cm->log2_tile_rows += vpx_rb_read_bit(rb); |
Dmitry Kovalev | a237576 | 2013-06-06 12:33:12 -0700 | [diff] [blame] | 1396 | } |
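| | // log2_tile_cols is coded in unary as increments above the minimum, while |
| | // tile rows use at most two bits (a log2 value of 0, 1, or 2). |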
| 1397 | |
James Zern | 6b00202 | 2013-10-28 21:43:05 +0100 | [diff] [blame] | 1398 | // Reads the next tile into 'buf', storing its start and size, and advances |
| 1399 | // '*data' past it; 'is_last' marks the final tile, which has no size prefix. |
Dmitry Kovalev | ccfb4b9 | 2014-05-12 12:49:29 -0700 | [diff] [blame] | 1400 | static void get_tile_buffer(const uint8_t *const data_end, |
| 1401 | int is_last, |
| 1402 | struct vpx_internal_error_info *error_info, |
| 1403 | const uint8_t **data, |
| 1404 | vpx_decrypt_cb decrypt_cb, void *decrypt_state, |
| 1405 | TileBuffer *buf) { |
James Zern | 6b00202 | 2013-10-28 21:43:05 +0100 | [diff] [blame] | 1406 | size_t size; |
| 1407 | |
| 1408 | if (!is_last) { |
| 1409 | if (!read_is_valid(*data, 4, data_end)) |
| 1410 | vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME, |
Johann | 8577026 | 2013-12-17 18:29:06 -0800 | [diff] [blame] | 1411 | "Truncated packet or corrupt tile length"); |
James Zern | 6b00202 | 2013-10-28 21:43:05 +0100 | [diff] [blame] | 1412 | |
Joey Parrish | 18c0860 | 2014-04-15 14:10:58 -0700 | [diff] [blame] | 1413 | if (decrypt_cb) { |
| 1414 | uint8_t be_data[4]; |
| 1415 | decrypt_cb(decrypt_state, *data, be_data, 4); |
| 1416 | size = mem_get_be32(be_data); |
| 1417 | } else { |
| 1418 | size = mem_get_be32(*data); |
| 1419 | } |
James Zern | 6b00202 | 2013-10-28 21:43:05 +0100 | [diff] [blame] | 1420 | *data += 4; |
Johann | 8577026 | 2013-12-17 18:29:06 -0800 | [diff] [blame] | 1421 | |
Johann | dc2c62e | 2013-12-18 15:52:51 -0800 | [diff] [blame] | 1422 | if (size > (size_t)(data_end - *data)) |
Johann | 8577026 | 2013-12-17 18:29:06 -0800 | [diff] [blame] | 1423 | vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME, |
| 1424 | "Truncated packet or corrupt tile size"); |
James Zern | 6b00202 | 2013-10-28 21:43:05 +0100 | [diff] [blame] | 1425 | } else { |
| 1426 | size = data_end - *data; |
| 1427 | } |
Dmitry Kovalev | ccfb4b9 | 2014-05-12 12:49:29 -0700 | [diff] [blame] | 1428 | |
| 1429 | buf->data = *data; |
| 1430 | buf->size = size; |
| 1431 | |
| 1432 | *data += size; |
James Zern | 6b00202 | 2013-10-28 21:43:05 +0100 | [diff] [blame] | 1433 | } |
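| | // Every tile except the last is prefixed by its size as a 4-byte |
| | // big-endian integer (most significant byte first), read via mem_get_be32() |
| | // and decrypted first when a decrypt callback is installed. |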
| 1434 | |
Dmitry Kovalev | ccfb4b9 | 2014-05-12 12:49:29 -0700 | [diff] [blame] | 1435 | static void get_tile_buffers(VP9Decoder *pbi, |
| 1436 | const uint8_t *data, const uint8_t *data_end, |
| 1437 | int tile_cols, int tile_rows, |
| 1438 | TileBuffer (*tile_buffers)[1 << 6]) { |
| 1439 | int r, c; |
| 1440 | |
| 1441 | for (r = 0; r < tile_rows; ++r) { |
| 1442 | for (c = 0; c < tile_cols; ++c) { |
| 1443 | const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1); |
| 1444 | TileBuffer *const buf = &tile_buffers[r][c]; |
| 1445 | buf->col = c; |
| 1446 | get_tile_buffer(data_end, is_last, &pbi->common.error, &data, |
| 1447 | pbi->decrypt_cb, pbi->decrypt_state, buf); |
| 1448 | } |
| 1449 | } |
| 1450 | } |
Dmitry Kovalev | d6e74e0 | 2013-07-19 12:27:56 -0700 | [diff] [blame] | 1451 | |
Dmitry Kovalev | 56c2f41 | 2014-04-08 11:41:12 -0700 | [diff] [blame] | 1452 | static const uint8_t *decode_tiles(VP9Decoder *pbi, |
Dmitry Kovalev | 88a10ab | 2014-03-20 15:01:37 -0700 | [diff] [blame] | 1453 | const uint8_t *data, |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1454 | const uint8_t *data_end) { |
James Zern | 924d745 | 2013-08-22 20:03:08 -0700 | [diff] [blame] | 1455 | VP9_COMMON *const cm = &pbi->common; |
Jingning Han | 04d2e57 | 2015-07-01 16:32:48 -0700 | [diff] [blame] | 1456 | const VPxWorkerInterface *const winterface = vpx_get_worker_interface(); |
Dmitry Kovalev | 672ba3d | 2013-11-06 18:15:33 -0800 | [diff] [blame] | 1457 | const int aligned_cols = mi_cols_aligned_to_sb(cm->mi_cols); |
James Zern | 924d745 | 2013-08-22 20:03:08 -0700 | [diff] [blame] | 1458 | const int tile_cols = 1 << cm->log2_tile_cols; |
| 1459 | const int tile_rows = 1 << cm->log2_tile_rows; |
Dmitry Kovalev | 672ba3d | 2013-11-06 18:15:33 -0800 | [diff] [blame] | 1460 | TileBuffer tile_buffers[4][1 << 6]; |
Dmitry Kovalev | a237576 | 2013-06-06 12:33:12 -0700 | [diff] [blame] | 1461 | int tile_row, tile_col; |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1462 | int mi_row, mi_col; |
| 1463 | TileData *tile_data = NULL; |
| 1464 | |
Frank Galligan | bfb6d48 | 2015-05-21 11:49:11 -0700 | [diff] [blame] | 1465 | if (cm->lf.filter_level && !cm->skip_loop_filter && |
| 1466 | pbi->lf_worker.data1 == NULL) { |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1467 | CHECK_MEM_ERROR(cm, pbi->lf_worker.data1, |
| 1468 | vpx_memalign(32, sizeof(LFWorkerData))); |
Jingning Han | 04d2e57 | 2015-07-01 16:32:48 -0700 | [diff] [blame] | 1469 | pbi->lf_worker.hook = (VPxWorkerHook)vp9_loop_filter_worker; |
James Zern | e656f44 | 2014-06-19 21:14:51 -0700 | [diff] [blame] | 1470 | if (pbi->max_threads > 1 && !winterface->reset(&pbi->lf_worker)) { |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1471 | vpx_internal_error(&cm->error, VPX_CODEC_ERROR, |
| 1472 | "Loop filter thread creation failed"); |
| 1473 | } |
| 1474 | } |
| 1475 | |
Frank Galligan | bfb6d48 | 2015-05-21 11:49:11 -0700 | [diff] [blame] | 1476 | if (cm->lf.filter_level && !cm->skip_loop_filter) { |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1477 | LFWorkerData *const lf_data = (LFWorkerData*)pbi->lf_worker.data1; |
James Zern | cde790c | 2014-08-27 16:29:34 -0700 | [diff] [blame] | 1478 | // Be sure to sync as we might be resuming after a failed frame decode. |
| 1479 | winterface->sync(&pbi->lf_worker); |
James Zern | 0148367 | 2013-12-26 13:15:56 -0500 | [diff] [blame] | 1480 | vp9_loop_filter_data_reset(lf_data, get_frame_new_buffer(cm), cm, |
| 1481 | pbi->mb.plane); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1482 | } |
Dmitry Kovalev | 672ba3d | 2013-11-06 18:15:33 -0800 | [diff] [blame] | 1483 | |
Yaowu Xu | a596975 | 2013-11-08 12:43:51 -0800 | [diff] [blame] | 1484 | assert(tile_rows <= 4); |
| 1485 | assert(tile_cols <= (1 << 6)); |
Dmitry Kovalev | 72f9f10 | 2013-03-27 16:23:12 -0700 | [diff] [blame] | 1486 | |
Ronald S. Bultje | 2dbaa4f | 2013-04-29 10:37:25 -0700 | [diff] [blame] | 1487 | // Note: this memset assumes above_context[0], [1] and [2] |
| 1488 | // are allocated as part of the same buffer. |
James Zern | f58011a | 2015-04-23 20:47:40 -0700 | [diff] [blame] | 1489 | memset(cm->above_context, 0, |
| 1490 | sizeof(*cm->above_context) * MAX_MB_PLANE * 2 * aligned_cols); |
Jingning Han | ff2b8aa | 2013-04-23 10:12:18 -0700 | [diff] [blame] | 1491 | |
James Zern | f58011a | 2015-04-23 20:47:40 -0700 | [diff] [blame] | 1492 | memset(cm->above_seg_context, 0, |
| 1493 | sizeof(*cm->above_seg_context) * aligned_cols); |
Dmitry Kovalev | 72f9f10 | 2013-03-27 16:23:12 -0700 | [diff] [blame] | 1494 | |
Scott LaVarnway | 7718117 | 2015-09-29 05:20:49 -0700 | [diff] [blame] | 1495 | vp9_reset_lfm(cm); |
| 1496 | |
Dmitry Kovalev | ccfb4b9 | 2014-05-12 12:49:29 -0700 | [diff] [blame] | 1497 | get_tile_buffers(pbi, data, data_end, tile_cols, tile_rows, tile_buffers); |
Dmitry Kovalev | d6e74e0 | 2013-07-19 12:27:56 -0700 | [diff] [blame] | 1498 | |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1499 | if (pbi->tile_data == NULL || |
| 1500 | (tile_cols * tile_rows) != pbi->total_tiles) { |
| 1501 | vpx_free(pbi->tile_data); |
| 1502 | CHECK_MEM_ERROR( |
| 1503 | cm, |
| 1504 | pbi->tile_data, |
hkuang | b9e1e99 | 2014-05-21 10:08:06 -0700 | [diff] [blame] | 1505 | vpx_memalign(32, tile_cols * tile_rows * (sizeof(*pbi->tile_data)))); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1506 | pbi->total_tiles = tile_rows * tile_cols; |
| 1507 | } |
| 1508 | |
| 1509 | // Load all tile information into tile_data. |
Dmitry Kovalev | 672ba3d | 2013-11-06 18:15:33 -0800 | [diff] [blame] | 1510 | for (tile_row = 0; tile_row < tile_rows; ++tile_row) { |
| 1511 | for (tile_col = 0; tile_col < tile_cols; ++tile_col) { |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1512 | const TileBuffer *const buf = &tile_buffers[tile_row][tile_col]; |
| 1513 | tile_data = pbi->tile_data + tile_cols * tile_row + tile_col; |
| 1514 | tile_data->cm = cm; |
| 1515 | tile_data->xd = pbi->mb; |
| 1516 | tile_data->xd.corrupted = 0; |
Scott LaVarnway | f779dba | 2015-06-02 09:06:00 -0700 | [diff] [blame] | 1517 | tile_data->xd.counts = cm->frame_parallel_decoding_mode ? |
| 1518 | NULL : &cm->counts; |
Alex Converse | 89090d8 | 2015-07-07 21:19:21 -0700 | [diff] [blame] | 1519 | vp9_zero(tile_data->dqcoeff); |
Scott LaVarnway | 86f4a3d | 2015-06-16 06:38:34 -0700 | [diff] [blame] | 1520 | vp9_tile_init(&tile_data->xd.tile, tile_data->cm, tile_row, tile_col); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1521 | setup_token_decoder(buf->data, data_end, buf->size, &cm->error, |
| 1522 | &tile_data->bit_reader, pbi->decrypt_cb, |
| 1523 | pbi->decrypt_state); |
Alex Converse | 89090d8 | 2015-07-07 21:19:21 -0700 | [diff] [blame] | 1524 | vp9_init_macroblockd(cm, &tile_data->xd, tile_data->dqcoeff); |
Dmitry Kovalev | 672ba3d | 2013-11-06 18:15:33 -0800 | [diff] [blame] | 1525 | } |
| 1526 | } |
| 1527 | |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1528 | for (tile_row = 0; tile_row < tile_rows; ++tile_row) { |
| 1529 | TileInfo tile; |
| 1530 | vp9_tile_set_row(&tile, cm, tile_row); |
| 1531 | for (mi_row = tile.mi_row_start; mi_row < tile.mi_row_end; |
| 1532 | mi_row += MI_BLOCK_SIZE) { |
| 1533 | for (tile_col = 0; tile_col < tile_cols; ++tile_col) { |
| 1534 | const int col = pbi->inv_tile_order ? |
| 1535 | tile_cols - tile_col - 1 : tile_col; |
| 1536 | tile_data = pbi->tile_data + tile_cols * tile_row + col; |
| 1537 | vp9_tile_set_col(&tile, tile_data->cm, col); |
| 1538 | vp9_zero(tile_data->xd.left_context); |
| 1539 | vp9_zero(tile_data->xd.left_seg_context); |
| 1540 | for (mi_col = tile.mi_col_start; mi_col < tile.mi_col_end; |
| 1541 | mi_col += MI_BLOCK_SIZE) { |
Scott LaVarnway | 13a4f14 | 2015-07-09 05:30:46 -0700 | [diff] [blame] | 1542 | decode_partition(pbi, &tile_data->xd, mi_row, |
| 1543 | mi_col, &tile_data->bit_reader, BLOCK_64X64, 4); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1544 | } |
hkuang | c147cf3 | 2014-07-01 16:04:53 -0700 | [diff] [blame] | 1545 | pbi->mb.corrupted |= tile_data->xd.corrupted; |
hkuang | dde8195 | 2014-12-04 15:06:31 -0800 | [diff] [blame] | 1546 | if (pbi->mb.corrupted) |
| 1547 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 1548 | "Failed to decode tile data"); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1549 | } |
| 1550 | // Loopfilter one row. |
Frank Galligan | bfb6d48 | 2015-05-21 11:49:11 -0700 | [diff] [blame] | 1551 | if (cm->lf.filter_level && !cm->skip_loop_filter) { |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1552 | const int lf_start = mi_row - MI_BLOCK_SIZE; |
| 1553 | LFWorkerData *const lf_data = (LFWorkerData*)pbi->lf_worker.data1; |
| 1554 | |
| 1555 | // Delay the loop filter by one superblock row. |
| 1556 | if (lf_start < 0) continue; |
| 1557 | |
| 1558 | // On the last row, defer the remaining rows to the loop filter pass below. |
| 1559 | if (mi_row + MI_BLOCK_SIZE >= cm->mi_rows) continue; |
| 1560 | |
James Zern | e656f44 | 2014-06-19 21:14:51 -0700 | [diff] [blame] | 1561 | winterface->sync(&pbi->lf_worker); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1562 | lf_data->start = lf_start; |
| 1563 | lf_data->stop = mi_row; |
| 1564 | if (pbi->max_threads > 1) { |
James Zern | e656f44 | 2014-06-19 21:14:51 -0700 | [diff] [blame] | 1565 | winterface->launch(&pbi->lf_worker); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1566 | } else { |
James Zern | e656f44 | 2014-06-19 21:14:51 -0700 | [diff] [blame] | 1567 | winterface->execute(&pbi->lf_worker); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1568 | } |
| 1569 | } |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1570 | // After loop filtering, the last 7 rows of pixels in each superblock row |
| 1571 | // may still be changed by the longest loop filter of the next superblock |
| 1572 | // row. |
| 1573 | if (pbi->frame_parallel_decode) |
| 1574 | vp9_frameworker_broadcast(pbi->cur_buf, |
| 1575 | mi_row << MI_BLOCK_SIZE_LOG2); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1576 | } |
| 1577 | } |
| 1578 | |
| 1579 | // Loopfilter remaining rows in the frame. |
Frank Galligan | bfb6d48 | 2015-05-21 11:49:11 -0700 | [diff] [blame] | 1580 | if (cm->lf.filter_level && !cm->skip_loop_filter) { |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1581 | LFWorkerData *const lf_data = (LFWorkerData*)pbi->lf_worker.data1; |
James Zern | e656f44 | 2014-06-19 21:14:51 -0700 | [diff] [blame] | 1582 | winterface->sync(&pbi->lf_worker); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1583 | lf_data->start = lf_data->stop; |
| 1584 | lf_data->stop = cm->mi_rows; |
James Zern | e656f44 | 2014-06-19 21:14:51 -0700 | [diff] [blame] | 1585 | winterface->execute(&pbi->lf_worker); |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 1586 | } |
| 1587 | |
| 1588 | // Get last tile data. |
| 1589 | tile_data = pbi->tile_data + tile_cols * tile_rows - 1; |
| 1590 | |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1591 | if (pbi->frame_parallel_decode) |
| 1592 | vp9_frameworker_broadcast(pbi->cur_buf, INT_MAX); |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1593 | return vpx_reader_find_end(&tile_data->bit_reader); |
Dmitry Kovalev | 72f9f10 | 2013-03-27 16:23:12 -0700 | [diff] [blame] | 1594 | } |
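| | // The pointer returned above marks the end of the final tile's bit reader |
| | // and tells the caller where the compressed frame data ends. |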
| 1595 | |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1596 | // On entry 'tile_data->data_end' points to the end of the input frame; on exit |
| 1597 | // it is updated to reflect the bitreader position of the final tile column if |
| 1598 | // present in the tile buffer group or NULL otherwise. |
James Zern | 0bd82af | 2015-09-30 23:02:31 -0700 | [diff] [blame] | 1599 | static int tile_worker_hook(TileWorkerData *const tile_data, |
| 1600 | VP9Decoder *const pbi) { |
James Zern | ba7ea44 | 2015-10-10 11:16:20 -0700 | [diff] [blame] | 1601 | TileInfo *volatile tile = &tile_data->xd.tile; |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1602 | const int final_col = (1 << pbi->common.log2_tile_cols) - 1; |
| 1603 | const uint8_t *volatile bit_reader_end = NULL; |
| 1604 | volatile int n = tile_data->buf_start; |
| 1605 | tile_data->error_info.setjmp = 1; |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1606 | |
James Zern | 953dd18 | 2014-12-17 12:00:05 -0800 | [diff] [blame] | 1607 | if (setjmp(tile_data->error_info.jmp)) { |
| 1608 | tile_data->error_info.setjmp = 0; |
| 1609 | tile_data->xd.corrupted = 1; |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1610 | tile_data->data_end = NULL; |
James Zern | 953dd18 | 2014-12-17 12:00:05 -0800 | [diff] [blame] | 1611 | return 0; |
| 1612 | } |
| 1613 | |
James Zern | 953dd18 | 2014-12-17 12:00:05 -0800 | [diff] [blame] | 1614 | tile_data->xd.error_info = &tile_data->error_info; |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1615 | tile_data->xd.corrupted = 0; |
James Zern | 953dd18 | 2014-12-17 12:00:05 -0800 | [diff] [blame] | 1616 | |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1617 | do { |
| 1618 | int mi_row, mi_col; |
| 1619 | const TileBuffer *const buf = pbi->tile_buffers + n; |
| 1620 | vp9_zero(tile_data->dqcoeff); |
| 1621 | vp9_tile_init(tile, &pbi->common, 0, buf->col); |
| 1622 | setup_token_decoder(buf->data, tile_data->data_end, buf->size, |
| 1623 | &tile_data->error_info, &tile_data->bit_reader, |
| 1624 | pbi->decrypt_cb, pbi->decrypt_state); |
| 1625 | vp9_init_macroblockd(&pbi->common, &tile_data->xd, tile_data->dqcoeff); |
| 1626 | |
| 1627 | for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end; |
| 1628 | mi_row += MI_BLOCK_SIZE) { |
| 1629 | vp9_zero(tile_data->xd.left_context); |
| 1630 | vp9_zero(tile_data->xd.left_seg_context); |
| 1631 | for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end; |
| 1632 | mi_col += MI_BLOCK_SIZE) { |
James Zern | 0bd82af | 2015-09-30 23:02:31 -0700 | [diff] [blame] | 1633 | decode_partition(pbi, &tile_data->xd, mi_row, mi_col, |
| 1634 | &tile_data->bit_reader, BLOCK_64X64, 4); |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1635 | } |
Deb Mukherjee | 3a833ea | 2013-11-04 16:34:18 -0800 | [diff] [blame] | 1636 | } |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1637 | |
| 1638 | if (buf->col == final_col) { |
| 1639 | bit_reader_end = vpx_reader_find_end(&tile_data->bit_reader); |
| 1640 | } |
| 1641 | } while (!tile_data->xd.corrupted && ++n <= tile_data->buf_end); |
| 1642 | |
| 1643 | tile_data->data_end = bit_reader_end; |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1644 | return !tile_data->xd.corrupted; |
| 1645 | } |
| 1646 | |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1647 | // sorts in descending order |
| 1648 | static int compare_tile_buffers(const void *a, const void *b) { |
| 1649 | const TileBuffer *const buf1 = (const TileBuffer*)a; |
| 1650 | const TileBuffer *const buf2 = (const TileBuffer*)b; |
James Zern | 9e81112 | 2015-04-21 22:48:24 -0700 | [diff] [blame] | 1651 | return (int)(buf2->size - buf1->size); |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1652 | } |
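| | // Used with qsort() to order tile buffers largest-first, so the most |
| | // expensive tiles can be handed to worker threads before the smaller ones. |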
| 1653 | |
Dmitry Kovalev | 56c2f41 | 2014-04-08 11:41:12 -0700 | [diff] [blame] | 1654 | static const uint8_t *decode_tiles_mt(VP9Decoder *pbi, |
Dmitry Kovalev | 88a10ab | 2014-03-20 15:01:37 -0700 | [diff] [blame] | 1655 | const uint8_t *data, |
| 1656 | const uint8_t *data_end) { |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1657 | VP9_COMMON *const cm = &pbi->common; |
Jingning Han | 04d2e57 | 2015-07-01 16:32:48 -0700 | [diff] [blame] | 1658 | const VPxWorkerInterface *const winterface = vpx_get_worker_interface(); |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1659 | const uint8_t *bit_reader_end = NULL; |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1660 | const int aligned_mi_cols = mi_cols_aligned_to_sb(cm->mi_cols); |
| 1661 | const int tile_cols = 1 << cm->log2_tile_cols; |
| 1662 | const int tile_rows = 1 << cm->log2_tile_rows; |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1663 | const int num_workers = VPXMIN(pbi->max_threads, tile_cols); |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1664 | int n; |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1665 | |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1666 | assert(tile_cols <= (1 << 6)); |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1667 | assert(tile_rows == 1); |
| 1668 | (void)tile_rows; |
| 1669 | |
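// Lazily allocate and start the tile workers on first use; the last worker
// runs on the calling thread, so no thread is created for it.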
Frank Galligan | 6ae5893 | 2014-04-06 20:07:14 -0700 | [diff] [blame] | 1670 | if (pbi->num_tile_workers == 0) { |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1671 | const int num_threads = pbi->max_threads; |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1672 | CHECK_MEM_ERROR(cm, pbi->tile_workers, |
Frank Galligan | 6ae5893 | 2014-04-06 20:07:14 -0700 | [diff] [blame] | 1673 | vpx_malloc(num_threads * sizeof(*pbi->tile_workers))); |
James Zern | ff3ae42 | 2014-08-30 20:15:37 -0700 | [diff] [blame] | 1674 | // Ensure tile data offsets will be properly aligned. This may fail on |
| 1675 | // platforms without DECLARE_ALIGNED(). |
| 1676 | assert((sizeof(*pbi->tile_worker_data) % 16) == 0); |
| 1677 | CHECK_MEM_ERROR(cm, pbi->tile_worker_data, |
| 1678 | vpx_memalign(32, num_threads * |
| 1679 | sizeof(*pbi->tile_worker_data))); |
James Zern | 50b20b9 | 2015-10-07 17:45:45 -0700 | [diff] [blame] | 1680 | for (n = 0; n < num_threads; ++n) { |
| 1681 | VPxWorker *const worker = &pbi->tile_workers[n]; |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1682 | ++pbi->num_tile_workers; |
| 1683 | |
James Zern | e656f44 | 2014-06-19 21:14:51 -0700 | [diff] [blame] | 1684 | winterface->init(worker); |
James Zern | 50b20b9 | 2015-10-07 17:45:45 -0700 | [diff] [blame] | 1685 | if (n < num_threads - 1 && !winterface->reset(worker)) { |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1686 | vpx_internal_error(&cm->error, VPX_CODEC_ERROR, |
| 1687 | "Tile decoder thread creation failed"); |
| 1688 | } |
| 1689 | } |
| 1690 | } |
| 1691 | |
Yunqing Wang | 903801f | 2013-12-27 15:25:54 -0800 | [diff] [blame] | 1692 | // Reset tile decoding hook |
Frank Galligan | 6ae5893 | 2014-04-06 20:07:14 -0700 | [diff] [blame] | 1693 | for (n = 0; n < num_workers; ++n) { |
Jingning Han | 04d2e57 | 2015-07-01 16:32:48 -0700 | [diff] [blame] | 1694 | VPxWorker *const worker = &pbi->tile_workers[n]; |
James Zern | 2d06b08 | 2015-08-19 19:46:19 -0700 | [diff] [blame] | 1695 | TileWorkerData *const tile_data = &pbi->tile_worker_data[n]; |
James Zern | ff3ae42 | 2014-08-30 20:15:37 -0700 | [diff] [blame] | 1696 | winterface->sync(worker); |
James Zern | 2d06b08 | 2015-08-19 19:46:19 -0700 | [diff] [blame] | 1697 | tile_data->xd = pbi->mb; |
| 1698 | tile_data->xd.counts = |
| 1699 | cm->frame_parallel_decoding_mode ? NULL : &tile_data->counts; |
Jingning Han | 04d2e57 | 2015-07-01 16:32:48 -0700 | [diff] [blame] | 1700 | worker->hook = (VPxWorkerHook)tile_worker_hook; |
James Zern | 2d06b08 | 2015-08-19 19:46:19 -0700 | [diff] [blame] | 1701 | worker->data1 = tile_data; |
James Zern | 0bd82af | 2015-09-30 23:02:31 -0700 | [diff] [blame] | 1702 | worker->data2 = pbi; |
Yunqing Wang | 903801f | 2013-12-27 15:25:54 -0800 | [diff] [blame] | 1703 | } |
| 1704 | |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1705 | // Note: this memset assumes above_context[0], [1] and [2] |
| 1706 | // are allocated as part of the same buffer. |
James Zern | f58011a | 2015-04-23 20:47:40 -0700 | [diff] [blame] | 1707 | memset(cm->above_context, 0, |
| 1708 | sizeof(*cm->above_context) * MAX_MB_PLANE * 2 * aligned_mi_cols); |
| 1709 | memset(cm->above_seg_context, 0, |
| 1710 | sizeof(*cm->above_seg_context) * aligned_mi_cols); |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1711 | |
Scott LaVarnway | 7718117 | 2015-09-29 05:20:49 -0700 | [diff] [blame] | 1712 | vp9_reset_lfm(cm); |
| 1713 | |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1714 | // Load tile data into tile_buffers |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1715 | get_tile_buffers(pbi, data, data_end, tile_cols, tile_rows, |
| 1716 | &pbi->tile_buffers); |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1717 | |
| 1718 | // Sort the buffers based on size in descending order. |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1719 | qsort(pbi->tile_buffers, tile_cols, sizeof(pbi->tile_buffers[0]), |
Dmitry Kovalev | ccfb4b9 | 2014-05-12 12:49:29 -0700 | [diff] [blame] | 1720 | compare_tile_buffers); |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1721 | |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1722 | if (num_workers == tile_cols) { |
| 1723 | // Rearrange the tile buffers such that the largest, and |
| 1724 | // presumably the most difficult, tile will be decoded in the main thread. |
| 1725 | // This should help minimize the number of instances where the main thread |
| 1726 | // is waiting for a worker to complete. |
| 1727 | const TileBuffer largest = pbi->tile_buffers[0]; |
| 1728 | memmove(pbi->tile_buffers, pbi->tile_buffers + 1, |
| 1729 | (tile_cols - 1) * sizeof(pbi->tile_buffers[0])); |
| 1730 | pbi->tile_buffers[tile_cols - 1] = largest; |
| 1731 | } else { |
| 1732 | int start = 0, end = tile_cols - 2; |
| 1733 | TileBuffer tmp; |
| 1734 | |
| 1735 | // Interleave the tiles to distribute the load between threads, assuming a |
| 1736 | // larger tile implies it is more difficult to decode. |
| 1737 | while (start < end) { |
| 1738 | tmp = pbi->tile_buffers[start]; |
| 1739 | pbi->tile_buffers[start] = pbi->tile_buffers[end]; |
| 1740 | pbi->tile_buffers[end] = tmp; |
| 1741 | start += 2; |
| 1742 | end -= 2; |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1743 | } |
| 1744 | } |
| 1745 | |
Yunqing Wang | b3b7645 | 2015-01-30 17:00:54 -0800 | [diff] [blame] | 1746 | // Initialize thread frame counts. |
| 1747 | if (!cm->frame_parallel_decoding_mode) { |
James Zern | 50b20b9 | 2015-10-07 17:45:45 -0700 | [diff] [blame] | 1748 | for (n = 0; n < num_workers; ++n) { |
Yunqing Wang | b3b7645 | 2015-01-30 17:00:54 -0800 | [diff] [blame] | 1749 | TileWorkerData *const tile_data = |
James Zern | 50b20b9 | 2015-10-07 17:45:45 -0700 | [diff] [blame] | 1750 | (TileWorkerData*)pbi->tile_workers[n].data1; |
Yunqing Wang | b3b7645 | 2015-01-30 17:00:54 -0800 | [diff] [blame] | 1751 | vp9_zero(tile_data->counts); |
| 1752 | } |
| 1753 | } |
| 1754 | |
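// Assign each worker a contiguous range of tile buffers; any remainder is
// spread over the last workers.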
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1755 | { |
| 1756 | const int base = tile_cols / num_workers; |
| 1757 | const int remain = tile_cols % num_workers; |
| 1758 | int buf_start = 0; |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1759 | |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1760 | for (n = 0; n < num_workers; ++n) { |
| 1761 | const int count = base + (remain + n) / num_workers; |
| 1762 | VPxWorker *const worker = &pbi->tile_workers[n]; |
| 1763 | TileWorkerData *const tile_data = (TileWorkerData*)worker->data1; |
| 1764 | |
| 1765 | tile_data->buf_start = buf_start; |
| 1766 | tile_data->buf_end = buf_start + count - 1; |
| 1767 | tile_data->data_end = data_end; |
| 1768 | buf_start += count; |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1769 | |
| 1770 | worker->had_error = 0; |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1771 | if (n == num_workers - 1) { |
| 1772 | assert(tile_data->buf_end == tile_cols - 1); |
James Zern | e656f44 | 2014-06-19 21:14:51 -0700 | [diff] [blame] | 1773 | winterface->execute(worker); |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1774 | } else { |
James Zern | e656f44 | 2014-06-19 21:14:51 -0700 | [diff] [blame] | 1775 | winterface->launch(worker); |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1776 | } |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1777 | } |
| 1778 | |
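// Sync the workers in reverse launch order; the last worker decodes the final
// tile column, so its data end marks the end of the frame data.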
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1779 | for (; n > 0; --n) { |
| 1780 | VPxWorker *const worker = &pbi->tile_workers[n - 1]; |
| 1781 | TileWorkerData *const tile_data = (TileWorkerData*)worker->data1; |
James Zern | 953dd18 | 2014-12-17 12:00:05 -0800 | [diff] [blame] | 1782 | // TODO(jzern): The tile may have specific error data associated with |
| 1783 | // its vpx_internal_error_info which could be propagated to the main info |
 | 1784 | // in cm. Additionally, once the threads have been synced and an error is
| 1785 | // detected, there's no point in continuing to decode tiles. |
James Zern | e656f44 | 2014-06-19 21:14:51 -0700 | [diff] [blame] | 1786 | pbi->mb.corrupted |= !winterface->sync(worker); |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1787 | if (!bit_reader_end) bit_reader_end = tile_data->data_end; |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1788 | } |
James Zern | ad0ac04 | 2015-09-04 20:24:29 -0700 | [diff] [blame] | 1789 | } |
Yunqing Wang | b3b7645 | 2015-01-30 17:00:54 -0800 | [diff] [blame] | 1790 | |
James Zern | ad0ac04 | 2015-09-04 20:24:29 -0700 | [diff] [blame] | 1791 | // Accumulate thread frame counts. |
| 1792 | if (!cm->frame_parallel_decoding_mode) { |
James Zern | 50b20b9 | 2015-10-07 17:45:45 -0700 | [diff] [blame] | 1793 | for (n = 0; n < num_workers; ++n) { |
James Zern | ad0ac04 | 2015-09-04 20:24:29 -0700 | [diff] [blame] | 1794 | TileWorkerData *const tile_data = |
James Zern | 50b20b9 | 2015-10-07 17:45:45 -0700 | [diff] [blame] | 1795 | (TileWorkerData*)pbi->tile_workers[n].data1; |
James Zern | ad0ac04 | 2015-09-04 20:24:29 -0700 | [diff] [blame] | 1796 | vp9_accumulate_frame_counts(&cm->counts, &tile_data->counts, 1); |
Yunqing Wang | b3b7645 | 2015-01-30 17:00:54 -0800 | [diff] [blame] | 1797 | } |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1798 | } |
| 1799 | |
James Zern | 1f4a6c8 | 2015-09-25 20:43:04 -0700 | [diff] [blame] | 1800 | assert(bit_reader_end || pbi->mb.corrupted); |
James Zern | 345fbfe | 2013-12-05 15:42:47 -0800 | [diff] [blame] | 1801 | return bit_reader_end; |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 1802 | } |
| 1803 | |
Yaowu Xu | 501fce7 | 2014-03-01 20:31:47 -0800 | [diff] [blame] | 1804 | static void error_handler(void *data) { |
Dmitry Kovalev | 18c83b3 | 2013-05-28 18:07:54 -0700 | [diff] [blame] | 1805 | VP9_COMMON *const cm = (VP9_COMMON *)data; |
| 1806 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, "Truncated packet"); |
| 1807 | } |
John Koleszar | c6b9039 | 2012-07-13 15:21:29 -0700 | [diff] [blame] | 1808 | |
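// Reads bit depth, color space, color range, and chroma subsampling from the
// uncompressed header. Profiles 1 and 3 code the subsampling explicitly;
// profiles 0 and 2 imply 4:2:0.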
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1809 | static void read_bitdepth_colorspace_sampling( |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1810 | VP9_COMMON *cm, struct vpx_read_bit_buffer *rb) { |
Deb Mukherjee | 993d10a | 2014-09-24 06:36:34 -0700 | [diff] [blame] | 1811 | if (cm->profile >= PROFILE_2) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1812 | cm->bit_depth = vpx_rb_read_bit(rb) ? VPX_BITS_12 : VPX_BITS_10; |
Deb Mukherjee | 993d10a | 2014-09-24 06:36:34 -0700 | [diff] [blame] | 1813 | #if CONFIG_VP9_HIGHBITDEPTH |
| 1814 | cm->use_highbitdepth = 1; |
| 1815 | #endif |
| 1816 | } else { |
| 1817 | cm->bit_depth = VPX_BITS_8; |
| 1818 | #if CONFIG_VP9_HIGHBITDEPTH |
| 1819 | cm->use_highbitdepth = 0; |
| 1820 | #endif |
| 1821 | } |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1822 | cm->color_space = vpx_rb_read_literal(rb, 3); |
Yaowu Xu | e94b415 | 2015-01-13 10:07:20 -0800 | [diff] [blame] | 1823 | if (cm->color_space != VPX_CS_SRGB) { |
Yaowu Xu | 5684295 | 2015-10-16 16:25:08 -0700 | [diff] [blame] | 1824 | cm->color_range = (vpx_color_range_t)vpx_rb_read_bit(rb); |
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1825 | if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1826 | cm->subsampling_x = vpx_rb_read_bit(rb); |
| 1827 | cm->subsampling_y = vpx_rb_read_bit(rb); |
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1828 | if (cm->subsampling_x == 1 && cm->subsampling_y == 1) |
| 1829 | vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1830 | "4:2:0 color not supported in profile 1 or 3"); |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1831 | if (vpx_rb_read_bit(rb)) |
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1832 | vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1833 | "Reserved bit set"); |
| 1834 | } else { |
| 1835 | cm->subsampling_y = cm->subsampling_x = 1; |
| 1836 | } |
| 1837 | } else { |
Yaowu Xu | 5684295 | 2015-10-16 16:25:08 -0700 | [diff] [blame] | 1838 | cm->color_range = VPX_CR_FULL_RANGE; |
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1839 | if (cm->profile == PROFILE_1 || cm->profile == PROFILE_3) { |
 | 1840 | // Note: if the colorspace is sRGB then 4:4:4 chroma sampling is assumed.
| 1841 | // 4:2:2 or 4:4:0 chroma sampling is not allowed. |
| 1842 | cm->subsampling_y = cm->subsampling_x = 0; |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1843 | if (vpx_rb_read_bit(rb)) |
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1844 | vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1845 | "Reserved bit set"); |
| 1846 | } else { |
| 1847 | vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1848 | "4:4:4 color not supported in profile 0 or 2"); |
| 1849 | } |
| 1850 | } |
| 1851 | } |
| 1852 | |
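// Parses the uncompressed frame header. Returns the size of the compressed
// (first partition) header, or 0 when an already-decoded frame is shown
// directly.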
Dmitry Kovalev | 56c2f41 | 2014-04-08 11:41:12 -0700 | [diff] [blame] | 1853 | static size_t read_uncompressed_header(VP9Decoder *pbi, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 1854 | struct vpx_read_bit_buffer *rb) { |
Dmitry Kovalev | 18c83b3 | 2013-05-28 18:07:54 -0700 | [diff] [blame] | 1855 | VP9_COMMON *const cm = &pbi->common; |
Jingning Han | 2b2b461 | 2015-05-11 10:05:58 -0700 | [diff] [blame] | 1856 | BufferPool *const pool = cm->buffer_pool; |
| 1857 | RefCntBuffer *const frame_bufs = pool->frame_bufs; |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1858 | int i, mask, ref_index = 0; |
Yaowu Xu | 062fb50 | 2013-10-18 10:32:56 -0700 | [diff] [blame] | 1859 | size_t sz; |
John Koleszar | c6b9039 | 2012-07-13 15:21:29 -0700 | [diff] [blame] | 1860 | |
Dmitry Kovalev | 18c83b3 | 2013-05-28 18:07:54 -0700 | [diff] [blame] | 1861 | cm->last_frame_type = cm->frame_type; |
Scott LaVarnway | c9976b3 | 2015-06-08 03:38:13 -0700 | [diff] [blame] | 1862 | cm->last_intra_only = cm->intra_only; |
Dmitry Kovalev | 514b8ad | 2013-06-07 13:41:44 -0700 | [diff] [blame] | 1863 | |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1864 | if (vpx_rb_read_literal(rb, 2) != VP9_FRAME_MARKER) |
John Koleszar | e5b956f | 2013-06-06 23:53:56 -0700 | [diff] [blame] | 1865 | vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1866 | "Invalid frame marker"); |
| 1867 | |
Deb Mukherjee | c447a50 | 2014-07-15 01:54:29 -0700 | [diff] [blame] | 1868 | cm->profile = vp9_read_profile(rb); |
Yaowu Xu | 7c0c62d | 2015-07-16 15:40:22 -0700 | [diff] [blame] | 1869 | #if CONFIG_VP9_HIGHBITDEPTH |
Deb Mukherjee | d35df2d | 2014-04-04 17:30:16 -0700 | [diff] [blame] | 1870 | if (cm->profile >= MAX_PROFILES) |
| 1871 | vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1872 | "Unsupported bitstream profile"); |
Yaowu Xu | 7c0c62d | 2015-07-16 15:40:22 -0700 | [diff] [blame] | 1873 | #else |
| 1874 | if (cm->profile >= PROFILE_2) |
| 1875 | vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1876 | "Unsupported bitstream profile"); |
| 1877 | #endif |
John Koleszar | e5b956f | 2013-06-06 23:53:56 -0700 | [diff] [blame] | 1878 | |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1879 | cm->show_existing_frame = vpx_rb_read_bit(rb); |
Tero Rintaluoma | 047b0b0 | 2013-12-05 11:42:47 +0200 | [diff] [blame] | 1880 | if (cm->show_existing_frame) { |
Adrian Grange | 3246692 | 2014-01-21 10:39:39 -0800 | [diff] [blame] | 1881 | // Show an existing frame directly. |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1882 | const int frame_to_show = cm->ref_frame_map[vpx_rb_read_literal(rb, 3)]; |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1883 | lock_buffer_pool(pool); |
| 1884 | if (frame_to_show < 0 || frame_bufs[frame_to_show].ref_count < 1) { |
| 1885 | unlock_buffer_pool(pool); |
Adrian Grange | 709feca | 2014-02-24 10:15:21 -0800 | [diff] [blame] | 1886 | vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1887 | "Buffer %d does not contain a decoded frame", |
| 1888 | frame_to_show); |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1889 | } |
Adrian Grange | 709feca | 2014-02-24 10:15:21 -0800 | [diff] [blame] | 1890 | |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1891 | ref_cnt_fb(frame_bufs, &cm->new_fb_idx, frame_to_show); |
| 1892 | unlock_buffer_pool(pool); |
John Koleszar | 2584a5e | 2012-10-03 12:11:05 -0700 | [diff] [blame] | 1893 | pbi->refresh_frame_flags = 0; |
Dmitry Kovalev | 816d6c9 | 2013-08-09 14:41:51 -0700 | [diff] [blame] | 1894 | cm->lf.filter_level = 0; |
Adrian Grange | 3246692 | 2014-01-21 10:39:39 -0800 | [diff] [blame] | 1895 | cm->show_frame = 1; |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1896 | |
| 1897 | if (pbi->frame_parallel_decode) { |
| 1898 | for (i = 0; i < REF_FRAMES; ++i) |
| 1899 | cm->next_ref_frame_map[i] = cm->ref_frame_map[i]; |
| 1900 | } |
John Koleszar | 2584a5e | 2012-10-03 12:11:05 -0700 | [diff] [blame] | 1901 | return 0; |
| 1902 | } |
Dmitry Kovalev | 514b8ad | 2013-06-07 13:41:44 -0700 | [diff] [blame] | 1903 | |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1904 | cm->frame_type = (FRAME_TYPE) vpx_rb_read_bit(rb); |
| 1905 | cm->show_frame = vpx_rb_read_bit(rb); |
| 1906 | cm->error_resilient_mode = vpx_rb_read_bit(rb); |
John Koleszar | c6b9039 | 2012-07-13 15:21:29 -0700 | [diff] [blame] | 1907 | |
Dmitry Kovalev | 18c83b3 | 2013-05-28 18:07:54 -0700 | [diff] [blame] | 1908 | if (cm->frame_type == KEY_FRAME) { |
Adrian Grange | 7c43fb6 | 2014-06-09 15:22:17 -0700 | [diff] [blame] | 1909 | if (!vp9_read_sync_code(rb)) |
| 1910 | vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1911 | "Invalid frame sync code"); |
Adrian Grange | 9e5bb95 | 2013-05-29 17:16:00 -0700 | [diff] [blame] | 1912 | |
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1913 | read_bitdepth_colorspace_sampling(cm, rb); |
Dmitry Kovalev | 0d4b8d7 | 2013-12-05 16:23:09 -0800 | [diff] [blame] | 1914 | pbi->refresh_frame_flags = (1 << REF_FRAMES) - 1; |
Dmitry Kovalev | 12345cb | 2013-06-05 20:56:37 -0700 | [diff] [blame] | 1915 | |
Dmitry Kovalev | ba41e9d | 2013-12-27 18:44:19 -0800 | [diff] [blame] | 1916 | for (i = 0; i < REFS_PER_FRAME; ++i) { |
Adrian Grange | 3807dd8 | 2015-02-18 09:40:34 -0800 | [diff] [blame] | 1917 | cm->frame_refs[i].idx = INVALID_IDX; |
Adrian Grange | f68aaa3 | 2014-07-10 15:35:51 -0700 | [diff] [blame] | 1918 | cm->frame_refs[i].buf = NULL; |
Dmitry Kovalev | ba41e9d | 2013-12-27 18:44:19 -0800 | [diff] [blame] | 1919 | } |
Dmitry Kovalev | 514b8ad | 2013-06-07 13:41:44 -0700 | [diff] [blame] | 1920 | |
Dmitry Kovalev | 9366624 | 2014-03-25 14:21:26 -0700 | [diff] [blame] | 1921 | setup_frame_size(cm, rb); |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1922 | if (pbi->need_resync) { |
James Zern | f58011a | 2015-04-23 20:47:40 -0700 | [diff] [blame] | 1923 | memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map)); |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1924 | pbi->need_resync = 0; |
| 1925 | } |
Dmitry Kovalev | 12345cb | 2013-06-05 20:56:37 -0700 | [diff] [blame] | 1926 | } else { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1927 | cm->intra_only = cm->show_frame ? 0 : vpx_rb_read_bit(rb); |
Adrian Grange | 07a5777 | 2013-06-07 15:55:15 -0700 | [diff] [blame] | 1928 | |
Adrian Grange | eac344e | 2013-06-09 10:10:33 -0700 | [diff] [blame] | 1929 | cm->reset_frame_context = cm->error_resilient_mode ? |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1930 | 0 : vpx_rb_read_literal(rb, 2); |
Dmitry Kovalev | 12345cb | 2013-06-05 20:56:37 -0700 | [diff] [blame] | 1931 | |
Adrian Grange | 07a5777 | 2013-06-07 15:55:15 -0700 | [diff] [blame] | 1932 | if (cm->intra_only) { |
Adrian Grange | 7c43fb6 | 2014-06-09 15:22:17 -0700 | [diff] [blame] | 1933 | if (!vp9_read_sync_code(rb)) |
| 1934 | vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM, |
| 1935 | "Invalid frame sync code"); |
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1936 | if (cm->profile > PROFILE_0) { |
| 1937 | read_bitdepth_colorspace_sampling(cm, rb); |
| 1938 | } else { |
| 1939 | // NOTE: The intra-only frame header does not include the specification |
| 1940 | // of either the color format or color sub-sampling in profile 0. VP9 |
Yaowu Xu | ecbca31 | 2015-01-09 10:36:43 -0800 | [diff] [blame] | 1941 | // specifies that the default color format should be YUV 4:2:0 in this |
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1942 | // case (normative). |
Yaowu Xu | e94b415 | 2015-01-13 10:07:20 -0800 | [diff] [blame] | 1943 | cm->color_space = VPX_CS_BT_601; |
Yaowu Xu | 5684295 | 2015-10-16 16:25:08 -0700 | [diff] [blame] | 1944 | cm->color_range = VPX_CR_STUDIO_RANGE; |
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1945 | cm->subsampling_y = cm->subsampling_x = 1; |
Deb Mukherjee | 993d10a | 2014-09-24 06:36:34 -0700 | [diff] [blame] | 1946 | cm->bit_depth = VPX_BITS_8; |
| 1947 | #if CONFIG_VP9_HIGHBITDEPTH |
| 1948 | cm->use_highbitdepth = 0; |
| 1949 | #endif |
Deb Mukherjee | 09bf1d6 | 2014-08-05 13:00:43 -0700 | [diff] [blame] | 1950 | } |
Adrian Grange | eac344e | 2013-06-09 10:10:33 -0700 | [diff] [blame] | 1951 | |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1952 | pbi->refresh_frame_flags = vpx_rb_read_literal(rb, REF_FRAMES); |
Dmitry Kovalev | 9366624 | 2014-03-25 14:21:26 -0700 | [diff] [blame] | 1953 | setup_frame_size(cm, rb); |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1954 | if (pbi->need_resync) { |
James Zern | f58011a | 2015-04-23 20:47:40 -0700 | [diff] [blame] | 1955 | memset(&cm->ref_frame_map, -1, sizeof(cm->ref_frame_map)); |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1956 | pbi->need_resync = 0; |
| 1957 | } |
 | 1958 | } else if (pbi->need_resync != 1) { /* Skip until resync (keyframe/intra-only). */
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1959 | pbi->refresh_frame_flags = vpx_rb_read_literal(rb, REF_FRAMES); |
Dmitry Kovalev | 0d4b8d7 | 2013-12-05 16:23:09 -0800 | [diff] [blame] | 1960 | for (i = 0; i < REFS_PER_FRAME; ++i) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1961 | const int ref = vpx_rb_read_literal(rb, REF_FRAMES_LOG2); |
Dmitry Kovalev | ba41e9d | 2013-12-27 18:44:19 -0800 | [diff] [blame] | 1962 | const int idx = cm->ref_frame_map[ref]; |
Jim Bankoski | 9f37d14 | 2014-06-27 10:03:15 -0700 | [diff] [blame] | 1963 | RefBuffer *const ref_frame = &cm->frame_refs[i]; |
| 1964 | ref_frame->idx = idx; |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 1965 | ref_frame->buf = &frame_bufs[idx].buf; |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1966 | cm->ref_frame_sign_bias[LAST_FRAME + i] = vpx_rb_read_bit(rb); |
Ronald S. Bultje | 6ef805e | 2013-06-06 13:44:34 -0700 | [diff] [blame] | 1967 | } |
Dmitry Kovalev | 12345cb | 2013-06-05 20:56:37 -0700 | [diff] [blame] | 1968 | |
Dmitry Kovalev | 9366624 | 2014-03-25 14:21:26 -0700 | [diff] [blame] | 1969 | setup_frame_size_with_refs(cm, rb); |
Adrian Grange | 07a5777 | 2013-06-07 15:55:15 -0700 | [diff] [blame] | 1970 | |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 1971 | cm->allow_high_precision_mv = vpx_rb_read_bit(rb); |
Dmitry Kovalev | 4264c93 | 2014-01-24 12:26:57 -0800 | [diff] [blame] | 1972 | cm->interp_filter = read_interp_filter(rb); |
Dmitry Kovalev | 4053fde | 2013-06-07 18:09:29 -0700 | [diff] [blame] | 1973 | |
Dmitry Kovalev | 0d4b8d7 | 2013-12-05 16:23:09 -0800 | [diff] [blame] | 1974 | for (i = 0; i < REFS_PER_FRAME; ++i) { |
Dmitry Kovalev | ba41e9d | 2013-12-27 18:44:19 -0800 | [diff] [blame] | 1975 | RefBuffer *const ref_buf = &cm->frame_refs[i]; |
Deb Mukherjee | 0d3c3d3 | 2014-09-16 12:47:18 -0700 | [diff] [blame] | 1976 | #if CONFIG_VP9_HIGHBITDEPTH |
| 1977 | vp9_setup_scale_factors_for_frame(&ref_buf->sf, |
| 1978 | ref_buf->buf->y_crop_width, |
| 1979 | ref_buf->buf->y_crop_height, |
| 1980 | cm->width, cm->height, |
| 1981 | cm->use_highbitdepth); |
| 1982 | #else |
Dmitry Kovalev | ba41e9d | 2013-12-27 18:44:19 -0800 | [diff] [blame] | 1983 | vp9_setup_scale_factors_for_frame(&ref_buf->sf, |
| 1984 | ref_buf->buf->y_crop_width, |
| 1985 | ref_buf->buf->y_crop_height, |
| 1986 | cm->width, cm->height); |
Deb Mukherjee | 0d3c3d3 | 2014-09-16 12:47:18 -0700 | [diff] [blame] | 1987 | #endif |
Jingning Han | 86d2a9b | 2013-11-22 10:56:41 -0800 | [diff] [blame] | 1988 | } |
Adrian Grange | 07a5777 | 2013-06-07 15:55:15 -0700 | [diff] [blame] | 1989 | } |
Dmitry Kovalev | 12345cb | 2013-06-05 20:56:37 -0700 | [diff] [blame] | 1990 | } |
Deb Mukherjee | 993d10a | 2014-09-24 06:36:34 -0700 | [diff] [blame] | 1991 | #if CONFIG_VP9_HIGHBITDEPTH |
| 1992 | get_frame_new_buffer(cm)->bit_depth = cm->bit_depth; |
| 1993 | #endif |
Yaowu Xu | 6cf3031 | 2015-02-23 13:01:14 -0800 | [diff] [blame] | 1994 | get_frame_new_buffer(cm)->color_space = cm->color_space; |
Ronald S. Bultje | eeb5ef0 | 2015-09-15 21:56:51 -0400 | [diff] [blame] | 1995 | get_frame_new_buffer(cm)->color_range = cm->color_range; |
Ronald S. Bultje | 812945a | 2015-09-25 21:51:55 -0400 | [diff] [blame] | 1996 | get_frame_new_buffer(cm)->render_width = cm->render_width; |
| 1997 | get_frame_new_buffer(cm)->render_height = cm->render_height; |
Dmitry Kovalev | 12345cb | 2013-06-05 20:56:37 -0700 | [diff] [blame] | 1998 | |
James Zern | 7ee073e | 2014-09-09 18:55:20 -0700 | [diff] [blame] | 1999 | if (pbi->need_resync) { |
| 2000 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 2001 | "Keyframe / intra-only frame required to reset decoder" |
| 2002 | " state"); |
| 2003 | } |
| 2004 | |
Dmitry Kovalev | 514b8ad | 2013-06-07 13:41:44 -0700 | [diff] [blame] | 2005 | if (!cm->error_resilient_mode) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 2006 | cm->refresh_frame_context = vpx_rb_read_bit(rb); |
| 2007 | cm->frame_parallel_decoding_mode = vpx_rb_read_bit(rb); |
Scott LaVarnway | 2c3b737 | 2015-11-23 14:42:15 -0800 | [diff] [blame] | 2008 | if (!cm->frame_parallel_decoding_mode) |
| 2009 | vp9_zero(cm->counts); |
Dmitry Kovalev | 514b8ad | 2013-06-07 13:41:44 -0700 | [diff] [blame] | 2010 | } else { |
Dmitry Kovalev | 514b8ad | 2013-06-07 13:41:44 -0700 | [diff] [blame] | 2011 | cm->refresh_frame_context = 0; |
| 2012 | cm->frame_parallel_decoding_mode = 1; |
| 2013 | } |
| 2014 | |
Alexander Voronov | d6a59fb | 2013-10-03 20:07:24 +0400 | [diff] [blame] | 2015 | // This flag will be overridden by the call to vp9_setup_past_independence |
| 2016 | // below, forcing the use of context 0 for those frame types. |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 2017 | cm->frame_context_idx = vpx_rb_read_literal(rb, FRAME_CONTEXTS_LOG2); |
Dmitry Kovalev | 12345cb | 2013-06-05 20:56:37 -0700 | [diff] [blame] | 2018 | |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 2019 | // Generate next_ref_frame_map. |
| 2020 | lock_buffer_pool(pool); |
| 2021 | for (mask = pbi->refresh_frame_flags; mask; mask >>= 1) { |
| 2022 | if (mask & 1) { |
| 2023 | cm->next_ref_frame_map[ref_index] = cm->new_fb_idx; |
| 2024 | ++frame_bufs[cm->new_fb_idx].ref_count; |
| 2025 | } else { |
| 2026 | cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index]; |
| 2027 | } |
| 2028 | // Current thread holds the reference frame. |
| 2029 | if (cm->ref_frame_map[ref_index] >= 0) |
| 2030 | ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count; |
| 2031 | ++ref_index; |
| 2032 | } |
| 2033 | |
| 2034 | for (; ref_index < REF_FRAMES; ++ref_index) { |
| 2035 | cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index]; |
| 2036 | // Current thread holds the reference frame. |
| 2037 | if (cm->ref_frame_map[ref_index] >= 0) |
| 2038 | ++frame_bufs[cm->ref_frame_map[ref_index]].ref_count; |
| 2039 | } |
| 2040 | unlock_buffer_pool(pool); |
| 2041 | pbi->hold_ref_buf = 1; |
| 2042 | |
Alexander Voronov | d6a59fb | 2013-10-03 20:07:24 +0400 | [diff] [blame] | 2043 | if (frame_is_intra_only(cm) || cm->error_resilient_mode) |
Dmitry Kovalev | b7616e3 | 2013-08-14 11:20:33 -0700 | [diff] [blame] | 2044 | vp9_setup_past_independence(cm); |
Adrian Grange | eac344e | 2013-06-09 10:10:33 -0700 | [diff] [blame] | 2045 | |
Dmitry Kovalev | 816d6c9 | 2013-08-09 14:41:51 -0700 | [diff] [blame] | 2046 | setup_loopfilter(&cm->lf, rb); |
James Zern | f0eabfd | 2013-10-25 13:50:05 +0200 | [diff] [blame] | 2047 | setup_quantization(cm, &pbi->mb, rb); |
Dmitry Kovalev | b7616e3 | 2013-08-14 11:20:33 -0700 | [diff] [blame] | 2048 | setup_segmentation(&cm->seg, rb); |
Scott LaVarnway | afcb62b | 2015-04-28 07:52:06 -0700 | [diff] [blame] | 2049 | setup_segmentation_dequant(cm); |
Dmitry Kovalev | a237576 | 2013-06-06 12:33:12 -0700 | [diff] [blame] | 2050 | |
| 2051 | setup_tile_info(cm, rb); |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 2052 | sz = vpx_rb_read_literal(rb, 16); |
Dmitry Kovalev | b8b91b2 | 2013-06-03 10:50:57 -0700 | [diff] [blame] | 2053 | |
Dmitry Kovalev | 156de9c | 2013-10-29 11:24:08 -0700 | [diff] [blame] | 2054 | if (sz == 0) |
| 2055 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 2056 | "Invalid header size"); |
| 2057 | |
| 2058 | return sz; |
Dmitry Kovalev | 18c83b3 | 2013-05-28 18:07:54 -0700 | [diff] [blame] | 2059 | } |
| 2060 | |
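// Parses the compressed header: transform mode, coefficient probabilities and,
// for inter frames, mode/interp-filter/reference/mv probability updates.
// Returns nonzero if the bool decoder detected an error.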
Dmitry Kovalev | 56c2f41 | 2014-04-08 11:41:12 -0700 | [diff] [blame] | 2061 | static int read_compressed_header(VP9Decoder *pbi, const uint8_t *data, |
Dmitry Kovalev | 704afd0 | 2013-07-08 11:54:36 -0700 | [diff] [blame] | 2062 | size_t partition_size) { |
| 2063 | VP9_COMMON *const cm = &pbi->common; |
| 2064 | MACROBLOCKD *const xd = &pbi->mb; |
Yunqing Wang | 7c7e4d4 | 2014-10-22 14:37:38 -0700 | [diff] [blame] | 2065 | FRAME_CONTEXT *const fc = cm->fc; |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 2066 | vpx_reader r; |
Dmitry Kovalev | 9d3f27f | 2013-10-21 18:12:08 -0700 | [diff] [blame] | 2067 | int k; |
Dmitry Kovalev | 704afd0 | 2013-07-08 11:54:36 -0700 | [diff] [blame] | 2068 | |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 2069 | if (vpx_reader_init(&r, data, partition_size, pbi->decrypt_cb, |
Joey Parrish | 18c0860 | 2014-04-15 14:10:58 -0700 | [diff] [blame] | 2070 | pbi->decrypt_state)) |
Dmitry Kovalev | 704afd0 | 2013-07-08 11:54:36 -0700 | [diff] [blame] | 2071 | vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR, |
| 2072 | "Failed to allocate bool decoder 0"); |
| 2073 | |
Dmitry Kovalev | c0eb574 | 2013-07-19 11:37:13 -0700 | [diff] [blame] | 2074 | cm->tx_mode = xd->lossless ? ONLY_4X4 : read_tx_mode(&r); |
| 2075 | if (cm->tx_mode == TX_MODE_SELECT) |
Dmitry Kovalev | c5bdc9d | 2013-11-26 16:46:13 -0800 | [diff] [blame] | 2076 | read_tx_mode_probs(&fc->tx_probs, &r); |
Dmitry Kovalev | 9d3f27f | 2013-10-21 18:12:08 -0700 | [diff] [blame] | 2077 | read_coef_probs(fc, cm->tx_mode, &r); |
Dmitry Kovalev | 704afd0 | 2013-07-08 11:54:36 -0700 | [diff] [blame] | 2078 | |
Dmitry Kovalev | b107f2c | 2014-01-29 14:48:42 -0800 | [diff] [blame] | 2079 | for (k = 0; k < SKIP_CONTEXTS; ++k) |
| 2080 | vp9_diff_update_prob(&r, &fc->skip_probs[k]); |
Dmitry Kovalev | 9d3f27f | 2013-10-21 18:12:08 -0700 | [diff] [blame] | 2081 | |
| 2082 | if (!frame_is_intra_only(cm)) { |
| 2083 | nmv_context *const nmvc = &fc->nmvc; |
| 2084 | int i, j; |
| 2085 | |
| 2086 | read_inter_mode_probs(fc, &r); |
| 2087 | |
Dmitry Kovalev | 4264c93 | 2014-01-24 12:26:57 -0800 | [diff] [blame] | 2088 | if (cm->interp_filter == SWITCHABLE) |
Dmitry Kovalev | 9d3f27f | 2013-10-21 18:12:08 -0700 | [diff] [blame] | 2089 | read_switchable_interp_probs(fc, &r); |
| 2090 | |
| 2091 | for (i = 0; i < INTRA_INTER_CONTEXTS; i++) |
| 2092 | vp9_diff_update_prob(&r, &fc->intra_inter_prob[i]); |
| 2093 | |
Dmitry Kovalev | 69fd030 | 2014-02-19 15:33:59 +0100 | [diff] [blame] | 2094 | cm->reference_mode = read_frame_reference_mode(cm, &r); |
| 2095 | if (cm->reference_mode != SINGLE_REFERENCE) |
| 2096 | setup_compound_reference_mode(cm); |
| 2097 | read_frame_reference_mode_probs(cm, &r); |
Dmitry Kovalev | 9d3f27f | 2013-10-21 18:12:08 -0700 | [diff] [blame] | 2098 | |
| 2099 | for (j = 0; j < BLOCK_SIZE_GROUPS; j++) |
| 2100 | for (i = 0; i < INTRA_MODES - 1; ++i) |
| 2101 | vp9_diff_update_prob(&r, &fc->y_mode_prob[j][i]); |
| 2102 | |
| 2103 | for (j = 0; j < PARTITION_CONTEXTS; ++j) |
| 2104 | for (i = 0; i < PARTITION_TYPES - 1; ++i) |
Dmitry Kovalev | dde8069 | 2013-11-01 18:23:06 -0700 | [diff] [blame] | 2105 | vp9_diff_update_prob(&r, &fc->partition_prob[j][i]); |
Dmitry Kovalev | 9d3f27f | 2013-10-21 18:12:08 -0700 | [diff] [blame] | 2106 | |
Dmitry Kovalev | d172201 | 2013-11-05 14:43:35 -0800 | [diff] [blame] | 2107 | read_mv_probs(nmvc, cm->allow_high_precision_mv, &r); |
Dmitry Kovalev | 9d3f27f | 2013-10-21 18:12:08 -0700 | [diff] [blame] | 2108 | } |
Dmitry Kovalev | 704afd0 | 2013-07-08 11:54:36 -0700 | [diff] [blame] | 2109 | |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 2110 | return vpx_reader_has_error(&r); |
Dmitry Kovalev | 704afd0 | 2013-07-08 11:54:36 -0700 | [diff] [blame] | 2111 | } |
| 2112 | |
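// Sets up the header bit reader; when a decrypt callback is present, up to
// MAX_VP9_HEADER_SIZE bytes are decrypted into clear_data first.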
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 2113 | static struct vpx_read_bit_buffer *init_read_bit_buffer( |
Joey Parrish | 18c0860 | 2014-04-15 14:10:58 -0700 | [diff] [blame] | 2114 | VP9Decoder *pbi, |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 2115 | struct vpx_read_bit_buffer *rb, |
Joey Parrish | 18c0860 | 2014-04-15 14:10:58 -0700 | [diff] [blame] | 2116 | const uint8_t *data, |
| 2117 | const uint8_t *data_end, |
James Zern | b105414 | 2015-06-10 15:53:30 -0700 | [diff] [blame] | 2118 | uint8_t clear_data[MAX_VP9_HEADER_SIZE]) { |
Joey Parrish | 18c0860 | 2014-04-15 14:10:58 -0700 | [diff] [blame] | 2119 | rb->bit_offset = 0; |
| 2120 | rb->error_handler = error_handler; |
| 2121 | rb->error_handler_data = &pbi->common; |
| 2122 | if (pbi->decrypt_cb) { |
James Zern | 5e16d39 | 2015-08-17 18:19:22 -0700 | [diff] [blame] | 2123 | const int n = (int)VPXMIN(MAX_VP9_HEADER_SIZE, data_end - data); |
Joey Parrish | 18c0860 | 2014-04-15 14:10:58 -0700 | [diff] [blame] | 2124 | pbi->decrypt_cb(pbi->decrypt_state, data, clear_data, n); |
| 2125 | rb->bit_buffer = clear_data; |
| 2126 | rb->bit_buffer_end = clear_data + n; |
| 2127 | } else { |
| 2128 | rb->bit_buffer = data; |
| 2129 | rb->bit_buffer_end = data_end; |
| 2130 | } |
| 2131 | return rb; |
| 2132 | } |
| 2133 | |
James Zern | b0bafd0 | 2015-06-10 15:47:57 -0700 | [diff] [blame] | 2134 | //------------------------------------------------------------------------------ |
| 2135 | |
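// The 24-bit sync code 0x49 0x83 0x42 identifies a VP9 frame.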
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 2136 | int vp9_read_sync_code(struct vpx_read_bit_buffer *const rb) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 2137 | return vpx_rb_read_literal(rb, 8) == VP9_SYNC_CODE_0 && |
| 2138 | vpx_rb_read_literal(rb, 8) == VP9_SYNC_CODE_1 && |
| 2139 | vpx_rb_read_literal(rb, 8) == VP9_SYNC_CODE_2; |
James Zern | b0bafd0 | 2015-06-10 15:47:57 -0700 | [diff] [blame] | 2140 | } |
| 2141 | |
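// Frame width and height are coded minus one, in 16 bits each.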
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 2142 | void vp9_read_frame_size(struct vpx_read_bit_buffer *rb, |
James Zern | b0bafd0 | 2015-06-10 15:47:57 -0700 | [diff] [blame] | 2143 | int *width, int *height) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 2144 | *width = vpx_rb_read_literal(rb, 16) + 1; |
| 2145 | *height = vpx_rb_read_literal(rb, 16) + 1; |
James Zern | b0bafd0 | 2015-06-10 15:47:57 -0700 | [diff] [blame] | 2146 | } |
| 2147 | |
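// Profiles 0-2 are coded in two bits; the value 3 reads one extra bit to
// allow for future profile extensions.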
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 2148 | BITSTREAM_PROFILE vp9_read_profile(struct vpx_read_bit_buffer *rb) { |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 2149 | int profile = vpx_rb_read_bit(rb); |
| 2150 | profile |= vpx_rb_read_bit(rb) << 1; |
James Zern | b0bafd0 | 2015-06-10 15:47:57 -0700 | [diff] [blame] | 2151 | if (profile > 2) |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 2152 | profile += vpx_rb_read_bit(rb); |
James Zern | b0bafd0 | 2015-06-10 15:47:57 -0700 | [diff] [blame] | 2153 | return (BITSTREAM_PROFILE) profile; |
| 2154 | } |
| 2155 | |
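// Top-level frame decode: parses both headers, decodes the tile data (using
// the multi-threaded path when multiple tile columns and threads are
// available), runs the loop filter, and adapts the probability contexts.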
Dmitry Kovalev | e7135a9 | 2014-05-21 15:51:40 -0700 | [diff] [blame] | 2156 | void vp9_decode_frame(VP9Decoder *pbi, |
| 2157 | const uint8_t *data, const uint8_t *data_end, |
| 2158 | const uint8_t **p_data_end) { |
James Zern | 924d745 | 2013-08-22 20:03:08 -0700 | [diff] [blame] | 2159 | VP9_COMMON *const cm = &pbi->common; |
Dmitry Kovalev | 18c83b3 | 2013-05-28 18:07:54 -0700 | [diff] [blame] | 2160 | MACROBLOCKD *const xd = &pbi->mb; |
Yaowu Xu | bf82514 | 2015-07-20 13:49:15 -0700 | [diff] [blame] | 2161 | struct vpx_read_bit_buffer rb; |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 2162 | int context_updated = 0; |
Joey Parrish | 18c0860 | 2014-04-15 14:10:58 -0700 | [diff] [blame] | 2163 | uint8_t clear_data[MAX_VP9_HEADER_SIZE]; |
| 2164 | const size_t first_partition_size = read_uncompressed_header(pbi, |
| 2165 | init_read_bit_buffer(pbi, &rb, data, data_end, clear_data)); |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 2166 | const int tile_rows = 1 << cm->log2_tile_rows; |
Guillaume Martres | 5b984b3 | 2013-10-11 16:14:29 -0700 | [diff] [blame] | 2167 | const int tile_cols = 1 << cm->log2_tile_cols; |
James Zern | d39f279 | 2013-10-26 12:41:35 +0200 | [diff] [blame] | 2168 | YV12_BUFFER_CONFIG *const new_fb = get_frame_new_buffer(cm); |
hkuang | 25e5552 | 2013-12-12 20:33:06 -0800 | [diff] [blame] | 2169 | xd->cur_buf = new_fb; |
Dmitry Kovalev | 18c83b3 | 2013-05-28 18:07:54 -0700 | [diff] [blame] | 2170 | |
John Koleszar | 2584a5e | 2012-10-03 12:11:05 -0700 | [diff] [blame] | 2171 | if (!first_partition_size) { |
Joey Parrish | 18c0860 | 2014-04-15 14:10:58 -0700 | [diff] [blame] | 2172 | // Showing an existing frame directly; no compressed header follows.
Deb Mukherjee | 5acfafb | 2014-08-26 12:35:15 -0700 | [diff] [blame] | 2173 | *p_data_end = data + (cm->profile <= PROFILE_2 ? 1 : 2); |
Dmitry Kovalev | e7135a9 | 2014-05-21 15:51:40 -0700 | [diff] [blame] | 2174 | return; |
John Koleszar | 2584a5e | 2012-10-03 12:11:05 -0700 | [diff] [blame] | 2175 | } |
Dmitry Kovalev | 18c83b3 | 2013-05-28 18:07:54 -0700 | [diff] [blame] | 2176 | |
Yaowu Xu | cbce003 | 2015-07-20 13:55:06 -0700 | [diff] [blame] | 2177 | data += vpx_rb_bytes_read(&rb); |
Dmitry Kovalev | 18c83b3 | 2013-05-28 18:07:54 -0700 | [diff] [blame] | 2178 | if (!read_is_valid(data, first_partition_size, data_end)) |
James Zern | 924d745 | 2013-08-22 20:03:08 -0700 | [diff] [blame] | 2179 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
John Koleszar | 2bcc473 | 2013-06-11 10:06:31 -0700 | [diff] [blame] | 2180 | "Truncated packet or corrupt header length"); |
Dmitry Kovalev | 18c83b3 | 2013-05-28 18:07:54 -0700 | [diff] [blame] | 2181 | |
hkuang | 5557743 | 2014-10-27 16:19:04 -0700 | [diff] [blame] | 2182 | cm->use_prev_frame_mvs = !cm->error_resilient_mode && |
| 2183 | cm->width == cm->last_width && |
| 2184 | cm->height == cm->last_height && |
Scott LaVarnway | c9976b3 | 2015-06-08 03:38:13 -0700 | [diff] [blame] | 2185 | !cm->last_intra_only && |
| 2186 | cm->last_show_frame && |
| 2187 | (cm->last_frame_type != KEY_FRAME); |
Scott LaVarnway | ac6093d | 2013-09-11 13:45:44 -0400 | [diff] [blame] | 2188 | |
Jim Bankoski | 9dec771 | 2014-01-31 17:35:53 -0800 | [diff] [blame] | 2189 | vp9_setup_block_planes(xd, cm->subsampling_x, cm->subsampling_y); |
John Koleszar | 0ea50ce | 2010-05-18 11:58:33 -0400 | [diff] [blame] | 2190 | |
Yunqing Wang | 7c7e4d4 | 2014-10-22 14:37:38 -0700 | [diff] [blame] | 2191 | *cm->fc = cm->frame_contexts[cm->frame_context_idx]; |
Alexander Voronov | 6c6a978 | 2014-12-11 20:44:19 +0300 | [diff] [blame] | 2192 | if (!cm->fc->initialized) |
| 2193 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 2194 | "Uninitialized entropy context."); |
| 2195 | |
James Zern | d39f279 | 2013-10-26 12:41:35 +0200 | [diff] [blame] | 2196 | xd->corrupted = 0; |
| 2197 | new_fb->corrupted = read_compressed_header(pbi, data, first_partition_size); |
hkuang | dde8195 | 2014-12-04 15:06:31 -0800 | [diff] [blame] | 2198 | if (new_fb->corrupted) |
| 2199 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 2200 | "Decode failed. Frame data header is corrupted."); |
Scott LaVarnway | 353642b | 2013-05-29 16:42:23 -0400 | [diff] [blame] | 2201 | |
Frank Galligan | bfb6d48 | 2015-05-21 11:49:11 -0700 | [diff] [blame] | 2202 | if (cm->lf.filter_level && !cm->skip_loop_filter) { |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 2203 | vp9_loop_filter_frame_init(cm, cm->lf.filter_level); |
| 2204 | } |
| 2205 | |
 | 2206 | // If encoded in frame-parallel mode, the frame context is ready after decoding
| 2207 | // the frame header. |
| 2208 | if (pbi->frame_parallel_decode && cm->frame_parallel_decoding_mode) { |
Jingning Han | 04d2e57 | 2015-07-01 16:32:48 -0700 | [diff] [blame] | 2209 | VPxWorker *const worker = pbi->frame_worker_owner; |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 2210 | FrameWorkerData *const frame_worker_data = worker->data1; |
| 2211 | if (cm->refresh_frame_context) { |
| 2212 | context_updated = 1; |
| 2213 | cm->frame_contexts[cm->frame_context_idx] = *cm->fc; |
| 2214 | } |
| 2215 | vp9_frameworker_lock_stats(worker); |
| 2216 | pbi->cur_buf->row = -1; |
| 2217 | pbi->cur_buf->col = -1; |
| 2218 | frame_worker_data->frame_context_ready = 1; |
| 2219 | // Signal the main thread that context is ready. |
| 2220 | vp9_frameworker_signal_stats(worker); |
| 2221 | vp9_frameworker_unlock_stats(worker); |
| 2222 | } |
| 2223 | |
Yunqing Wang | b3b7645 | 2015-01-30 17:00:54 -0800 | [diff] [blame] | 2224 | if (pbi->max_threads > 1 && tile_rows == 1 && tile_cols > 1) { |
| 2225 | // Multi-threaded tile decoder |
Dmitry Kovalev | 88a10ab | 2014-03-20 15:01:37 -0700 | [diff] [blame] | 2226 | *p_data_end = decode_tiles_mt(pbi, data + first_partition_size, data_end); |
James Zern | db8b1b7 | 2014-08-27 17:02:03 -0700 | [diff] [blame] | 2227 | if (!xd->corrupted) { |
Frank Galligan | bfb6d48 | 2015-05-21 11:49:11 -0700 | [diff] [blame] | 2228 | if (!cm->skip_loop_filter) { |
| 2229 | // If multiple threads are used to decode tiles, then we use those |
| 2230 | // threads to do parallel loopfiltering. |
| 2231 | vp9_loop_filter_frame_mt(new_fb, cm, pbi->mb.plane, |
| 2232 | cm->lf.filter_level, 0, 0, pbi->tile_workers, |
| 2233 | pbi->num_tile_workers, &pbi->lf_row_sync); |
| 2234 | } |
hkuang | dde8195 | 2014-12-04 15:06:31 -0800 | [diff] [blame] | 2235 | } else { |
| 2236 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 2237 | "Decode failed. Frame data is corrupted."); |
James Zern | db8b1b7 | 2014-08-27 17:02:03 -0700 | [diff] [blame] | 2238 | } |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 2239 | } else { |
hkuang | 20c1edf | 2014-05-15 10:51:55 -0700 | [diff] [blame] | 2240 | *p_data_end = decode_tiles(pbi, data + first_partition_size, data_end); |
James Zern | fb48452 | 2013-10-26 14:33:45 +0200 | [diff] [blame] | 2241 | } |
John Koleszar | 0ea50ce | 2010-05-18 11:58:33 -0400 | [diff] [blame] | 2242 | |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 2243 | if (!xd->corrupted) { |
Adrian Grange | 99892e8 | 2014-05-07 10:31:55 -0700 | [diff] [blame] | 2244 | if (!cm->error_resilient_mode && !cm->frame_parallel_decoding_mode) { |
| 2245 | vp9_adapt_coef_probs(cm); |
Paul Wilkins | a14ae84 | 2013-05-07 17:24:21 +0100 | [diff] [blame] | 2246 | |
Adrian Grange | 99892e8 | 2014-05-07 10:31:55 -0700 | [diff] [blame] | 2247 | if (!frame_is_intra_only(cm)) { |
| 2248 | vp9_adapt_mode_probs(cm); |
| 2249 | vp9_adapt_mv_probs(cm, cm->allow_high_precision_mv); |
| 2250 | } |
Deb Mukherjee | 01cafaa | 2013-01-15 06:43:35 -0800 | [diff] [blame] | 2251 | } |
hkuang | c147cf3 | 2014-07-01 16:04:53 -0700 | [diff] [blame] | 2252 | } else { |
| 2253 | vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME, |
| 2254 | "Decode failed. Frame data is corrupted."); |
John Koleszar | c6b9039 | 2012-07-13 15:21:29 -0700 | [diff] [blame] | 2255 | } |
John Koleszar | 0ea50ce | 2010-05-18 11:58:33 -0400 | [diff] [blame] | 2256 | |
hkuang | be6aead | 2015-01-27 12:26:28 -0800 | [diff] [blame] | 2257 | // In non-frame-parallel mode, update the frame context here (the frame-parallel path updated it earlier).
| 2258 | if (cm->refresh_frame_context && !context_updated) |
Yunqing Wang | 7c7e4d4 | 2014-10-22 14:37:38 -0700 | [diff] [blame] | 2259 | cm->frame_contexts[cm->frame_context_idx] = *cm->fc; |
John Koleszar | 0ea50ce | 2010-05-18 11:58:33 -0400 | [diff] [blame] | 2260 | } |