blob: adb07b620bfc2a773cdd7fae6f2b44f14218891a [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
Yaowu Xuf883b422016-08-30 14:01:10 -070012#ifndef AV1_COMMON_ONYXC_INT_H_
13#define AV1_COMMON_ONYXC_INT_H_
Yaowu Xuc27fc142016-08-22 16:08:15 -070014
Yaowu Xuf883b422016-08-30 14:01:10 -070015#include "./aom_config.h"
Yaowu Xucaf20232016-10-18 17:15:40 -070016#include "./av1_rtcd.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070017#include "aom/internal/aom_codec_internal.h"
18#include "aom_util/aom_thread.h"
Alex Converseeb780e72016-12-13 12:46:41 -080019#if CONFIG_ANS
20#include "aom_dsp/ans.h"
21#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -070022#include "av1/common/alloccommon.h"
Tom Finegan17ce8b12017-02-08 12:46:31 -080023#include "av1/common/av1_loopfilter.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070024#include "av1/common/entropy.h"
25#include "av1/common/entropymode.h"
Yaowu Xucaf20232016-10-18 17:15:40 -070026#include "av1/common/entropymv.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070027#include "av1/common/frame_buffers.h"
Yaowu Xucaf20232016-10-18 17:15:40 -070028#include "av1/common/mv.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070029#include "av1/common/quant_common.h"
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -070030#if CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -070031#include "av1/common/restoration.h"
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -070032#endif // CONFIG_LOOP_RESTORATION
Yaowu Xucaf20232016-10-18 17:15:40 -070033#include "av1/common/tile_common.h"
Yushin Cho77bba8d2016-11-04 16:36:56 -070034#include "av1/common/odintrin.h"
35#if CONFIG_PVQ
36#include "av1/common/pvq.h"
37#endif
Luc Trudeaubaeb3752017-04-24 11:19:25 -040038#if CONFIG_CFL
39#include "av1/common/cfl.h"
40#endif
RogerZhoucc5d35d2017-08-07 22:20:15 -070041#if CONFIG_HASH_ME
42// TODO(youzhou@microsoft.com): Encoder only. Move it out of common
43#include "av1/encoder/hash_motion.h"
44#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -070045#ifdef __cplusplus
46extern "C" {
47#endif
48
Jean-Marc Valine5759772017-03-23 18:46:38 -040049#define CDEF_MAX_STRENGTHS 16
50
Yaowu Xuc27fc142016-08-22 16:08:15 -070051#define REF_FRAMES_LOG2 3
52#define REF_FRAMES (1 << REF_FRAMES_LOG2)
53
54// 4 scratch frames for the new frames to support a maximum of 4 cores decoding
55// in parallel, 3 for scaled references on the encoder.
56// TODO(hkuang): Add ondemand frame buffers instead of hardcoding the number
57// of framebuffers.
58// TODO(jkoleszar): These 3 extra references could probably come from the
59// normal reference pool.
60#define FRAME_BUFFERS (REF_FRAMES + 7)
61
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +010062#if CONFIG_REFERENCE_BUFFER
63/* Constant values while waiting for the sequence header */
64#define FRAME_ID_NUMBERS_PRESENT_FLAG 1
65#define FRAME_ID_LENGTH_MINUS7 8 // Allows frame id up to 2^15-1
66#define DELTA_FRAME_ID_LENGTH_MINUS2 12 // Allows frame id deltas up to 2^14-1
67#endif
68
Thomas Daededa4d8b92017-06-05 15:44:14 -070069#if CONFIG_NO_FRAME_CONTEXT_SIGNALING
70#define FRAME_CONTEXTS (FRAME_BUFFERS + 1)
71// Extra frame context which is always kept at default values
72#define FRAME_CONTEXT_DEFAULTS (FRAME_CONTEXTS - 1)
73#else
74
Yaowu Xuc27fc142016-08-22 16:08:15 -070075#if CONFIG_EXT_REFS
76#define FRAME_CONTEXTS_LOG2 3
77#else
78#define FRAME_CONTEXTS_LOG2 2
79#endif
80
81#define FRAME_CONTEXTS (1 << FRAME_CONTEXTS_LOG2)
Thomas Daededa4d8b92017-06-05 15:44:14 -070082#endif // CONFIG_NO_FRAME_CONTEXT_SIGNALING
Yaowu Xuc27fc142016-08-22 16:08:15 -070083
84#define NUM_PING_PONG_BUFFERS 2
85
// How inter blocks choose between single- and compound-reference prediction
// for a frame (signaled via AV1_COMMON::reference_mode).
typedef enum {
  SINGLE_REFERENCE = 0,
  COMPOUND_REFERENCE = 1,
  REFERENCE_MODE_SELECT = 2,  // Selection is made per block rather than per frame.
  REFERENCE_MODES = 3,        // Count of the modes above; not a valid mode itself.
} REFERENCE_MODE;
92
#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
// Which frame contexts should be reset to default values before coding a
// frame (used for AV1_COMMON::reset_frame_context).
typedef enum {
  RESET_FRAME_CONTEXT_NONE = 0,     // Keep contexts as they are.
  RESET_FRAME_CONTEXT_CURRENT = 1,  // Reset only the current frame's context.
  RESET_FRAME_CONTEXT_ALL = 2,      // Reset all frame contexts.
} RESET_FRAME_CONTEXT_MODE;
#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700100
// How the frame context is refreshed once a frame has been coded
// (see AV1_COMMON::refresh_frame_context).
typedef enum {
  /**
   * Update frame context to values resulting from forward probability
   * updates signaled in the frame header
   */
  REFRESH_FRAME_CONTEXT_FORWARD,
  /**
   * Update frame context to values resulting from backward probability
   * updates based on entropy/counts in the decoded frame
   */
  REFRESH_FRAME_CONTEXT_BACKWARD,
} REFRESH_FRAME_CONTEXT_MODE;
113
// Motion information saved per MI unit of a coded frame; the array of these
// hangs off RefCntBuffer::mvs (allocated mi_rows * mi_cols entries in
// ensure_mv_buffer) so later frames can reuse the previous frame's MVs.
typedef struct {
  int_mv mv[2];       // Up to two motion vectors (one per reference).
  int_mv pred_mv[2];  // Corresponding predicted motion vectors.
  MV_REFERENCE_FRAME ref_frame[2];  // Reference frame for each MV slot.
} MV_REF;
119
// A reference-counted frame buffer plus the per-frame side data that must
// outlive the frame (saved MVs, global motion, hash table, ...).
typedef struct {
  int ref_count;  // Number of owners; 0 means the slot is free (see get_free_fb).
  MV_REF *mvs;    // Per-MI saved motion info; sized mi_rows * mi_cols.
  int mi_rows;
  int mi_cols;
  // Width and height give the size of the buffer (before any upscaling, unlike
  // the sizes that can be derived from the buf structure)
  int width;
  int height;
#if CONFIG_GLOBAL_MOTION
  WarpedMotionParams global_motion[TOTAL_REFS_PER_FRAME];
#endif  // CONFIG_GLOBAL_MOTION
  aom_codec_frame_buffer_t raw_frame_buffer;
  YV12_BUFFER_CONFIG buf;
#if CONFIG_HASH_ME
  hash_table hash_table;  // Encoder-side block-hash data for motion search.
#endif
#if CONFIG_TEMPMV_SIGNALING
  uint8_t intra_only;  // Nonzero if the stored frame was intra-only.
#endif
  // The Following variables will only be used in frame parallel decode.

  // frame_worker_owner indicates which FrameWorker owns this buffer. NULL means
  // that no FrameWorker owns, or is decoding, this buffer.
  AVxWorker *frame_worker_owner;

  // row and col indicate which position frame has been decoded to in real
  // pixel unit. They are reset to -1 when decoding begins and set to INT_MAX
  // when the frame is fully decoded.
  int row;
  int col;
} RefCntBuffer;
152
// Shared pool of frame buffers, handed to the codec from outside so several
// codec instances / FrameWorkers can draw from the same set of buffers.
typedef struct BufferPool {
// Protect BufferPool from being accessed by several FrameWorkers at
// the same time during frame parallel decode.
// TODO(hkuang): Try to use atomic variable instead of locking the whole pool.
#if CONFIG_MULTITHREAD
  pthread_mutex_t pool_mutex;
#endif

  // Private data associated with the frame buffer callbacks.
  void *cb_priv;

  // Application-supplied callbacks used to obtain and release frame buffers.
  aom_get_frame_buffer_cb_fn_t get_fb_cb;
  aom_release_frame_buffer_cb_fn_t release_fb_cb;

  // The reference-counted buffers themselves (fixed-size slot array).
  RefCntBuffer frame_bufs[FRAME_BUFFERS];

  // Frame buffers allocated internally by the codec.
  InternalFrameBufferList int_frame_buffers;
} BufferPool;
172
// Top-level state shared by the AV1 encoder and decoder for the frame being
// coded: frame geometry, quantizer state, MODE_INFO arrays, segmentation,
// tiling, entropy contexts and reference-buffer bookkeeping. Fields wrapped
// in CONFIG_* blocks exist only in builds with that experiment enabled.
typedef struct AV1Common {
  struct aom_internal_error_info error;
  aom_color_space_t color_space;
#if CONFIG_COLORSPACE_HEADERS
  aom_transfer_function_t transfer_function;
  aom_chroma_sample_position_t chroma_sample_position;
#endif
  int color_range;
  int width;   // Coded frame width in pixels.
  int height;  // Coded frame height in pixels.
  int render_width;
  int render_height;
  int last_width;
  int last_height;

  // TODO(jkoleszar): this implies chroma ss right now, but could vary per
  // plane. Revisit as part of the future change to YV12_BUFFER_CONFIG to
  // support additional planes.
  int subsampling_x;
  int subsampling_y;

#if CONFIG_HIGHBITDEPTH
  // Marks if we need to use 16bit frame buffers (1: yes, 0: no).
  int use_highbitdepth;
#endif
  YV12_BUFFER_CONFIG *frame_to_show;
  RefCntBuffer *prev_frame;

  // TODO(hkuang): Combine this with cur_buf in macroblockd.
  RefCntBuffer *cur_frame;

  int ref_frame_map[REF_FRAMES]; /* maps fb_idx to reference slot */

  // Prepare ref_frame_map for the next frame.
  // Only used in frame parallel decode.
  int next_ref_frame_map[REF_FRAMES];

  // TODO(jkoleszar): could expand active_ref_idx to 4, with 0 as intra, and
  // roll new_fb_idx into it.

  // Each Inter frame can reference INTER_REFS_PER_FRAME buffers
  RefBuffer frame_refs[INTER_REFS_PER_FRAME];

  // Index into buffer_pool->frame_bufs of the frame being coded
  // (see get_frame_new_buffer).
  int new_fb_idx;

  FRAME_TYPE last_frame_type; /* last frame's frame type for motion search.*/
  FRAME_TYPE frame_type;

  int show_frame;
  int last_show_frame;
  int show_existing_frame;
#if CONFIG_EXT_REFS
  // Flag for a frame used as a reference - not written to the bitstream
  int is_reference_frame;
#endif  // CONFIG_EXT_REFS

  // Flag signaling that the frame is encoded using only INTRA modes.
  uint8_t intra_only;
  uint8_t last_intra_only;

  int allow_high_precision_mv;

  int allow_screen_content_tools;
#if CONFIG_EXT_INTER
#if CONFIG_INTERINTRA
  int allow_interintra_compound;
#endif  // CONFIG_INTERINTRA
#if CONFIG_WEDGE || CONFIG_COMPOUND_SEGMENT
  int allow_masked_compound;
#endif  // CONFIG_WEDGE || CONFIG_COMPOUND_SEGMENT
#endif  // CONFIG_EXT_INTER

#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
  // Flag signaling which frame contexts should be reset to default values.
  RESET_FRAME_CONTEXT_MODE reset_frame_context;
#endif

  // MBs, mb_rows/cols is in 16-pixel units; mi_rows/cols is in
  // MODE_INFO (8-pixel) units.
  int MBs;
  int mb_rows, mi_rows;
  int mb_cols, mi_cols;
  int mi_stride;

  /* profile settings */
  TX_MODE tx_mode;

  // Quantizer state signaled in the frame header.
  int base_qindex;
  int y_dc_delta_q;
  int uv_dc_delta_q;
  int uv_ac_delta_q;
  int16_t y_dequant[MAX_SEGMENTS][2];
  int16_t uv_dequant[MAX_SEGMENTS][2];

#if CONFIG_AOM_QM
  // Global quant matrix tables
  qm_val_t *giqmatrix[NUM_QM_LEVELS][2][2][TX_SIZES_ALL];
  qm_val_t *gqmatrix[NUM_QM_LEVELS][2][2][TX_SIZES_ALL];

  // Local quant matrix tables for each frame
  qm_val_t *y_iqmatrix[MAX_SEGMENTS][2][TX_SIZES_ALL];
  qm_val_t *uv_iqmatrix[MAX_SEGMENTS][2][TX_SIZES_ALL];
  // Encoder
  qm_val_t *y_qmatrix[MAX_SEGMENTS][2][TX_SIZES_ALL];
  qm_val_t *uv_qmatrix[MAX_SEGMENTS][2][TX_SIZES_ALL];

  int using_qmatrix;
  int min_qmlevel;
  int max_qmlevel;
#endif
#if CONFIG_NEW_QUANT
  dequant_val_type_nuq y_dequant_nuq[MAX_SEGMENTS][QUANT_PROFILES][COEF_BANDS];
  dequant_val_type_nuq uv_dequant_nuq[MAX_SEGMENTS][QUANT_PROFILES][COEF_BANDS];
#endif

  /* We allocate a MODE_INFO struct for each macroblock, together with
     an extra row on top and column on the left to simplify prediction. */
  int mi_alloc_size;
  MODE_INFO *mip; /* Base of allocated array */
  MODE_INFO *mi;  /* Corresponds to upper left visible macroblock */

  // TODO(agrange): Move prev_mi into encoder structure.
  // prev_mip and prev_mi will only be allocated in encoder.
  MODE_INFO *prev_mip; /* MODE_INFO array 'mip' from last decoded frame */
  MODE_INFO *prev_mi;  /* 'mi' from last frame (points into prev_mip) */

  // Separate mi functions between encoder and decoder.
  int (*alloc_mi)(struct AV1Common *cm, int mi_size);
  void (*free_mi)(struct AV1Common *cm);
  void (*setup_mi)(struct AV1Common *cm);

  // Grid of pointers to 8x8 MODE_INFO structs. Any 8x8 not in the visible
  // area will be NULL.
  MODE_INFO **mi_grid_base;
  MODE_INFO **mi_grid_visible;
  MODE_INFO **prev_mi_grid_base;
  MODE_INFO **prev_mi_grid_visible;

  // Whether to use previous frame's motion vectors for prediction.
  int use_prev_frame_mvs;

  // Persistent mb segment id map used in prediction.
  int seg_map_idx;
  int prev_seg_map_idx;

  uint8_t *seg_map_array[NUM_PING_PONG_BUFFERS];
  uint8_t *last_frame_seg_map;
  uint8_t *current_frame_seg_map;
  int seg_map_alloc_size;

  InterpFilter interp_filter;

  loop_filter_info_n lf_info;
#if CONFIG_FRAME_SUPERRES
  // The numerator of the superres scale; the denominator is fixed.
  uint8_t superres_scale_numerator;
  uint8_t superres_kf_scale_numerator;
  int superres_upscaled_width;
  int superres_upscaled_height;
#endif  // CONFIG_FRAME_SUPERRES
#if CONFIG_LOOP_RESTORATION
  RestorationInfo rst_info[MAX_MB_PLANE];
  RestorationInternal rst_internal;
#endif  // CONFIG_LOOP_RESTORATION

  // Flag signaling how frame contexts should be updated at the end of
  // a frame decode
  REFRESH_FRAME_CONTEXT_MODE refresh_frame_context;

  int ref_frame_sign_bias[TOTAL_REFS_PER_FRAME]; /* Two state 0, 1 */

  struct loopfilter lf;
  struct segmentation seg;
  int all_lossless;
  int frame_parallel_decode;  // frame-based threading.

#if CONFIG_EXT_TX
  int reduced_tx_set_used;
#endif  // CONFIG_EXT_TX

// Context probabilities for reference frame prediction
#if CONFIG_EXT_REFS
  MV_REFERENCE_FRAME comp_fwd_ref[FWD_REFS];
  MV_REFERENCE_FRAME comp_bwd_ref[BWD_REFS];
#else
  MV_REFERENCE_FRAME comp_fixed_ref;
  MV_REFERENCE_FRAME comp_var_ref[COMP_REFS];
#endif  // CONFIG_EXT_REFS
  REFERENCE_MODE reference_mode;

  FRAME_CONTEXT *fc;             /* this frame entropy */
  FRAME_CONTEXT *frame_contexts; // FRAME_CONTEXTS
  FRAME_CONTEXT *pre_fc;         // Context referenced in this frame
#if !CONFIG_NO_FRAME_CONTEXT_SIGNALING
  unsigned int frame_context_idx; /* Context to use/update */
#endif
  FRAME_COUNTS counts;

  unsigned int current_video_frame;
  BITSTREAM_PROFILE profile;

  // AOM_BITS_8 in profile 0 or 1, AOM_BITS_10 or AOM_BITS_12 in profile 2 or 3.
  aom_bit_depth_t bit_depth;
  aom_bit_depth_t dequant_bit_depth;  // bit_depth of current dequantizer

  int error_resilient_mode;

  // Tiling configuration for the current frame.
  int log2_tile_cols, log2_tile_rows;  // Used in non-large_scale_tile_coding.
  int tile_cols, tile_rows;
  int last_tile_cols, last_tile_rows;
  int tile_width, tile_height;  // In MI units
#if CONFIG_EXT_TILE
  unsigned int large_scale_tile;
  unsigned int single_tile_decoding;
#endif  // CONFIG_EXT_TILE

#if CONFIG_DEPENDENT_HORZTILES
  int dependent_horz_tiles;
  int tile_group_start_row[MAX_TILE_ROWS][MAX_TILE_COLS];
  int tile_group_start_col[MAX_TILE_ROWS][MAX_TILE_COLS];
#endif
#if CONFIG_LOOPFILTERING_ACROSS_TILES
  int loop_filter_across_tiles_enabled;
#endif  // CONFIG_LOOPFILTERING_ACROSS_TILES

  int byte_alignment;
  int skip_loop_filter;

  // Private data associated with the frame buffer callbacks.
  void *cb_priv;
  aom_get_frame_buffer_cb_fn_t get_fb_cb;
  aom_release_frame_buffer_cb_fn_t release_fb_cb;

  // Handles memory for the codec.
  InternalFrameBufferList int_frame_buffers;

  // External BufferPool passed from outside.
  BufferPool *buffer_pool;

  // Above-row coding contexts, shared by the whole frame (one row of state).
  PARTITION_CONTEXT *above_seg_context;
  ENTROPY_CONTEXT *above_context[MAX_MB_PLANE];
#if CONFIG_VAR_TX
  TXFM_CONTEXT *above_txfm_context;
  TXFM_CONTEXT *top_txfm_context[MAX_MB_PLANE];
  TXFM_CONTEXT left_txfm_context[MAX_MB_PLANE][2 * MAX_MIB_SIZE];
#endif
  int above_context_alloc_cols;

  // scratch memory for intraonly/keyframe forward updates from default tables
  // - this is intentionally not placed in FRAME_CONTEXT since it's reset upon
  // each keyframe and not used afterwards
  aom_prob kf_y_prob[INTRA_MODES][INTRA_MODES][INTRA_MODES - 1];
#if CONFIG_GLOBAL_MOTION
  WarpedMotionParams global_motion[TOTAL_REFS_PER_FRAME];
#endif

  BLOCK_SIZE sb_size;  // Size of the superblock used for this frame
  int mib_size;        // Size of the superblock in units of MI blocks
  int mib_size_log2;   // Log 2 of above.
#if CONFIG_CDEF
  int cdef_pri_damping;
  int cdef_sec_damping;
  int nb_cdef_strengths;
  int cdef_strengths[CDEF_MAX_STRENGTHS];
  int cdef_uv_strengths[CDEF_MAX_STRENGTHS];
  int cdef_bits;
#endif

#if CONFIG_DELTA_Q
  int delta_q_present_flag;
  // Resolution of delta quant
  int delta_q_res;
#if CONFIG_EXT_DELTA_Q
  int delta_lf_present_flag;
  // Resolution of delta lf level
  int delta_lf_res;
#endif
#endif
  int num_tg;  // Number of tile groups.
#if CONFIG_REFERENCE_BUFFER
  int current_frame_id;
  int ref_frame_id[REF_FRAMES];
  int valid_for_referencing[REF_FRAMES];
  int refresh_mask;
  int invalid_delta_frame_id_minus1;
#endif
#if CONFIG_ANS && ANS_MAX_SYMBOLS
  int ans_window_size_log2;
#endif
#if CONFIG_NCOBMC_ADAPT_WEIGHT
  NCOBMC_KERNELS ncobmc_kernels[ADAPT_OVERLAP_BLOCKS][ALL_NCOBMC_MODES];
#endif
} AV1_COMMON;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700466
#if CONFIG_REFERENCE_BUFFER
/* Initial version of sequence header structure */
// Holds the frame-id signaling parameters parsed from the sequence header.
typedef struct SequenceHeader {
  int frame_id_numbers_present_flag;
  int frame_id_length_minus7;        // Frame id bit length is this value + 7.
  int delta_frame_id_length_minus2;  // Delta frame id length is this value + 2.
} SequenceHeader;
#endif
475
Yaowu Xuc27fc142016-08-22 16:08:15 -0700476// TODO(hkuang): Don't need to lock the whole pool after implementing atomic
477// frame reference count.
478static void lock_buffer_pool(BufferPool *const pool) {
479#if CONFIG_MULTITHREAD
480 pthread_mutex_lock(&pool->pool_mutex);
481#else
482 (void)pool;
483#endif
484}
485
486static void unlock_buffer_pool(BufferPool *const pool) {
487#if CONFIG_MULTITHREAD
488 pthread_mutex_unlock(&pool->pool_mutex);
489#else
490 (void)pool;
491#endif
492}
493
Yaowu Xuf883b422016-08-30 14:01:10 -0700494static INLINE YV12_BUFFER_CONFIG *get_ref_frame(AV1_COMMON *cm, int index) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700495 if (index < 0 || index >= REF_FRAMES) return NULL;
496 if (cm->ref_frame_map[index] < 0) return NULL;
497 assert(cm->ref_frame_map[index] < FRAME_BUFFERS);
498 return &cm->buffer_pool->frame_bufs[cm->ref_frame_map[index]].buf;
499}
500
501static INLINE YV12_BUFFER_CONFIG *get_frame_new_buffer(
Yaowu Xuf883b422016-08-30 14:01:10 -0700502 const AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700503 return &cm->buffer_pool->frame_bufs[cm->new_fb_idx].buf;
504}
505
Yaowu Xuf883b422016-08-30 14:01:10 -0700506static INLINE int get_free_fb(AV1_COMMON *cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700507 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
508 int i;
509
510 lock_buffer_pool(cm->buffer_pool);
511 for (i = 0; i < FRAME_BUFFERS; ++i)
512 if (frame_bufs[i].ref_count == 0) break;
513
514 if (i != FRAME_BUFFERS) {
515 frame_bufs[i].ref_count = 1;
516 } else {
517 // Reset i to be INVALID_IDX to indicate no free buffer found.
518 i = INVALID_IDX;
519 }
520
521 unlock_buffer_pool(cm->buffer_pool);
522 return i;
523}
524
525static INLINE void ref_cnt_fb(RefCntBuffer *bufs, int *idx, int new_idx) {
526 const int ref_index = *idx;
527
528 if (ref_index >= 0 && bufs[ref_index].ref_count > 0)
529 bufs[ref_index].ref_count--;
530
531 *idx = new_idx;
532
533 bufs[new_idx].ref_count++;
534}
535
Rupert Swarbrick1f990a62017-07-11 11:09:33 +0100536#if CONFIG_TEMPMV_SIGNALING
537// Returns 1 if this frame might use mvs from some previous frame. This
538// function doesn't consider whether prev_frame is actually suitable (see
539// frame_can_use_prev_frame_mvs for that)
540static INLINE int frame_might_use_prev_frame_mvs(const AV1_COMMON *cm) {
541 return !cm->error_resilient_mode && !cm->intra_only;
542}
543
544// Returns 1 if this frame really can use MVs from some previous frame.
545static INLINE int frame_can_use_prev_frame_mvs(const AV1_COMMON *cm) {
546 return (frame_might_use_prev_frame_mvs(cm) && cm->last_show_frame &&
547 cm->prev_frame && !cm->prev_frame->intra_only &&
548 cm->width == cm->prev_frame->width &&
549 cm->height == cm->prev_frame->height);
550}
551#endif
552
553static INLINE void ensure_mv_buffer(RefCntBuffer *buf, AV1_COMMON *cm) {
554 if (buf->mvs == NULL || buf->mi_rows < cm->mi_rows ||
555 buf->mi_cols < cm->mi_cols) {
556 aom_free(buf->mvs);
557 buf->mi_rows = cm->mi_rows;
558 buf->mi_cols = cm->mi_cols;
559 CHECK_MEM_ERROR(
560 cm, buf->mvs,
561 (MV_REF *)aom_calloc(cm->mi_rows * cm->mi_cols, sizeof(*buf->mvs)));
562 }
563}
564
Zoe Liu7b1ec7a2017-05-24 22:28:24 -0700565#if CONFIG_VAR_REFS
566#define LAST_IS_VALID(cm) ((cm)->frame_refs[LAST_FRAME - 1].is_valid)
567#define LAST2_IS_VALID(cm) ((cm)->frame_refs[LAST2_FRAME - 1].is_valid)
568#define LAST3_IS_VALID(cm) ((cm)->frame_refs[LAST3_FRAME - 1].is_valid)
569#define GOLDEN_IS_VALID(cm) ((cm)->frame_refs[GOLDEN_FRAME - 1].is_valid)
570#define BWDREF_IS_VALID(cm) ((cm)->frame_refs[BWDREF_FRAME - 1].is_valid)
Zoe Liu97ad0582017-02-09 10:51:00 -0800571#if CONFIG_ALTREF2
572#define ALTREF2_IS_VALID(cm) ((cm)->frame_refs[ALTREF2_FRAME - 1].is_valid)
573#endif // CONFIG_ALTREF2
Zoe Liu7b1ec7a2017-05-24 22:28:24 -0700574#define ALTREF_IS_VALID(cm) ((cm)->frame_refs[ALTREF_FRAME - 1].is_valid)
575
576#define L_OR_L2(cm) (LAST_IS_VALID(cm) || LAST2_IS_VALID(cm))
577#define L_AND_L2(cm) (LAST_IS_VALID(cm) && LAST2_IS_VALID(cm))
Zoe Liufcf5fa22017-06-26 16:00:38 -0700578#define L_AND_L3(cm) (LAST_IS_VALID(cm) && LAST3_IS_VALID(cm))
579#define L_AND_G(cm) (LAST_IS_VALID(cm) && GOLDEN_IS_VALID(cm))
Zoe Liu7b1ec7a2017-05-24 22:28:24 -0700580
581#define L3_OR_G(cm) (LAST3_IS_VALID(cm) || GOLDEN_IS_VALID(cm))
582#define L3_AND_G(cm) (LAST3_IS_VALID(cm) && GOLDEN_IS_VALID(cm))
583
Zoe Liu97ad0582017-02-09 10:51:00 -0800584#if CONFIG_ALTREF2
585#define BWD_OR_ALT2(cm) (BWDREF_IS_VALID(cm) || ALTREF2_IS_VALID(cm))
586#define BWD_AND_ALT2(cm) (BWDREF_IS_VALID(cm) && ALTREF2_IS_VALID(cm))
587#endif // CONFIG_ALTREF2
Zoe Liu7b1ec7a2017-05-24 22:28:24 -0700588#define BWD_OR_ALT(cm) (BWDREF_IS_VALID(cm) || ALTREF_IS_VALID(cm))
589#define BWD_AND_ALT(cm) (BWDREF_IS_VALID(cm) && ALTREF_IS_VALID(cm))
590#endif // CONFIG_VAR_REFS
591
Yaowu Xuf883b422016-08-30 14:01:10 -0700592static INLINE int mi_cols_aligned_to_sb(const AV1_COMMON *cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700593 return ALIGN_POWER_OF_TWO(cm->mi_cols, cm->mib_size_log2);
594}
595
Yaowu Xuf883b422016-08-30 14:01:10 -0700596static INLINE int mi_rows_aligned_to_sb(const AV1_COMMON *cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700597 return ALIGN_POWER_OF_TWO(cm->mi_rows, cm->mib_size_log2);
598}
599
Yaowu Xuf883b422016-08-30 14:01:10 -0700600static INLINE int frame_is_intra_only(const AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700601 return cm->frame_type == KEY_FRAME || cm->intra_only;
602}
603
// Wires a MACROBLOCKD up to frame-level state in cm: points every plane at
// the shared dqcoeff scratch buffer, copies the per-segment dequant tables
// (luma tables for the Y plane, chroma tables otherwise), and hooks up the
// frame entropy context, above-row contexts and error info.
static INLINE void av1_init_macroblockd(AV1_COMMON *cm, MACROBLOCKD *xd,
#if CONFIG_PVQ
                                        tran_low_t *pvq_ref_coeff,
#endif
#if CONFIG_CFL
                                        CFL_CTX *cfl,
#endif
                                        tran_low_t *dqcoeff) {
  for (int i = 0; i < MAX_MB_PLANE; ++i) {
    // All planes share the same coefficient scratch buffers.
    xd->plane[i].dqcoeff = dqcoeff;
#if CONFIG_PVQ
    xd->plane[i].pvq_ref_coeff = pvq_ref_coeff;
#endif
    xd->above_context[i] = cm->above_context[i];
    if (xd->plane[i].plane_type == PLANE_TYPE_Y) {
      memcpy(xd->plane[i].seg_dequant, cm->y_dequant, sizeof(cm->y_dequant));
#if CONFIG_AOM_QM
      memcpy(xd->plane[i].seg_iqmatrix, cm->y_iqmatrix, sizeof(cm->y_iqmatrix));
#endif

#if CONFIG_NEW_QUANT
      memcpy(xd->plane[i].seg_dequant_nuq, cm->y_dequant_nuq,
             sizeof(cm->y_dequant_nuq));
#endif
    } else {
      memcpy(xd->plane[i].seg_dequant, cm->uv_dequant, sizeof(cm->uv_dequant));
#if CONFIG_AOM_QM
      memcpy(xd->plane[i].seg_iqmatrix, cm->uv_iqmatrix,
             sizeof(cm->uv_iqmatrix));
#endif
#if CONFIG_NEW_QUANT
      memcpy(xd->plane[i].seg_dequant_nuq, cm->uv_dequant_nuq,
             sizeof(cm->uv_dequant_nuq));
#endif
    }
  }
  xd->fc = cm->fc;
  xd->above_seg_context = cm->above_seg_context;
#if CONFIG_VAR_TX
  xd->above_txfm_context = cm->above_txfm_context;
#endif
#if CONFIG_CFL
  cfl_init(cfl, cm);
  xd->cfl = cfl;
#endif
  xd->mi_stride = cm->mi_stride;
  xd->error_info = &cm->error;
}
652
// Points each plane's above/left entropy context pointers at the slots for
// the block at (mi_row, mi_col). For sub-8x8 blocks with subsampled chroma,
// odd MI rows/cols are rounded down so both halves of the 8x8 share the
// chroma context slot.
static INLINE void set_skip_context(MACROBLOCKD *xd, int mi_row, int mi_col) {
  int i;
  int row_offset = mi_row;
  int col_offset = mi_col;
  for (i = 0; i < MAX_MB_PLANE; ++i) {
    struct macroblockd_plane *const pd = &xd->plane[i];
#if CONFIG_CHROMA_SUB8X8
    if (xd->mi[0]->mbmi.sb_type < BLOCK_8X8) {
      // Offset the buffer pointer
      if (pd->subsampling_y && (mi_row & 0x01)) row_offset = mi_row - 1;
      if (pd->subsampling_x && (mi_col & 0x01)) col_offset = mi_col - 1;
    }
#endif
    // Convert MI units to context-array indices (left context wraps per SB).
    int above_idx = col_offset << (MI_SIZE_LOG2 - tx_size_wide_log2[0]);
    int left_idx = (row_offset & MAX_MIB_MASK)
                   << (MI_SIZE_LOG2 - tx_size_high_log2[0]);
    pd->above_context = &xd->above_context[i][above_idx >> pd->subsampling_x];
    pd->left_context = &xd->left_context[i][left_idx >> pd->subsampling_y];
  }
}
673
674static INLINE int calc_mi_size(int len) {
675 // len is in mi units.
676 return len + MAX_MIB_SIZE;
677}
678
Jingning Hanfaad0e12016-12-07 10:54:57 -0800679static INLINE void set_plane_n4(MACROBLOCKD *const xd, int bw, int bh) {
Jingning Hana6923f72016-07-15 08:50:14 -0700680 int i;
681 for (i = 0; i < MAX_MB_PLANE; i++) {
682 xd->plane[i].n4_w = (bw << 1) >> xd->plane[i].subsampling_x;
683 xd->plane[i].n4_h = (bh << 1) >> xd->plane[i].subsampling_y;
Jingning Hanfaad0e12016-12-07 10:54:57 -0800684
685 xd->plane[i].width = (bw * MI_SIZE) >> xd->plane[i].subsampling_x;
686 xd->plane[i].height = (bh * MI_SIZE) >> xd->plane[i].subsampling_y;
Jingning Hanc20dc8e2017-02-17 15:37:28 -0800687
Jingning Han31b6a4f2017-02-23 11:05:53 -0800688#if !CONFIG_CHROMA_2X2
Jingning Hanc20dc8e2017-02-17 15:37:28 -0800689 xd->plane[i].width = AOMMAX(xd->plane[i].width, 4);
690 xd->plane[i].height = AOMMAX(xd->plane[i].height, 4);
Jingning Han31b6a4f2017-02-23 11:05:53 -0800691#endif
Jingning Hana6923f72016-07-15 08:50:14 -0700692 }
693}
694
// Positions a MACROBLOCKD at MI location (mi_row, mi_col) for a block of
// bw x bh MI units: computes the distances to the frame edges (in 1/8-pel),
// determines above/left neighbor availability within the tile, caches the
// neighboring MODE_INFO pointers, and records the block shape flags used by
// rectangular-partition handling.
static INLINE void set_mi_row_col(MACROBLOCKD *xd, const TileInfo *const tile,
                                  int mi_row, int bh, int mi_col, int bw,
#if CONFIG_DEPENDENT_HORZTILES
                                  int dependent_horz_tile_flag,
#endif  // CONFIG_DEPENDENT_HORZTILES
                                  int mi_rows, int mi_cols) {
  // Edge distances in units of 1/8 pixel (MI_SIZE pixels * 8).
  xd->mb_to_top_edge = -((mi_row * MI_SIZE) * 8);
  xd->mb_to_bottom_edge = ((mi_rows - bh - mi_row) * MI_SIZE) * 8;
  xd->mb_to_left_edge = -((mi_col * MI_SIZE) * 8);
  xd->mb_to_right_edge = ((mi_cols - bw - mi_col) * MI_SIZE) * 8;

#if CONFIG_DEPENDENT_HORZTILES
  if (dependent_horz_tile_flag) {
    // A dependent tile may also read above rows across a tile-group boundary.
    xd->up_available = (mi_row > tile->mi_row_start) || !tile->tg_horz_boundary;
  } else {
#endif  // CONFIG_DEPENDENT_HORZTILES
    // Are edges available for intra prediction?
    xd->up_available = (mi_row > tile->mi_row_start);
#if CONFIG_DEPENDENT_HORZTILES
  }
#endif  // CONFIG_DEPENDENT_HORZTILES

  xd->left_available = (mi_col > tile->mi_col_start);
#if CONFIG_CHROMA_SUB8X8
  // For sub-8x8 blocks with subsampled chroma, the chroma block spans the
  // neighboring MI as well, so availability is re-checked one MI earlier.
  xd->chroma_up_available = xd->up_available;
  xd->chroma_left_available = xd->left_available;
  if (xd->plane[1].subsampling_x && bw < mi_size_wide[BLOCK_8X8])
    xd->chroma_left_available = (mi_col - 1) > tile->mi_col_start;
  if (xd->plane[1].subsampling_y && bh < mi_size_high[BLOCK_8X8])
    xd->chroma_up_available = (mi_row - 1) > tile->mi_row_start;
#endif
  if (xd->up_available) {
    xd->above_mi = xd->mi[-xd->mi_stride];
    // above_mi may be NULL in encoder's first pass.
    xd->above_mbmi = xd->above_mi ? &xd->above_mi->mbmi : NULL;
  } else {
    xd->above_mi = NULL;
    xd->above_mbmi = NULL;
  }

  if (xd->left_available) {
    xd->left_mi = xd->mi[-1];
    // left_mi may be NULL in encoder's first pass.
    xd->left_mbmi = xd->left_mi ? &xd->left_mi->mbmi : NULL;
  } else {
    xd->left_mi = NULL;
    xd->left_mbmi = NULL;
  }

  xd->n8_h = bh;
  xd->n8_w = bw;
  // is_sec_rect marks the second (lower/right) part of a rectangular block
  // pair, judged from the block's alignment within the larger square.
  xd->is_sec_rect = 0;
  if (xd->n8_w < xd->n8_h)
    if (mi_col & (xd->n8_h - 1)) xd->is_sec_rect = 1;

  if (xd->n8_w > xd->n8_h)
    if (mi_row & (xd->n8_w - 1)) xd->is_sec_rect = 1;
}
753
Yaowu Xuf883b422016-08-30 14:01:10 -0700754static INLINE const aom_prob *get_y_mode_probs(const AV1_COMMON *cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700755 const MODE_INFO *mi,
756 const MODE_INFO *above_mi,
757 const MODE_INFO *left_mi,
758 int block) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700759 const PREDICTION_MODE above = av1_above_block_mode(mi, above_mi, block);
760 const PREDICTION_MODE left = av1_left_block_mode(mi, left_mi, block);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700761 return cm->kf_y_prob[above][left];
762}
763
Thomas Davies1bfb5ed2017-01-11 15:28:11 +0000764static INLINE aom_cdf_prob *get_y_mode_cdf(FRAME_CONTEXT *tile_ctx,
765 const MODE_INFO *mi,
Thomas9ac55082016-09-23 18:04:17 +0100766 const MODE_INFO *above_mi,
767 const MODE_INFO *left_mi,
768 int block) {
Nathan E. Egge3ef926e2016-09-07 18:20:41 -0400769 const PREDICTION_MODE above = av1_above_block_mode(mi, above_mi, block);
770 const PREDICTION_MODE left = av1_left_block_mode(mi, left_mi, block);
Thomas Davies1bfb5ed2017-01-11 15:28:11 +0000771 return tile_ctx->kf_y_cdf[above][left];
Nathan E. Egge3ef926e2016-09-07 18:20:41 -0400772}
Nathan E. Egge3ef926e2016-09-07 18:20:41 -0400773
// Write the partition context implied by splitting `bsize` into `subsize`
// into the above-row and left-column context buffers at (mi_row, mi_col).
static INLINE void update_partition_context(MACROBLOCKD *xd, int mi_row,
                                            int mi_col, BLOCK_SIZE subsize,
                                            BLOCK_SIZE bsize) {
  PARTITION_CONTEXT *const above_ctx = xd->above_seg_context + mi_col;
  PARTITION_CONTEXT *const left_ctx =
      xd->left_seg_context + (mi_row & MAX_MIB_MASK);

#if CONFIG_EXT_PARTITION_TYPES
  // Extended partitions can be rectangular, so width and height differ.
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  memset(above_ctx, partition_context_lookup[subsize].above, bw);
  memset(left_ctx, partition_context_lookup[subsize].left, bh);
#else
  // num_4x4_blocks_wide_lookup[bsize] / 2
  const int bs = mi_size_wide[bsize];

  // update the partition context at the end notes. set partition bits
  // of block sizes larger than the current one to be one, and partition
  // bits of smaller block sizes to be zero.
  memset(above_ctx, partition_context_lookup[subsize].above, bs);
  memset(left_ctx, partition_context_lookup[subsize].left, bs);
#endif  // CONFIG_EXT_PARTITION_TYPES
}
797
Jingning Han18c53c82017-02-17 14:49:57 -0800798#if CONFIG_CB4X4
// Return non-zero when the block at (mi_row, mi_col) of size `bsize` is the
// one that carries the chroma information for its position, given the plane
// subsampling. With subsampled chroma, several sub-8x8 luma blocks share one
// chroma block and only one of them is the chroma reference.
static INLINE int is_chroma_reference(int mi_row, int mi_col, BLOCK_SIZE bsize,
                                      int subsampling_x, int subsampling_y) {
#if CONFIG_CHROMA_2X2
  // With 2x2 chroma every block codes its own chroma; the code below this
  // return is compiled but unreachable in this configuration.
  return 1;
#endif

#if CONFIG_CHROMA_SUB8X8
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];

  // The reference position is the last (odd row/col) block of a shared
  // group, unless the block dimension or subsampling makes sharing moot.
  int ref_pos = ((mi_row & 0x01) || !(bh & 0x01) || !subsampling_y) &&
                ((mi_col & 0x01) || !(bw & 0x01) || !subsampling_x);

  return ref_pos;
#else
  // Without sub-8x8 chroma: even-positioned blocks (in each subsampled
  // direction) are the reference; blocks of 8x8 and above always are.
  int ref_pos = !(((mi_row & 0x01) && subsampling_y) ||
                  ((mi_col & 0x01) && subsampling_x));

  if (bsize >= BLOCK_8X8) ref_pos = 1;

  return ref_pos;
#endif
}
Jingning Han2d2dac22017-04-11 09:41:10 -0700822
Yue Chen8e689e42017-06-02 10:56:10 -0700823#if CONFIG_SUPERTX
824static INLINE int need_handle_chroma_sub8x8(BLOCK_SIZE bsize, int subsampling_x,
825 int subsampling_y) {
826 const int bw = mi_size_wide[bsize];
827 const int bh = mi_size_high[bsize];
828
829 if (bsize >= BLOCK_8X8 ||
830 ((!(bh & 0x01) || !subsampling_y) && (!(bw & 0x01) || !subsampling_x)))
831 return 0;
832 else
833 return 1;
834}
835#endif
836
Yaowu Xu4ff59b52017-04-24 12:41:56 -0700837static INLINE BLOCK_SIZE scale_chroma_bsize(BLOCK_SIZE bsize, int subsampling_x,
838 int subsampling_y) {
Jingning Han2d2dac22017-04-11 09:41:10 -0700839 BLOCK_SIZE bs = bsize;
840
841 if (bs < BLOCK_8X8) {
842 if (subsampling_x == 1 && subsampling_y == 1)
843 bs = BLOCK_8X8;
844 else if (subsampling_x == 1)
845 bs = BLOCK_8X4;
846 else if (subsampling_y == 1)
847 bs = BLOCK_4X8;
848 }
849
850 return bs;
851}
Jingning Han18c53c82017-02-17 14:49:57 -0800852#endif
853
Yaowu Xuc27fc142016-08-22 16:08:15 -0700854#if CONFIG_EXT_PARTITION_TYPES
// Update the partition contexts for the extended partition types. The
// compound types (HORZ_A/B, VERT_A/B) update each of their two halves with
// the context of the sub-block actually coded there.
static INLINE void update_ext_partition_context(MACROBLOCKD *xd, int mi_row,
                                                int mi_col, BLOCK_SIZE subsize,
                                                BLOCK_SIZE bsize,
                                                PARTITION_TYPE partition) {
  if (bsize >= BLOCK_8X8) {
    const int hbs = mi_size_wide[bsize] / 2;
    BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
    switch (partition) {
      case PARTITION_SPLIT:
        if (bsize != BLOCK_8X8) break;
      /* fallthrough: an 8x8 SPLIT is recorded like the simple partitions. */
      case PARTITION_NONE:
      case PARTITION_HORZ:
      case PARTITION_VERT:
      case PARTITION_HORZ_4:
      case PARTITION_VERT_4:
        update_partition_context(xd, mi_row, mi_col, subsize, bsize);
        break;
      case PARTITION_HORZ_A:
        update_partition_context(xd, mi_row, mi_col, bsize2, subsize);
        update_partition_context(xd, mi_row + hbs, mi_col, subsize, subsize);
        break;
      case PARTITION_HORZ_B:
        update_partition_context(xd, mi_row, mi_col, subsize, subsize);
        update_partition_context(xd, mi_row + hbs, mi_col, bsize2, subsize);
        break;
      case PARTITION_VERT_A:
        update_partition_context(xd, mi_row, mi_col, bsize2, subsize);
        update_partition_context(xd, mi_row, mi_col + hbs, subsize, subsize);
        break;
      case PARTITION_VERT_B:
        update_partition_context(xd, mi_row, mi_col, subsize, subsize);
        update_partition_context(xd, mi_row, mi_col + hbs, bsize2, subsize);
        break;
      default: assert(0 && "Invalid partition type");
    }
  }
}
892#endif // CONFIG_EXT_PARTITION_TYPES
893
// Derive the partition symbol context from the above/left partition context
// buffers. With CONFIG_UNPOISON_PARTITION_CTX, blocks missing rows or
// columns (at the frame edge) get dedicated contexts instead of reading
// potentially uninitialized neighbor state.
static INLINE int partition_plane_context(const MACROBLOCKD *xd, int mi_row,
                                          int mi_col,
#if CONFIG_UNPOISON_PARTITION_CTX
                                          int has_rows, int has_cols,
#endif
                                          BLOCK_SIZE bsize) {
#if CONFIG_UNPOISON_PARTITION_CTX
  const PARTITION_CONTEXT *above_ctx = xd->above_seg_context + mi_col;
  const PARTITION_CONTEXT *left_ctx =
      xd->left_seg_context + (mi_row & MAX_MIB_MASK);
  // Minimum partition point is 8x8. Offset the bsl accordingly.
  const int bsl = mi_width_log2_lookup[bsize] - mi_width_log2_lookup[BLOCK_8X8];
  // One bit per neighbor: was the neighbor split at this size level?
  int above = (*above_ctx >> bsl) & 1, left = (*left_ctx >> bsl) & 1;

  // Partition contexts are only defined for square block sizes.
  assert(b_width_log2_lookup[bsize] == b_height_log2_lookup[bsize]);
  assert(bsl >= 0);

  if (has_rows && has_cols)
    return (left * 2 + above) + bsl * PARTITION_PLOFFSET;
  else if (has_rows && !has_cols)
    return PARTITION_CONTEXTS_PRIMARY + bsl;
  else if (!has_rows && has_cols)
    return PARTITION_CONTEXTS_PRIMARY + PARTITION_BLOCK_SIZES + bsl;
  else
    return PARTITION_CONTEXTS;  // Bogus context, forced SPLIT
#else
  const PARTITION_CONTEXT *above_ctx = xd->above_seg_context + mi_col;
  const PARTITION_CONTEXT *left_ctx =
      xd->left_seg_context + (mi_row & MAX_MIB_MASK);
  // Minimum partition point is 8x8. Offset the bsl accordingly.
  const int bsl = mi_width_log2_lookup[bsize] - mi_width_log2_lookup[BLOCK_8X8];
  int above = (*above_ctx >> bsl) & 1, left = (*left_ctx >> bsl) & 1;

  assert(b_width_log2_lookup[bsize] == b_height_log2_lookup[bsize]);
  assert(bsl >= 0);

  return (left * 2 + above) + bsl * PARTITION_PLOFFSET;
#endif
}
933
Yaowu Xu4ff59b52017-04-24 12:41:56 -0700934static INLINE int max_block_wide(const MACROBLOCKD *xd, BLOCK_SIZE bsize,
935 int plane) {
Jingning Hanf65b8702016-10-31 12:13:20 -0700936 int max_blocks_wide = block_size_wide[bsize];
937 const struct macroblockd_plane *const pd = &xd->plane[plane];
938
939 if (xd->mb_to_right_edge < 0)
940 max_blocks_wide += xd->mb_to_right_edge >> (3 + pd->subsampling_x);
941
942 // Scale the width in the transform block unit.
943 return max_blocks_wide >> tx_size_wide_log2[0];
944}
945
Yaowu Xu4ff59b52017-04-24 12:41:56 -0700946static INLINE int max_block_high(const MACROBLOCKD *xd, BLOCK_SIZE bsize,
947 int plane) {
Jingning Hanf65b8702016-10-31 12:13:20 -0700948 int max_blocks_high = block_size_high[bsize];
949 const struct macroblockd_plane *const pd = &xd->plane[plane];
950
951 if (xd->mb_to_bottom_edge < 0)
952 max_blocks_high += xd->mb_to_bottom_edge >> (3 + pd->subsampling_y);
953
954 // Scale the width in the transform block unit.
955 return max_blocks_high >> tx_size_wide_log2[0];
956}
957
Luc Trudeau3e18e4a2017-06-13 13:54:14 -0400958#if CONFIG_CFL
959static INLINE int max_intra_block_width(const MACROBLOCKD *xd,
960 BLOCK_SIZE plane_bsize, int plane,
961 TX_SIZE tx_size) {
962 const int max_blocks_wide = max_block_wide(xd, plane_bsize, plane)
963 << tx_size_wide_log2[0];
964 return ALIGN_POWER_OF_TWO(max_blocks_wide, tx_size_wide_log2[tx_size]);
965}
966
967static INLINE int max_intra_block_height(const MACROBLOCKD *xd,
968 BLOCK_SIZE plane_bsize, int plane,
969 TX_SIZE tx_size) {
970 const int max_blocks_high = max_block_high(xd, plane_bsize, plane)
971 << tx_size_high_log2[0];
972 return ALIGN_POWER_OF_TWO(max_blocks_high, tx_size_high_log2[tx_size]);
973}
974#endif // CONFIG_CFL
975
Yaowu Xuf883b422016-08-30 14:01:10 -0700976static INLINE void av1_zero_above_context(AV1_COMMON *const cm,
977 int mi_col_start, int mi_col_end) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700978 const int width = mi_col_end - mi_col_start;
David Barkerd706ed22017-05-02 14:16:01 +0100979 const int aligned_width = ALIGN_POWER_OF_TWO(width, cm->mib_size_log2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700980
Timothy B. Terriberry5e816432017-05-05 13:58:32 -0700981 const int offset_y = mi_col_start << (MI_SIZE_LOG2 - tx_size_wide_log2[0]);
Fergus Simpson8c70d912017-05-24 13:05:33 -0700982 const int width_y = aligned_width << (MI_SIZE_LOG2 - tx_size_wide_log2[0]);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700983 const int offset_uv = offset_y >> cm->subsampling_x;
984 const int width_uv = width_y >> cm->subsampling_x;
985
Yaowu Xuf883b422016-08-30 14:01:10 -0700986 av1_zero_array(cm->above_context[0] + offset_y, width_y);
987 av1_zero_array(cm->above_context[1] + offset_uv, width_uv);
988 av1_zero_array(cm->above_context[2] + offset_uv, width_uv);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700989
David Barkerd706ed22017-05-02 14:16:01 +0100990 av1_zero_array(cm->above_seg_context + mi_col_start, aligned_width);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700991
992#if CONFIG_VAR_TX
Jingning Han331662e2017-05-30 17:03:32 -0700993 av1_zero_array(cm->above_txfm_context + (mi_col_start << TX_UNIT_WIDE_LOG2),
994 aligned_width << TX_UNIT_WIDE_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700995#endif // CONFIG_VAR_TX
996}
997
Yaowu Xuf883b422016-08-30 14:01:10 -0700998static INLINE void av1_zero_left_context(MACROBLOCKD *const xd) {
999 av1_zero(xd->left_context);
1000 av1_zero(xd->left_seg_context);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001001#if CONFIG_VAR_TX
Yaowu Xuf883b422016-08-30 14:01:10 -07001002 av1_zero(xd->left_txfm_context_buffer);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001003#endif
1004}
1005
James Zern5d61b602017-06-19 18:07:37 -07001006// Disable array-bounds checks as the TX_SIZE enum contains values larger than
1007// TX_SIZES_ALL (TX_INVALID) which make extending the array as a workaround
1008// infeasible. The assert is enough for static analysis and this or other tools
1009// asan, valgrind would catch oob access at runtime.
1010#if defined(__GNUC__) && __GNUC__ >= 4
1011#pragma GCC diagnostic ignored "-Warray-bounds"
1012#endif
Yaowu Xu4ff59b52017-04-24 12:41:56 -07001013static INLINE TX_SIZE get_min_tx_size(TX_SIZE tx_size) {
Urvang Joshiaffbe5e2017-05-31 16:28:57 -07001014 assert(tx_size < TX_SIZES_ALL);
Jingning Hane67b38a2016-11-04 10:30:00 -07001015 return txsize_sqr_map[tx_size];
1016}
James Zern5d61b602017-06-19 18:07:37 -07001017#if defined(__GNUC__) && __GNUC__ >= 4
1018#pragma GCC diagnostic warning "-Warray-bounds"
1019#endif
Jingning Hane67b38a2016-11-04 10:30:00 -07001020
Jingning Han243b66b2017-06-23 12:11:47 -07001021#if CONFIG_VAR_TX
Jingning Han8b9478a2016-11-01 15:43:23 -07001022static INLINE void set_txfm_ctx(TXFM_CONTEXT *txfm_ctx, uint8_t txs, int len) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001023 int i;
Jingning Han8b9478a2016-11-01 15:43:23 -07001024 for (i = 0; i < len; ++i) txfm_ctx[i] = txs;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001025}
1026
Yaowu Xu4ff59b52017-04-24 12:41:56 -07001027static INLINE void set_txfm_ctxs(TX_SIZE tx_size, int n8_w, int n8_h, int skip,
1028 const MACROBLOCKD *xd) {
Jingning Han8b9478a2016-11-01 15:43:23 -07001029 uint8_t bw = tx_size_wide[tx_size];
1030 uint8_t bh = tx_size_high[tx_size];
Jingning Han1b1dc932016-11-09 10:55:30 -08001031
1032 if (skip) {
Jingning Hanff6ee6a2016-12-07 09:55:21 -08001033 bw = n8_w * MI_SIZE;
1034 bh = n8_h * MI_SIZE;
Jingning Han1b1dc932016-11-09 10:55:30 -08001035 }
1036
Jingning Han331662e2017-05-30 17:03:32 -07001037 set_txfm_ctx(xd->above_txfm_context, bw, n8_w << TX_UNIT_WIDE_LOG2);
1038 set_txfm_ctx(xd->left_txfm_context, bh, n8_h << TX_UNIT_HIGH_LOG2);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001039}
1040
1041static INLINE void txfm_partition_update(TXFM_CONTEXT *above_ctx,
1042 TXFM_CONTEXT *left_ctx,
Jingning Han581d1692017-01-05 16:03:54 -08001043 TX_SIZE tx_size, TX_SIZE txb_size) {
1044 BLOCK_SIZE bsize = txsize_to_bsize[txb_size];
Jingning Han331662e2017-05-30 17:03:32 -07001045 int bh = mi_size_high[bsize] << TX_UNIT_HIGH_LOG2;
1046 int bw = mi_size_wide[bsize] << TX_UNIT_WIDE_LOG2;
Jingning Han8b9478a2016-11-01 15:43:23 -07001047 uint8_t txw = tx_size_wide[tx_size];
1048 uint8_t txh = tx_size_high[tx_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001049 int i;
Jingning Han8b9478a2016-11-01 15:43:23 -07001050 for (i = 0; i < bh; ++i) left_ctx[i] = txh;
1051 for (i = 0; i < bw; ++i) above_ctx[i] = txw;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001052}
1053
Jingning Han6e4955d2017-05-30 22:54:48 -07001054static INLINE TX_SIZE get_sqr_tx_size(int tx_dim) {
1055 TX_SIZE tx_size;
1056 switch (tx_dim) {
1057#if CONFIG_EXT_PARTITION
1058 case 128:
1059#endif
1060 case 64:
1061 case 32: tx_size = TX_32X32; break;
1062 case 16: tx_size = TX_16X16; break;
1063 case 8: tx_size = TX_8X8; break;
1064 default: tx_size = TX_4X4;
1065 }
1066 return tx_size;
1067}
1068
// Derive the entropy context for coding a transform-partition decision of
// `tx_size` within a block of `bsize`, from the above/left transform-size
// contexts. Returns a value in [0, TXFM_PARTITION_CONTEXTS - 1].
static INLINE int txfm_partition_context(TXFM_CONTEXT *above_ctx,
                                         TXFM_CONTEXT *left_ctx,
                                         BLOCK_SIZE bsize, TX_SIZE tx_size) {
  const uint8_t txw = tx_size_wide[tx_size];
  const uint8_t txh = tx_size_high[tx_size];
  // Neighbor was split finer than the candidate size?
  const int above = *above_ctx < txw;
  const int left = *left_ctx < txh;
  int category = TXFM_PARTITION_CONTEXTS - 1;

  // dummy return, not used by others.
  if (tx_size <= TX_4X4) return 0;

  TX_SIZE max_tx_size =
      get_sqr_tx_size(AOMMAX(block_size_wide[bsize], block_size_high[bsize]));

  if (max_tx_size >= TX_8X8) {
    // Two categories per max size: "at max" vs "below max" (for sizes above
    // 8x8), ordered from the largest max_tx_size down.
    category = (tx_size != max_tx_size && max_tx_size > TX_8X8) +
               (TX_SIZES - 1 - max_tx_size) * 2;
  }
  // The last category is a single context with no neighbor refinement.
  if (category == TXFM_PARTITION_CONTEXTS - 1) return category;
  // Three contexts per category: 0, 1, or 2 finer-split neighbors.
  return category * 3 + above + left;
}
1091#endif
1092
// Recover the partition type used at (mi_row, mi_col) for `bsize` from the
// coded mode info. With extended partition types the simple lookup is
// ambiguous, so the compound types are reconstructed by inspecting the
// sub-block sizes actually stored in the mode-info grid.
static INLINE PARTITION_TYPE get_partition(const AV1_COMMON *const cm,
                                           int mi_row, int mi_col,
                                           BLOCK_SIZE bsize) {
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) {
    return PARTITION_INVALID;
  } else {
    const int offset = mi_row * cm->mi_stride + mi_col;
    MODE_INFO **mi = cm->mi_grid_visible + offset;
    const MB_MODE_INFO *const mbmi = &mi[0]->mbmi;
    const int bsl = b_width_log2_lookup[bsize];
    const PARTITION_TYPE partition = partition_lookup[bsl][mbmi->sb_type];
#if !CONFIG_EXT_PARTITION_TYPES
    return partition;
#else
    const int hbs = mi_size_wide[bsize] / 2;

    assert(cm->mi_grid_visible[offset] == &cm->mi[offset]);

    if (partition == PARTITION_HORZ_4 || partition == PARTITION_VERT_4)
      return partition;

    // Only interior blocks (with both the right and below halves inside the
    // frame) can be disambiguated into the compound A/B types.
    if (partition != PARTITION_NONE && bsize > BLOCK_8X8 &&
        mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols) {
      const BLOCK_SIZE h = get_subsize(bsize, PARTITION_HORZ_A);
      const BLOCK_SIZE v = get_subsize(bsize, PARTITION_VERT_A);
      const MB_MODE_INFO *const mbmi_right = &mi[hbs]->mbmi;
      const MB_MODE_INFO *const mbmi_below = &mi[hbs * cm->mi_stride]->mbmi;
      if (mbmi->sb_type == h) {
        // Top half is a horizontal strip: HORZ if the bottom matches,
        // otherwise the bottom was split further (HORZ_B).
        return mbmi_below->sb_type == h ? PARTITION_HORZ : PARTITION_HORZ_B;
      } else if (mbmi->sb_type == v) {
        return mbmi_right->sb_type == v ? PARTITION_VERT : PARTITION_VERT_B;
      } else if (mbmi_below->sb_type == h) {
        return PARTITION_HORZ_A;
      } else if (mbmi_right->sb_type == v) {
        return PARTITION_VERT_A;
      } else {
        return PARTITION_SPLIT;
      }
    }

    return partition;
#endif  // !CONFIG_EXT_PARTITION_TYPES
  }
}
1137
Yaowu Xu4ff59b52017-04-24 12:41:56 -07001138static INLINE void set_sb_size(AV1_COMMON *const cm, BLOCK_SIZE sb_size) {
Yaowu Xuc27fc142016-08-22 16:08:15 -07001139 cm->sb_size = sb_size;
Jingning Hanc709e1f2016-12-06 14:48:09 -08001140 cm->mib_size = mi_size_wide[cm->sb_size];
Jingning Hanbfcd1f12016-12-06 09:09:32 -08001141#if CONFIG_CB4X4
Jingning Hanbfcd1f12016-12-06 09:09:32 -08001142 cm->mib_size_log2 = b_width_log2_lookup[cm->sb_size];
1143#else
Yaowu Xuc27fc142016-08-22 16:08:15 -07001144 cm->mib_size_log2 = mi_width_log2_lookup[cm->sb_size];
Jingning Hanbfcd1f12016-12-06 09:09:32 -08001145#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -07001146}
1147
Thomas Daedef636d5c2017-06-29 13:48:27 -07001148static INLINE int all_lossless(const AV1_COMMON *cm, const MACROBLOCKD *xd) {
1149 int i;
1150 int all_lossless = 1;
1151 if (cm->seg.enabled) {
1152 for (i = 0; i < MAX_SEGMENTS; ++i) {
1153 if (!xd->lossless[i]) {
1154 all_lossless = 0;
1155 break;
1156 }
1157 }
1158 } else {
1159 all_lossless = xd->lossless[0];
1160 }
1161 return all_lossless;
1162}
1163
Yaowu Xuc27fc142016-08-22 16:08:15 -07001164#ifdef __cplusplus
1165} // extern "C"
1166#endif
1167
Yaowu Xuf883b422016-08-30 14:01:10 -07001168#endif // AV1_COMMON_ONYXC_INT_H_