blob: 62191f4437a10e8afaef38ce719f5d01e983abbf [file] [log] [blame]
Yaowu Xuc27fc142016-08-22 16:08:15 -07001/*
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07002 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
Yaowu Xuc27fc142016-08-22 16:08:15 -07003 *
Yaowu Xu2ab7ff02016-09-02 12:04:54 -07004 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
Yaowu Xuc27fc142016-08-22 16:08:15 -070010 */
11
Yaowu Xuf883b422016-08-30 14:01:10 -070012#ifndef AV1_COMMON_ONYXC_INT_H_
13#define AV1_COMMON_ONYXC_INT_H_
Yaowu Xuc27fc142016-08-22 16:08:15 -070014
Yaowu Xuf883b422016-08-30 14:01:10 -070015#include "./aom_config.h"
Yaowu Xucaf20232016-10-18 17:15:40 -070016#include "./av1_rtcd.h"
Yaowu Xuf883b422016-08-30 14:01:10 -070017#include "aom/internal/aom_codec_internal.h"
18#include "aom_util/aom_thread.h"
Alex Converseeb780e72016-12-13 12:46:41 -080019#if CONFIG_ANS
20#include "aom_dsp/ans.h"
21#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -070022#include "av1/common/alloccommon.h"
Tom Finegan17ce8b12017-02-08 12:46:31 -080023#include "av1/common/av1_loopfilter.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070024#include "av1/common/entropy.h"
25#include "av1/common/entropymode.h"
Yaowu Xucaf20232016-10-18 17:15:40 -070026#include "av1/common/entropymv.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070027#include "av1/common/frame_buffers.h"
Yaowu Xucaf20232016-10-18 17:15:40 -070028#include "av1/common/mv.h"
Yaowu Xuc27fc142016-08-22 16:08:15 -070029#include "av1/common/quant_common.h"
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -070030#if CONFIG_LOOP_RESTORATION
Yaowu Xuc27fc142016-08-22 16:08:15 -070031#include "av1/common/restoration.h"
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -070032#endif // CONFIG_LOOP_RESTORATION
Yaowu Xucaf20232016-10-18 17:15:40 -070033#include "av1/common/tile_common.h"
Yushin Cho77bba8d2016-11-04 16:36:56 -070034#include "av1/common/odintrin.h"
35#if CONFIG_PVQ
36#include "av1/common/pvq.h"
37#endif
Luc Trudeaubaeb3752017-04-24 11:19:25 -040038#if CONFIG_CFL
39#include "av1/common/cfl.h"
40#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -070041#ifdef __cplusplus
42extern "C" {
43#endif
44
Jean-Marc Valine5759772017-03-23 18:46:38 -040045#define CDEF_MAX_STRENGTHS 16
46
Yaowu Xuc27fc142016-08-22 16:08:15 -070047#define REF_FRAMES_LOG2 3
48#define REF_FRAMES (1 << REF_FRAMES_LOG2)
49
50// 4 scratch frames for the new frames to support a maximum of 4 cores decoding
51// in parallel, 3 for scaled references on the encoder.
52// TODO(hkuang): Add ondemand frame buffers instead of hardcoding the number
53// of framebuffers.
54// TODO(jkoleszar): These 3 extra references could probably come from the
55// normal reference pool.
56#define FRAME_BUFFERS (REF_FRAMES + 7)
57
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +010058#if CONFIG_REFERENCE_BUFFER
59/* Constant values while waiting for the sequence header */
60#define FRAME_ID_NUMBERS_PRESENT_FLAG 1
61#define FRAME_ID_LENGTH_MINUS7 8 // Allows frame id up to 2^15-1
62#define DELTA_FRAME_ID_LENGTH_MINUS2 12 // Allows frame id deltas up to 2^14-1
63#endif
64
Yaowu Xuc27fc142016-08-22 16:08:15 -070065#if CONFIG_EXT_REFS
66#define FRAME_CONTEXTS_LOG2 3
67#else
68#define FRAME_CONTEXTS_LOG2 2
69#endif
70
71#define FRAME_CONTEXTS (1 << FRAME_CONTEXTS_LOG2)
72
73#define NUM_PING_PONG_BUFFERS 2
74
// How inter prediction references are combined for a frame.
typedef enum {
  SINGLE_REFERENCE = 0,         // Every block uses a single reference.
  COMPOUND_REFERENCE = 1,       // Every block uses a compound reference.
  REFERENCE_MODE_SELECT = 2,    // The mode is chosen per block.
  REFERENCE_MODES = 3,          // Number of reference modes (not a mode).
} REFERENCE_MODE;
81
// Which frame contexts should be reset to default values before coding.
typedef enum {
  RESET_FRAME_CONTEXT_NONE = 0,     // Reset no frame contexts.
  RESET_FRAME_CONTEXT_CURRENT = 1,  // Reset only the current frame's context.
  RESET_FRAME_CONTEXT_ALL = 2,      // Reset all frame contexts.
} RESET_FRAME_CONTEXT_MODE;
87
// How the frame context is refreshed after coding a frame.
typedef enum {
  /**
   * Update frame context to values resulting from forward probability
   * updates signaled in the frame header
   */
  REFRESH_FRAME_CONTEXT_FORWARD,
  /**
   * Update frame context to values resulting from backward probability
   * updates based on entropy/counts in the decoded frame
   */
  REFRESH_FRAME_CONTEXT_BACKWARD,
} REFRESH_FRAME_CONTEXT_MODE;
100
// Per-MI-unit motion information retained after a frame is coded so that
// later frames can use it (e.g. for temporal MV prediction).
typedef struct {
  int_mv mv[2];       // Motion vectors for up to two references.
  int_mv pred_mv[2];  // Predicted motion vectors for up to two references.
  MV_REFERENCE_FRAME ref_frame[2];  // Reference frames the MVs refer to.
} MV_REF;
106
// A reference-counted frame buffer plus the side information that must
// survive with the frame (saved MVs, dimensions, worker ownership).
typedef struct {
  int ref_count;  // Number of current users; 0 means the slot is free.
  MV_REF *mvs;    // Saved per-MI motion info for this frame (see MV_REF).
  int mi_rows;    // Frame height in MI units (dimensions of 'mvs').
  int mi_cols;    // Frame width in MI units.
#if CONFIG_GLOBAL_MOTION
  // Per-reference global motion parameters associated with this frame.
  WarpedMotionParams global_motion[TOTAL_REFS_PER_FRAME];
#endif  // CONFIG_GLOBAL_MOTION
  aom_codec_frame_buffer_t raw_frame_buffer;
  YV12_BUFFER_CONFIG buf;
#if CONFIG_TEMPMV_SIGNALING
  uint8_t intra_only;  // Nonzero if this frame was coded intra-only.
#endif
  // The following variables will only be used in frame parallel decode.

  // frame_worker_owner indicates which FrameWorker owns this buffer. NULL means
  // that no FrameWorker owns, or is decoding, this buffer.
  AVxWorker *frame_worker_owner;

  // row and col indicate which position frame has been decoded to in real
  // pixel unit. They are reset to -1 when decoding begins and set to INT_MAX
  // when the frame is fully decoded.
  int row;
  int col;
} RefCntBuffer;
132
// Pool of frame buffers shared between the codec and (in frame-parallel
// decode) multiple FrameWorkers.
typedef struct BufferPool {
// Protect BufferPool from being accessed by several FrameWorkers at
// the same time during frame parallel decode.
// TODO(hkuang): Try to use atomic variable instead of locking the whole pool.
#if CONFIG_MULTITHREAD
  pthread_mutex_t pool_mutex;
#endif

  // Private data associated with the frame buffer callbacks.
  void *cb_priv;

  // Callbacks used to obtain and release external frame buffers.
  aom_get_frame_buffer_cb_fn_t get_fb_cb;
  aom_release_frame_buffer_cb_fn_t release_fb_cb;

  // The reference-counted frame buffer slots themselves.
  RefCntBuffer frame_bufs[FRAME_BUFFERS];

  // Frame buffers allocated internally by the codec.
  InternalFrameBufferList int_frame_buffers;
} BufferPool;
152
Yaowu Xuf883b422016-08-30 14:01:10 -0700153typedef struct AV1Common {
154 struct aom_internal_error_info error;
155 aom_color_space_t color_space;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700156 int color_range;
157 int width;
158 int height;
159 int render_width;
160 int render_height;
161 int last_width;
162 int last_height;
163
Fergus Simpsond91c8c92017-04-07 12:12:00 -0700164#if CONFIG_FRAME_SUPERRES
165 // The numerator of the superres scale, the denominator is fixed
166 uint8_t superres_scale_numerator;
167 int superres_width, superres_height;
168#endif // CONFIG_FRAME_SUPERRES
169
Yaowu Xuc27fc142016-08-22 16:08:15 -0700170 // TODO(jkoleszar): this implies chroma ss right now, but could vary per
171 // plane. Revisit as part of the future change to YV12_BUFFER_CONFIG to
172 // support additional planes.
173 int subsampling_x;
174 int subsampling_y;
175
Sebastien Alaiwan71e87842017-04-12 16:03:28 +0200176#if CONFIG_HIGHBITDEPTH
Yaowu Xuc27fc142016-08-22 16:08:15 -0700177 // Marks if we need to use 16bit frame buffers (1: yes, 0: no).
178 int use_highbitdepth;
179#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700180 YV12_BUFFER_CONFIG *frame_to_show;
181 RefCntBuffer *prev_frame;
182
183 // TODO(hkuang): Combine this with cur_buf in macroblockd.
184 RefCntBuffer *cur_frame;
185
186 int ref_frame_map[REF_FRAMES]; /* maps fb_idx to reference slot */
187
188 // Prepare ref_frame_map for the next frame.
189 // Only used in frame parallel decode.
190 int next_ref_frame_map[REF_FRAMES];
191
192 // TODO(jkoleszar): could expand active_ref_idx to 4, with 0 as intra, and
193 // roll new_fb_idx into it.
194
195 // Each Inter frame can reference INTER_REFS_PER_FRAME buffers
196 RefBuffer frame_refs[INTER_REFS_PER_FRAME];
197
198 int new_fb_idx;
199
200 FRAME_TYPE last_frame_type; /* last frame's frame type for motion search.*/
Yaowu Xuc27fc142016-08-22 16:08:15 -0700201 FRAME_TYPE frame_type;
202
203 int show_frame;
204 int last_show_frame;
205 int show_existing_frame;
206#if CONFIG_EXT_REFS
207 // Flag for a frame used as a reference - not written to the bitstream
208 int is_reference_frame;
209#endif // CONFIG_EXT_REFS
210
211 // Flag signaling that the frame is encoded using only INTRA modes.
212 uint8_t intra_only;
213 uint8_t last_intra_only;
214
215 int allow_high_precision_mv;
216
Urvang Joshib100db72016-10-12 16:28:56 -0700217#if CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -0700218 int allow_screen_content_tools;
Urvang Joshib100db72016-10-12 16:28:56 -0700219#endif // CONFIG_PALETTE
Yaowu Xuc27fc142016-08-22 16:08:15 -0700220
221 // Flag signaling which frame contexts should be reset to default values.
222 RESET_FRAME_CONTEXT_MODE reset_frame_context;
223
224 // MBs, mb_rows/cols is in 16-pixel units; mi_rows/cols is in
225 // MODE_INFO (8-pixel) units.
226 int MBs;
227 int mb_rows, mi_rows;
228 int mb_cols, mi_cols;
229 int mi_stride;
230
231 /* profile settings */
232 TX_MODE tx_mode;
233
234 int base_qindex;
235 int y_dc_delta_q;
236 int uv_dc_delta_q;
237 int uv_ac_delta_q;
238 int16_t y_dequant[MAX_SEGMENTS][2];
239 int16_t uv_dequant[MAX_SEGMENTS][2];
240
241#if CONFIG_AOM_QM
242 // Global quant matrix tables
243 qm_val_t *giqmatrix[NUM_QM_LEVELS][2][2][TX_SIZES];
244 qm_val_t *gqmatrix[NUM_QM_LEVELS][2][2][TX_SIZES];
245
246 // Local quant matrix tables for each frame
247 qm_val_t *y_iqmatrix[MAX_SEGMENTS][2][TX_SIZES];
248 qm_val_t *uv_iqmatrix[MAX_SEGMENTS][2][TX_SIZES];
249 // Encoder
250 qm_val_t *y_qmatrix[MAX_SEGMENTS][2][TX_SIZES];
251 qm_val_t *uv_qmatrix[MAX_SEGMENTS][2][TX_SIZES];
252
253 int using_qmatrix;
254 int min_qmlevel;
255 int max_qmlevel;
256#endif
257#if CONFIG_NEW_QUANT
258 dequant_val_type_nuq y_dequant_nuq[MAX_SEGMENTS][QUANT_PROFILES][COEF_BANDS];
259 dequant_val_type_nuq uv_dequant_nuq[MAX_SEGMENTS][QUANT_PROFILES][COEF_BANDS];
260#endif
261
262 /* We allocate a MODE_INFO struct for each macroblock, together with
263 an extra row on top and column on the left to simplify prediction. */
264 int mi_alloc_size;
265 MODE_INFO *mip; /* Base of allocated array */
266 MODE_INFO *mi; /* Corresponds to upper left visible macroblock */
267
268 // TODO(agrange): Move prev_mi into encoder structure.
269 // prev_mip and prev_mi will only be allocated in encoder.
270 MODE_INFO *prev_mip; /* MODE_INFO array 'mip' from last decoded frame */
271 MODE_INFO *prev_mi; /* 'mi' from last frame (points into prev_mip) */
272
273 // Separate mi functions between encoder and decoder.
Yaowu Xuf883b422016-08-30 14:01:10 -0700274 int (*alloc_mi)(struct AV1Common *cm, int mi_size);
275 void (*free_mi)(struct AV1Common *cm);
276 void (*setup_mi)(struct AV1Common *cm);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700277
278 // Grid of pointers to 8x8 MODE_INFO structs. Any 8x8 not in the visible
279 // area will be NULL.
280 MODE_INFO **mi_grid_base;
281 MODE_INFO **mi_grid_visible;
282 MODE_INFO **prev_mi_grid_base;
283 MODE_INFO **prev_mi_grid_visible;
284
285 // Whether to use previous frame's motion vectors for prediction.
286 int use_prev_frame_mvs;
287
288 // Persistent mb segment id map used in prediction.
289 int seg_map_idx;
290 int prev_seg_map_idx;
291
292 uint8_t *seg_map_array[NUM_PING_PONG_BUFFERS];
293 uint8_t *last_frame_seg_map;
294 uint8_t *current_frame_seg_map;
295 int seg_map_alloc_size;
296
James Zern7b9407a2016-05-18 23:48:05 -0700297 InterpFilter interp_filter;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700298
299 loop_filter_info_n lf_info;
300#if CONFIG_LOOP_RESTORATION
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -0800301 RestorationInfo rst_info[MAX_MB_PLANE];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700302 RestorationInternal rst_internal;
303#endif // CONFIG_LOOP_RESTORATION
304
305 // Flag signaling how frame contexts should be updated at the end of
306 // a frame decode
307 REFRESH_FRAME_CONTEXT_MODE refresh_frame_context;
308
309 int ref_frame_sign_bias[TOTAL_REFS_PER_FRAME]; /* Two state 0, 1 */
310
311 struct loopfilter lf;
312 struct segmentation seg;
313
314 int frame_parallel_decode; // frame-based threading.
315
Sarah Parkere68a3e42017-02-16 14:03:24 -0800316#if CONFIG_EXT_TX
317 int reduced_tx_set_used;
318#endif // CONFIG_EXT_TX
319
Yaowu Xuc27fc142016-08-22 16:08:15 -0700320// Context probabilities for reference frame prediction
321#if CONFIG_EXT_REFS
322 MV_REFERENCE_FRAME comp_fwd_ref[FWD_REFS];
323 MV_REFERENCE_FRAME comp_bwd_ref[BWD_REFS];
324#else
325 MV_REFERENCE_FRAME comp_fixed_ref;
326 MV_REFERENCE_FRAME comp_var_ref[COMP_REFS];
327#endif // CONFIG_EXT_REFS
328 REFERENCE_MODE reference_mode;
329
330 FRAME_CONTEXT *fc; /* this frame entropy */
331 FRAME_CONTEXT *frame_contexts; // FRAME_CONTEXTS
Thomas Daede10e1da92017-04-26 13:22:21 -0700332 FRAME_CONTEXT *pre_fc; // Context referenced in this frame
Yaowu Xuc27fc142016-08-22 16:08:15 -0700333 unsigned int frame_context_idx; /* Context to use/update */
334 FRAME_COUNTS counts;
335
Yaowu Xuc27fc142016-08-22 16:08:15 -0700336 unsigned int current_video_frame;
337 BITSTREAM_PROFILE profile;
338
Yaowu Xuf883b422016-08-30 14:01:10 -0700339 // AOM_BITS_8 in profile 0 or 1, AOM_BITS_10 or AOM_BITS_12 in profile 2 or 3.
340 aom_bit_depth_t bit_depth;
341 aom_bit_depth_t dequant_bit_depth; // bit_depth of current dequantizer
Yaowu Xuc27fc142016-08-22 16:08:15 -0700342
343 int error_resilient_mode;
344
345#if !CONFIG_EXT_TILE
346 int log2_tile_cols, log2_tile_rows;
347#endif // !CONFIG_EXT_TILE
348 int tile_cols, tile_rows;
349 int tile_width, tile_height; // In MI units
Yunqing Wangd8cd55f2017-02-27 12:16:00 -0800350#if CONFIG_EXT_TILE
351 unsigned int tile_encoding_mode;
352#endif // CONFIG_EXT_TILE
Yaowu Xuc27fc142016-08-22 16:08:15 -0700353
Fangwen Fu7b9f2b32017-01-17 14:01:52 -0800354#if CONFIG_DEPENDENT_HORZTILES
355 int dependent_horz_tiles;
Fangwen Fu73126c02017-02-08 22:37:47 -0800356#if CONFIG_TILE_GROUPS
357 int tile_group_start_row[MAX_TILE_ROWS][MAX_TILE_COLS];
358 int tile_group_start_col[MAX_TILE_ROWS][MAX_TILE_COLS];
359#endif
Fangwen Fu7b9f2b32017-01-17 14:01:52 -0800360#endif
Ryan Lei9b02b0e2017-01-30 15:52:20 -0800361#if CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -0800362 int loop_filter_across_tiles_enabled;
Ryan Lei9b02b0e2017-01-30 15:52:20 -0800363#endif // CONFIG_LOOPFILTERING_ACROSS_TILES
Ryan Lei7386eda2016-12-08 21:08:31 -0800364
Yaowu Xuc27fc142016-08-22 16:08:15 -0700365 int byte_alignment;
366 int skip_loop_filter;
367
368 // Private data associated with the frame buffer callbacks.
369 void *cb_priv;
Yaowu Xuf883b422016-08-30 14:01:10 -0700370 aom_get_frame_buffer_cb_fn_t get_fb_cb;
371 aom_release_frame_buffer_cb_fn_t release_fb_cb;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700372
373 // Handles memory for the codec.
374 InternalFrameBufferList int_frame_buffers;
375
376 // External BufferPool passed from outside.
377 BufferPool *buffer_pool;
378
379 PARTITION_CONTEXT *above_seg_context;
380 ENTROPY_CONTEXT *above_context[MAX_MB_PLANE];
381#if CONFIG_VAR_TX
382 TXFM_CONTEXT *above_txfm_context;
383 TXFM_CONTEXT left_txfm_context[MAX_MIB_SIZE];
384#endif
385 int above_context_alloc_cols;
386
387 // scratch memory for intraonly/keyframe forward updates from default tables
388 // - this is intentionally not placed in FRAME_CONTEXT since it's reset upon
389 // each keyframe and not used afterwards
Yaowu Xuf883b422016-08-30 14:01:10 -0700390 aom_prob kf_y_prob[INTRA_MODES][INTRA_MODES][INTRA_MODES - 1];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700391#if CONFIG_GLOBAL_MOTION
David Barkercf3d0b02016-11-10 10:14:49 +0000392 WarpedMotionParams global_motion[TOTAL_REFS_PER_FRAME];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700393#endif
394
395 BLOCK_SIZE sb_size; // Size of the superblock used for this frame
396 int mib_size; // Size of the superblock in units of MI blocks
397 int mib_size_log2; // Log 2 of above.
Jean-Marc Valin01435132017-02-18 14:12:53 -0500398#if CONFIG_CDEF
Steinar Midtskogen0c966a52017-04-18 14:38:13 +0200399 int cdef_dering_damping;
400 int cdef_clpf_damping;
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -0400401 int nb_cdef_strengths;
402 int cdef_strengths[CDEF_MAX_STRENGTHS];
Jean-Marc Valine9f77422017-03-22 17:09:51 -0400403 int cdef_uv_strengths[CDEF_MAX_STRENGTHS];
Jean-Marc Valin5f5c1322017-03-21 16:20:21 -0400404 int cdef_bits;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700405#endif
Thomas Davies80188d12016-10-26 16:08:35 -0700406
Arild Fuldseth07441162016-08-15 15:07:52 +0200407#if CONFIG_DELTA_Q
408 int delta_q_present_flag;
Thomas Daviesf6936102016-09-05 16:51:31 +0100409 // Resolution of delta quant
410 int delta_q_res;
Fangwen Fu231fe422017-04-24 17:52:29 -0700411#if CONFIG_EXT_DELTA_Q
412 int delta_lf_present_flag;
413 // Resolution of delta lf level
414 int delta_lf_res;
415#endif
Arild Fuldseth07441162016-08-15 15:07:52 +0200416#endif
Thomas Davies80188d12016-10-26 16:08:35 -0700417#if CONFIG_TILE_GROUPS
418 int num_tg;
419#endif
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +0100420#if CONFIG_REFERENCE_BUFFER
421 int current_frame_id;
422 int ref_frame_id[REF_FRAMES];
423 int valid_for_referencing[REF_FRAMES];
424 int refresh_mask;
425 int invalid_delta_frame_id_minus1;
426#endif
Alex Converseeb780e72016-12-13 12:46:41 -0800427#if CONFIG_ANS && ANS_MAX_SYMBOLS
428 int ans_window_size_log2;
429#endif
Yaowu Xuf883b422016-08-30 14:01:10 -0700430} AV1_COMMON;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700431
Arild Fuldseth (arilfuld)5114b7b2016-11-09 13:32:54 +0100432#if CONFIG_REFERENCE_BUFFER
/* Initial version of sequence header structure */
typedef struct SequenceHeader {
  int frame_id_numbers_present_flag;  // Nonzero if frame ids are coded.
  int frame_id_length_minus7;         // Frame id bit length, minus 7.
  int delta_frame_id_length_minus2;   // Delta frame id bit length, minus 2.
} SequenceHeader;
439#endif
440
Yaowu Xuc27fc142016-08-22 16:08:15 -0700441// TODO(hkuang): Don't need to lock the whole pool after implementing atomic
442// frame reference count.
443static void lock_buffer_pool(BufferPool *const pool) {
444#if CONFIG_MULTITHREAD
445 pthread_mutex_lock(&pool->pool_mutex);
446#else
447 (void)pool;
448#endif
449}
450
451static void unlock_buffer_pool(BufferPool *const pool) {
452#if CONFIG_MULTITHREAD
453 pthread_mutex_unlock(&pool->pool_mutex);
454#else
455 (void)pool;
456#endif
457}
458
Yaowu Xuf883b422016-08-30 14:01:10 -0700459static INLINE YV12_BUFFER_CONFIG *get_ref_frame(AV1_COMMON *cm, int index) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700460 if (index < 0 || index >= REF_FRAMES) return NULL;
461 if (cm->ref_frame_map[index] < 0) return NULL;
462 assert(cm->ref_frame_map[index] < FRAME_BUFFERS);
463 return &cm->buffer_pool->frame_bufs[cm->ref_frame_map[index]].buf;
464}
465
466static INLINE YV12_BUFFER_CONFIG *get_frame_new_buffer(
Yaowu Xuf883b422016-08-30 14:01:10 -0700467 const AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700468 return &cm->buffer_pool->frame_bufs[cm->new_fb_idx].buf;
469}
470
Yaowu Xuf883b422016-08-30 14:01:10 -0700471static INLINE int get_free_fb(AV1_COMMON *cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700472 RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
473 int i;
474
475 lock_buffer_pool(cm->buffer_pool);
476 for (i = 0; i < FRAME_BUFFERS; ++i)
477 if (frame_bufs[i].ref_count == 0) break;
478
479 if (i != FRAME_BUFFERS) {
480 frame_bufs[i].ref_count = 1;
481 } else {
482 // Reset i to be INVALID_IDX to indicate no free buffer found.
483 i = INVALID_IDX;
484 }
485
486 unlock_buffer_pool(cm->buffer_pool);
487 return i;
488}
489
490static INLINE void ref_cnt_fb(RefCntBuffer *bufs, int *idx, int new_idx) {
491 const int ref_index = *idx;
492
493 if (ref_index >= 0 && bufs[ref_index].ref_count > 0)
494 bufs[ref_index].ref_count--;
495
496 *idx = new_idx;
497
498 bufs[new_idx].ref_count++;
499}
500
Yaowu Xuf883b422016-08-30 14:01:10 -0700501static INLINE int mi_cols_aligned_to_sb(const AV1_COMMON *cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700502 return ALIGN_POWER_OF_TWO(cm->mi_cols, cm->mib_size_log2);
503}
504
Yaowu Xuf883b422016-08-30 14:01:10 -0700505static INLINE int mi_rows_aligned_to_sb(const AV1_COMMON *cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700506 return ALIGN_POWER_OF_TWO(cm->mi_rows, cm->mib_size_log2);
507}
508
Yaowu Xuf883b422016-08-30 14:01:10 -0700509static INLINE int frame_is_intra_only(const AV1_COMMON *const cm) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700510 return cm->frame_type == KEY_FRAME || cm->intra_only;
511}
512
Yaowu Xuf883b422016-08-30 14:01:10 -0700513static INLINE void av1_init_macroblockd(AV1_COMMON *cm, MACROBLOCKD *xd,
Yushin Cho77bba8d2016-11-04 16:36:56 -0700514#if CONFIG_PVQ
515 tran_low_t *pvq_ref_coeff,
516#endif
Luc Trudeauf8164152017-04-11 16:20:51 -0400517#if CONFIG_CFL
518 CFL_CTX *cfl,
519#endif
Yaowu Xuf883b422016-08-30 14:01:10 -0700520 tran_low_t *dqcoeff) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700521 int i;
522 for (i = 0; i < MAX_MB_PLANE; ++i) {
523 xd->plane[i].dqcoeff = dqcoeff;
Yushin Cho77bba8d2016-11-04 16:36:56 -0700524#if CONFIG_PVQ
525 xd->plane[i].pvq_ref_coeff = pvq_ref_coeff;
526#endif
Luc Trudeauf8164152017-04-11 16:20:51 -0400527#if CONFIG_CFL
528 xd->cfl = cfl;
Luc Trudeaubaeb3752017-04-24 11:19:25 -0400529 cfl_init(cfl, cm, xd->plane[AOM_PLANE_U].subsampling_x,
530 xd->plane[AOM_PLANE_U].subsampling_y);
Luc Trudeauf8164152017-04-11 16:20:51 -0400531#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700532 xd->above_context[i] = cm->above_context[i];
533 if (xd->plane[i].plane_type == PLANE_TYPE_Y) {
534 memcpy(xd->plane[i].seg_dequant, cm->y_dequant, sizeof(cm->y_dequant));
535#if CONFIG_AOM_QM
536 memcpy(xd->plane[i].seg_iqmatrix, cm->y_iqmatrix, sizeof(cm->y_iqmatrix));
537#endif
538
539#if CONFIG_NEW_QUANT
540 memcpy(xd->plane[i].seg_dequant_nuq, cm->y_dequant_nuq,
541 sizeof(cm->y_dequant_nuq));
542#endif
543 } else {
544 memcpy(xd->plane[i].seg_dequant, cm->uv_dequant, sizeof(cm->uv_dequant));
545#if CONFIG_AOM_QM
546 memcpy(xd->plane[i].seg_iqmatrix, cm->uv_iqmatrix,
547 sizeof(cm->uv_iqmatrix));
548#endif
549#if CONFIG_NEW_QUANT
550 memcpy(xd->plane[i].seg_dequant_nuq, cm->uv_dequant_nuq,
551 sizeof(cm->uv_dequant_nuq));
552#endif
553 }
554 xd->fc = cm->fc;
555 }
Yaowu Xuc27fc142016-08-22 16:08:15 -0700556 xd->above_seg_context = cm->above_seg_context;
557#if CONFIG_VAR_TX
558 xd->above_txfm_context = cm->above_txfm_context;
559#endif
560 xd->mi_stride = cm->mi_stride;
561 xd->error_info = &cm->error;
562}
563
// Point each plane's above/left entropy-context pointers at the entries for
// block position (mi_row, mi_col), accounting for chroma subsampling.
static INLINE void set_skip_context(MACROBLOCKD *xd, int mi_row, int mi_col) {
  int i;
  for (i = 0; i < MAX_MB_PLANE; ++i) {
    struct macroblockd_plane *const pd = &xd->plane[i];
#if CONFIG_CHROMA_SUB8X8
    // For sub-8x8 blocks, snap odd chroma positions back to the even
    // (subsampled) grid. NOTE(review): this mutates mi_row/mi_col, so the
    // adjustment carries over to later planes in the loop.
    if (xd->mi[0]->mbmi.sb_type < BLOCK_8X8) {
      // Offset the buffer pointer
      if (pd->subsampling_y && (mi_row & 0x01)) mi_row -= 1;
      if (pd->subsampling_x && (mi_col & 0x01)) mi_col -= 1;
    }
#endif
    // Context arrays are indexed at twice MI resolution.
    int above_idx = mi_col * 2;
    int left_idx = (mi_row * 2) & MAX_MIB_MASK_2;
    pd->above_context = &xd->above_context[i][above_idx >> pd->subsampling_x];
    pd->left_context = &xd->left_context[i][left_idx >> pd->subsampling_y];
  }
}
581
// Allocation size for one MI dimension: the visible length plus one extra
// superblock's worth of border.
static INLINE int calc_mi_size(int len) {
  // len is in mi units.
  return len + MAX_MIB_SIZE;
}
586
Jingning Hanfaad0e12016-12-07 10:54:57 -0800587static INLINE void set_plane_n4(MACROBLOCKD *const xd, int bw, int bh) {
Jingning Hana6923f72016-07-15 08:50:14 -0700588 int i;
589 for (i = 0; i < MAX_MB_PLANE; i++) {
590 xd->plane[i].n4_w = (bw << 1) >> xd->plane[i].subsampling_x;
591 xd->plane[i].n4_h = (bh << 1) >> xd->plane[i].subsampling_y;
Jingning Hanfaad0e12016-12-07 10:54:57 -0800592
593 xd->plane[i].width = (bw * MI_SIZE) >> xd->plane[i].subsampling_x;
594 xd->plane[i].height = (bh * MI_SIZE) >> xd->plane[i].subsampling_y;
Jingning Hanc20dc8e2017-02-17 15:37:28 -0800595
Jingning Han31b6a4f2017-02-23 11:05:53 -0800596#if !CONFIG_CHROMA_2X2
Jingning Hanc20dc8e2017-02-17 15:37:28 -0800597 xd->plane[i].width = AOMMAX(xd->plane[i].width, 4);
598 xd->plane[i].height = AOMMAX(xd->plane[i].height, 4);
Jingning Han31b6a4f2017-02-23 11:05:53 -0800599#endif
Jingning Hana6923f72016-07-15 08:50:14 -0700600 }
601}
602
Urvang Joshi359dc2b2017-04-27 15:41:47 -0700603static INLINE void set_mi_row_col(MACROBLOCKD *xd, const TileInfo *const tile,
604 int mi_row, int bh, int mi_col, int bw,
Fangwen Fu7b9f2b32017-01-17 14:01:52 -0800605#if CONFIG_DEPENDENT_HORZTILES
Urvang Joshi359dc2b2017-04-27 15:41:47 -0700606 int dependent_horz_tile_flag,
607#endif // CONFIG_DEPENDENT_HORZTILES
Yaowu Xuc27fc142016-08-22 16:08:15 -0700608 int mi_rows, int mi_cols) {
609 xd->mb_to_top_edge = -((mi_row * MI_SIZE) * 8);
610 xd->mb_to_bottom_edge = ((mi_rows - bh - mi_row) * MI_SIZE) * 8;
611 xd->mb_to_left_edge = -((mi_col * MI_SIZE) * 8);
612 xd->mb_to_right_edge = ((mi_cols - bw - mi_col) * MI_SIZE) * 8;
613
Urvang Joshi359dc2b2017-04-27 15:41:47 -0700614#if CONFIG_DEPENDENT_HORZTILES
615 if (dependent_horz_tile_flag) {
616#if CONFIG_TILE_GROUPS
617 xd->up_available = (mi_row > tile->mi_row_start) || !tile->tg_horz_boundary;
618#else
619 xd->up_available = (mi_row > 0);
620#endif // CONFIG_TILE_GROUPS
621 } else {
622#endif // CONFIG_DEPENDENT_HORZTILES
623 // Are edges available for intra prediction?
624 xd->up_available = (mi_row > tile->mi_row_start);
625#if CONFIG_DEPENDENT_HORZTILES
626 }
627#endif // CONFIG_DEPENDENT_HORZTILES
628
Yaowu Xuc27fc142016-08-22 16:08:15 -0700629 xd->left_available = (mi_col > tile->mi_col_start);
Jingning Han3da18d62017-05-02 12:43:58 -0700630#if CONFIG_CHROMA_SUB8X8
631 xd->chroma_up_available = xd->up_available;
632 xd->chroma_left_available = xd->left_available;
633 if (xd->plane[1].subsampling_x && bw < mi_size_wide[BLOCK_8X8])
634 xd->chroma_left_available = (mi_col - 1) > tile->mi_col_start;
635 if (xd->plane[1].subsampling_y && bh < mi_size_high[BLOCK_8X8])
636 xd->chroma_up_available = (mi_row - 1) > tile->mi_row_start;
637#endif
Yaowu Xuc27fc142016-08-22 16:08:15 -0700638 if (xd->up_available) {
639 xd->above_mi = xd->mi[-xd->mi_stride];
640 // above_mi may be NULL in encoder's first pass.
641 xd->above_mbmi = xd->above_mi ? &xd->above_mi->mbmi : NULL;
642 } else {
643 xd->above_mi = NULL;
644 xd->above_mbmi = NULL;
645 }
646
647 if (xd->left_available) {
648 xd->left_mi = xd->mi[-1];
649 // left_mi may be NULL in encoder's first pass.
650 xd->left_mbmi = xd->left_mi ? &xd->left_mi->mbmi : NULL;
651 } else {
652 xd->left_mi = NULL;
653 xd->left_mbmi = NULL;
654 }
655
656 xd->n8_h = bh;
657 xd->n8_w = bw;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700658 xd->is_sec_rect = 0;
659 if (xd->n8_w < xd->n8_h)
660 if (mi_col & (xd->n8_h - 1)) xd->is_sec_rect = 1;
661
662 if (xd->n8_w > xd->n8_h)
663 if (mi_row & (xd->n8_w - 1)) xd->is_sec_rect = 1;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700664}
665
Yaowu Xuf883b422016-08-30 14:01:10 -0700666static INLINE const aom_prob *get_y_mode_probs(const AV1_COMMON *cm,
Yaowu Xuc27fc142016-08-22 16:08:15 -0700667 const MODE_INFO *mi,
668 const MODE_INFO *above_mi,
669 const MODE_INFO *left_mi,
670 int block) {
Yaowu Xuf883b422016-08-30 14:01:10 -0700671 const PREDICTION_MODE above = av1_above_block_mode(mi, above_mi, block);
672 const PREDICTION_MODE left = av1_left_block_mode(mi, left_mi, block);
Yaowu Xuc27fc142016-08-22 16:08:15 -0700673 return cm->kf_y_prob[above][left];
674}
675
Nathan E. Egge10ba2be2016-11-16 09:44:26 -0500676#if CONFIG_EC_MULTISYMBOL
Thomas Davies1bfb5ed2017-01-11 15:28:11 +0000677static INLINE aom_cdf_prob *get_y_mode_cdf(FRAME_CONTEXT *tile_ctx,
678 const MODE_INFO *mi,
Thomas9ac55082016-09-23 18:04:17 +0100679 const MODE_INFO *above_mi,
680 const MODE_INFO *left_mi,
681 int block) {
Nathan E. Egge3ef926e2016-09-07 18:20:41 -0400682 const PREDICTION_MODE above = av1_above_block_mode(mi, above_mi, block);
683 const PREDICTION_MODE left = av1_left_block_mode(mi, left_mi, block);
Thomas Davies1bfb5ed2017-01-11 15:28:11 +0000684 return tile_ctx->kf_y_cdf[above][left];
Nathan E. Egge3ef926e2016-09-07 18:20:41 -0400685}
686#endif
687
Yaowu Xuc27fc142016-08-22 16:08:15 -0700688static INLINE void update_partition_context(MACROBLOCKD *xd, int mi_row,
689 int mi_col, BLOCK_SIZE subsize,
690 BLOCK_SIZE bsize) {
691 PARTITION_CONTEXT *const above_ctx = xd->above_seg_context + mi_col;
692 PARTITION_CONTEXT *const left_ctx =
693 xd->left_seg_context + (mi_row & MAX_MIB_MASK);
694
695#if CONFIG_EXT_PARTITION_TYPES
Jingning Hanc709e1f2016-12-06 14:48:09 -0800696 const int bw = mi_size_wide[bsize];
697 const int bh = mi_size_high[bsize];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700698 memset(above_ctx, partition_context_lookup[subsize].above, bw);
699 memset(left_ctx, partition_context_lookup[subsize].left, bh);
700#else
701 // num_4x4_blocks_wide_lookup[bsize] / 2
Jingning Hanc709e1f2016-12-06 14:48:09 -0800702 const int bs = mi_size_wide[bsize];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700703
704 // update the partition context at the end notes. set partition bits
705 // of block sizes larger than the current one to be one, and partition
706 // bits of smaller block sizes to be zero.
707 memset(above_ctx, partition_context_lookup[subsize].above, bs);
708 memset(left_ctx, partition_context_lookup[subsize].left, bs);
709#endif // CONFIG_EXT_PARTITION_TYPES
710}
711
Jingning Han18c53c82017-02-17 14:49:57 -0800712#if CONFIG_CB4X4
Yaowu Xu4ff59b52017-04-24 12:41:56 -0700713static INLINE int is_chroma_reference(int mi_row, int mi_col, BLOCK_SIZE bsize,
714 int subsampling_x, int subsampling_y) {
Jingning Han31b6a4f2017-02-23 11:05:53 -0800715#if CONFIG_CHROMA_2X2
716 return 1;
717#endif
Jingning Hane2db3872017-04-18 11:50:53 -0700718
719#if CONFIG_CHROMA_SUB8X8
720 const int bw = mi_size_wide[bsize];
721 const int bh = mi_size_high[bsize];
722
723 int ref_pos = ((mi_row & 0x01) || !(bh & 0x01) || !subsampling_y) &&
724 ((mi_col & 0x01) || !(bw & 0x01) || !subsampling_x);
725
726 return ref_pos;
727#else
Jingning Hand3a64432017-04-06 17:04:17 -0700728 int ref_pos = !(((mi_row & 0x01) && subsampling_y) ||
729 ((mi_col & 0x01) && subsampling_x));
730
731 if (bsize >= BLOCK_8X8) ref_pos = 1;
732
733 return ref_pos;
Jingning Hane2db3872017-04-18 11:50:53 -0700734#endif
Jingning Han18c53c82017-02-17 14:49:57 -0800735}
Jingning Han2d2dac22017-04-11 09:41:10 -0700736
Yaowu Xu4ff59b52017-04-24 12:41:56 -0700737static INLINE BLOCK_SIZE scale_chroma_bsize(BLOCK_SIZE bsize, int subsampling_x,
738 int subsampling_y) {
Jingning Han2d2dac22017-04-11 09:41:10 -0700739 BLOCK_SIZE bs = bsize;
740
741 if (bs < BLOCK_8X8) {
742 if (subsampling_x == 1 && subsampling_y == 1)
743 bs = BLOCK_8X8;
744 else if (subsampling_x == 1)
745 bs = BLOCK_8X4;
746 else if (subsampling_y == 1)
747 bs = BLOCK_4X8;
748 }
749
750 return bs;
751}
Jingning Han18c53c82017-02-17 14:49:57 -0800752#endif
753
Yaowu Xuc27fc142016-08-22 16:08:15 -0700754#if CONFIG_EXT_PARTITION_TYPES
// Update the above/left partition contexts for the extended partition
// types, which split a block into asymmetric sub-blocks (HORZ_A/B,
// VERT_A/B) that must each be recorded separately.
static INLINE void update_ext_partition_context(MACROBLOCKD *xd, int mi_row,
                                                int mi_col, BLOCK_SIZE subsize,
                                                BLOCK_SIZE bsize,
                                                PARTITION_TYPE partition) {
  if (bsize >= BLOCK_8X8) {
    const int hbs = mi_size_wide[bsize] / 2;
    BLOCK_SIZE bsize2 = get_subsize(bsize, PARTITION_SPLIT);
    switch (partition) {
      case PARTITION_SPLIT:
        // An 8x8 SPLIT is terminal and recorded like a simple partition;
        // larger SPLITs are handled recursively by the sub-blocks.
        if (bsize != BLOCK_8X8) break;
      /* fallthrough */
      case PARTITION_NONE:
      case PARTITION_HORZ:
      case PARTITION_VERT:
        update_partition_context(xd, mi_row, mi_col, subsize, bsize);
        break;
      case PARTITION_HORZ_A:
        update_partition_context(xd, mi_row, mi_col, bsize2, subsize);
        update_partition_context(xd, mi_row + hbs, mi_col, subsize, subsize);
        break;
      case PARTITION_HORZ_B:
        update_partition_context(xd, mi_row, mi_col, subsize, subsize);
        update_partition_context(xd, mi_row + hbs, mi_col, bsize2, subsize);
        break;
      case PARTITION_VERT_A:
        update_partition_context(xd, mi_row, mi_col, bsize2, subsize);
        update_partition_context(xd, mi_row, mi_col + hbs, subsize, subsize);
        break;
      case PARTITION_VERT_B:
        update_partition_context(xd, mi_row, mi_col, subsize, subsize);
        update_partition_context(xd, mi_row, mi_col + hbs, bsize2, subsize);
        break;
      default: assert(0 && "Invalid partition type");
    }
  }
}
790#endif // CONFIG_EXT_PARTITION_TYPES
791
// Derive the entropy context index for coding the partition type at
// (mi_row, mi_col) for the given block size, from the above/left partition
// context arrays. The context combines the block-size level (bsl) with one
// bit each from the above and left neighbours.
static INLINE int partition_plane_context(const MACROBLOCKD *xd, int mi_row,
                                          int mi_col,
#if CONFIG_UNPOISON_PARTITION_CTX
                                          int has_rows, int has_cols,
#endif
                                          BLOCK_SIZE bsize) {
#if CONFIG_UNPOISON_PARTITION_CTX
  const PARTITION_CONTEXT *above_ctx = xd->above_seg_context + mi_col;
  const PARTITION_CONTEXT *left_ctx =
      xd->left_seg_context + (mi_row & MAX_MIB_MASK);
  // Minimum partition point is 8x8. Offset the bsl accordingly.
  const int bsl = mi_width_log2_lookup[bsize] - mi_width_log2_lookup[BLOCK_8X8];
  int above = (*above_ctx >> bsl) & 1, left = (*left_ctx >> bsl) & 1;

  // Partition context is only defined for square block sizes.
  assert(b_width_log2_lookup[bsize] == b_height_log2_lookup[bsize]);
  assert(bsl >= 0);

  // has_rows/has_cols indicate whether the block fits inside the frame in
  // each direction; blocks overhanging an edge get dedicated contexts.
  if (has_rows && has_cols)
    return (left * 2 + above) + bsl * PARTITION_PLOFFSET;
  else if (has_rows && !has_cols)
    return PARTITION_CONTEXTS_PRIMARY + bsl;
  else if (!has_rows && has_cols)
    return PARTITION_CONTEXTS_PRIMARY + PARTITION_BLOCK_SIZES + bsl;
  else
    return PARTITION_CONTEXTS;  // Bogus context, forced SPLIT
#else
  const PARTITION_CONTEXT *above_ctx = xd->above_seg_context + mi_col;
  const PARTITION_CONTEXT *left_ctx =
      xd->left_seg_context + (mi_row & MAX_MIB_MASK);
  // Minimum partition point is 8x8. Offset the bsl accordingly.
  const int bsl = mi_width_log2_lookup[bsize] - mi_width_log2_lookup[BLOCK_8X8];
  int above = (*above_ctx >> bsl) & 1, left = (*left_ctx >> bsl) & 1;

  // Partition context is only defined for square block sizes.
  assert(b_width_log2_lookup[bsize] == b_height_log2_lookup[bsize]);
  assert(bsl >= 0);

  return (left * 2 + above) + bsl * PARTITION_PLOFFSET;
#endif
}
831
Yaowu Xu4ff59b52017-04-24 12:41:56 -0700832static INLINE int max_block_wide(const MACROBLOCKD *xd, BLOCK_SIZE bsize,
833 int plane) {
Jingning Hanf65b8702016-10-31 12:13:20 -0700834 int max_blocks_wide = block_size_wide[bsize];
835 const struct macroblockd_plane *const pd = &xd->plane[plane];
836
837 if (xd->mb_to_right_edge < 0)
838 max_blocks_wide += xd->mb_to_right_edge >> (3 + pd->subsampling_x);
839
840 // Scale the width in the transform block unit.
841 return max_blocks_wide >> tx_size_wide_log2[0];
842}
843
Yaowu Xu4ff59b52017-04-24 12:41:56 -0700844static INLINE int max_block_high(const MACROBLOCKD *xd, BLOCK_SIZE bsize,
845 int plane) {
Jingning Hanf65b8702016-10-31 12:13:20 -0700846 int max_blocks_high = block_size_high[bsize];
847 const struct macroblockd_plane *const pd = &xd->plane[plane];
848
849 if (xd->mb_to_bottom_edge < 0)
850 max_blocks_high += xd->mb_to_bottom_edge >> (3 + pd->subsampling_y);
851
852 // Scale the width in the transform block unit.
853 return max_blocks_high >> tx_size_wide_log2[0];
854}
855
// Reset the above-row entropy/segmentation (and, with VAR_TX, transform)
// contexts for the mi-column range [mi_col_start, mi_col_end), e.g. at the
// start of a tile row. The range is widened to a superblock-aligned width.
static INLINE void av1_zero_above_context(AV1_COMMON *const cm,
                                          int mi_col_start, int mi_col_end) {
  const int width = mi_col_end - mi_col_start;
  const int aligned_width = ALIGN_POWER_OF_TWO(width, cm->mib_size_log2);

  // Luma above_context is indexed at twice the mi-column granularity;
  // chroma planes are additionally scaled by the subsampling factor.
  const int offset_y = 2 * mi_col_start;
  const int width_y = 2 * aligned_width;
  const int offset_uv = offset_y >> cm->subsampling_x;
  const int width_uv = width_y >> cm->subsampling_x;

  av1_zero_array(cm->above_context[0] + offset_y, width_y);
  av1_zero_array(cm->above_context[1] + offset_uv, width_uv);
  av1_zero_array(cm->above_context[2] + offset_uv, width_uv);

  av1_zero_array(cm->above_seg_context + mi_col_start, aligned_width);

#if CONFIG_VAR_TX
  av1_zero_array(cm->above_txfm_context + mi_col_start, aligned_width);
#endif  // CONFIG_VAR_TX
}
876
// Reset the left-column entropy/segmentation (and, with VAR_TX, transform)
// contexts, typically at the start of each superblock row.
static INLINE void av1_zero_left_context(MACROBLOCKD *const xd) {
  av1_zero(xd->left_context);
  av1_zero(xd->left_seg_context);
#if CONFIG_VAR_TX
  av1_zero(xd->left_txfm_context_buffer);
#endif
}
884
885#if CONFIG_VAR_TX
Yaowu Xu4ff59b52017-04-24 12:41:56 -0700886static INLINE TX_SIZE get_min_tx_size(TX_SIZE tx_size) {
Jingning Hane67b38a2016-11-04 10:30:00 -0700887 if (tx_size >= TX_SIZES_ALL) assert(0);
888 return txsize_sqr_map[tx_size];
889}
890
Jingning Han8b9478a2016-11-01 15:43:23 -0700891static INLINE void set_txfm_ctx(TXFM_CONTEXT *txfm_ctx, uint8_t txs, int len) {
Yaowu Xuc27fc142016-08-22 16:08:15 -0700892 int i;
Jingning Han8b9478a2016-11-01 15:43:23 -0700893 for (i = 0; i < len; ++i) txfm_ctx[i] = txs;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700894}
895
// Initialize the above/left transform-size contexts for a block that spans
// n8_w x n8_h mi units. For skipped blocks the contexts are set to the full
// block dimensions instead of the chosen transform size.
static INLINE void set_txfm_ctxs(TX_SIZE tx_size, int n8_w, int n8_h, int skip,
                                 const MACROBLOCKD *xd) {
  uint8_t bw = tx_size_wide[tx_size];
  uint8_t bh = tx_size_high[tx_size];

  if (skip) {
    // NOTE(review): n8_w * MI_SIZE is assumed to fit in uint8_t for all
    // legal block sizes — confirm against the largest configured block.
    bw = n8_w * MI_SIZE;
    bh = n8_h * MI_SIZE;
  }

  set_txfm_ctx(xd->above_txfm_context, bw, n8_w);
  set_txfm_ctx(xd->left_txfm_context, bh, n8_h);
}
909
910static INLINE void txfm_partition_update(TXFM_CONTEXT *above_ctx,
911 TXFM_CONTEXT *left_ctx,
Jingning Han581d1692017-01-05 16:03:54 -0800912 TX_SIZE tx_size, TX_SIZE txb_size) {
913 BLOCK_SIZE bsize = txsize_to_bsize[txb_size];
Jingning Hanc709e1f2016-12-06 14:48:09 -0800914 int bh = mi_size_high[bsize];
915 int bw = mi_size_wide[bsize];
Jingning Han8b9478a2016-11-01 15:43:23 -0700916 uint8_t txw = tx_size_wide[tx_size];
917 uint8_t txh = tx_size_high[tx_size];
Yaowu Xuc27fc142016-08-22 16:08:15 -0700918 int i;
Jingning Han8b9478a2016-11-01 15:43:23 -0700919 for (i = 0; i < bh; ++i) left_ctx[i] = txh;
920 for (i = 0; i < bw; ++i) above_ctx[i] = txw;
Yaowu Xuc27fc142016-08-22 16:08:15 -0700921}
922
// Derive the entropy context for coding a recursive transform-size split:
// combines a category based on the block's maximum transform size with one
// bit each from the above/left contexts (whether the neighbouring transform
// is smaller than the candidate size).
static INLINE int txfm_partition_context(TXFM_CONTEXT *above_ctx,
                                         TXFM_CONTEXT *left_ctx,
                                         BLOCK_SIZE bsize, TX_SIZE tx_size) {
  const uint8_t txw = tx_size_wide[tx_size];
  const uint8_t txh = tx_size_high[tx_size];
  const int above = *above_ctx < txw;
  const int left = *left_ctx < txh;
  TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  int category = TXFM_PARTITION_CONTEXTS - 1;

  // dummy return, not used by others.
  if (tx_size <= TX_4X4) return 0;

  // Recompute max_tx_size from the larger block dimension; this overrides
  // the lookup value above (which is only a fallback initializer).
  switch (AOMMAX(block_size_wide[bsize], block_size_high[bsize])) {
#if CONFIG_EXT_PARTITION
    case 128:
#endif
    case 64:
    case 32: max_tx_size = TX_32X32; break;
    case 16: max_tx_size = TX_16X16; break;
    case 8: max_tx_size = TX_8X8; break;
    default: assert(0);
  }

  if (max_tx_size >= TX_8X8) {
    // Two categories per max size: "at max size" vs "already split once"
    // (the latter only exists above 8x8).
    category = (tx_size != max_tx_size && max_tx_size > TX_8X8) +
               (TX_SIZES - 1 - max_tx_size) * 2;
  }
  // Last category is a standalone context with no neighbour contribution.
  if (category == TXFM_PARTITION_CONTEXTS - 1) return category;
  return category * 3 + above + left;
}
954#endif
955
// Recover the partition type used at (mi_row, mi_col) for the given block
// size from the coded mode info. With EXT_PARTITION_TYPES the AB shapes are
// not stored directly and must be reconstructed by inspecting the sb_type
// of the top-left, right-half and bottom-half mode infos.
static INLINE PARTITION_TYPE get_partition(const AV1_COMMON *const cm,
                                           int mi_row, int mi_col,
                                           BLOCK_SIZE bsize) {
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols) {
    return PARTITION_INVALID;
  } else {
    const int offset = mi_row * cm->mi_stride + mi_col;
    MODE_INFO **mi = cm->mi_grid_visible + offset;
    const MB_MODE_INFO *const mbmi = &mi[0]->mbmi;
    const int bsl = b_width_log2_lookup[bsize];
    const PARTITION_TYPE partition = partition_lookup[bsl][mbmi->sb_type];
#if !CONFIG_EXT_PARTITION_TYPES
    return partition;
#else
    const int hbs = mi_size_wide[bsize] / 2;

    assert(cm->mi_grid_visible[offset] == &cm->mi[offset]);

    // Only interior blocks (both halves inside the frame) can carry the
    // extended AB partition shapes; edge blocks keep the lookup result.
    if (partition != PARTITION_NONE && bsize > BLOCK_8X8 &&
        mi_row + hbs < cm->mi_rows && mi_col + hbs < cm->mi_cols) {
      const BLOCK_SIZE h = get_subsize(bsize, PARTITION_HORZ_A);
      const BLOCK_SIZE v = get_subsize(bsize, PARTITION_VERT_A);
      const MB_MODE_INFO *const mbmi_right = &mi[hbs]->mbmi;
      const MB_MODE_INFO *const mbmi_below = &mi[hbs * cm->mi_stride]->mbmi;
      if (mbmi->sb_type == h) {
        // Top-left piece is a horizontal half: HORZ if the bottom matches,
        // otherwise the bottom half is further split (HORZ_B).
        return mbmi_below->sb_type == h ? PARTITION_HORZ : PARTITION_HORZ_B;
      } else if (mbmi->sb_type == v) {
        // Top-left piece is a vertical half: VERT if the right matches,
        // otherwise the right half is further split (VERT_B).
        return mbmi_right->sb_type == v ? PARTITION_VERT : PARTITION_VERT_B;
      } else if (mbmi_below->sb_type == h) {
        return PARTITION_HORZ_A;
      } else if (mbmi_right->sb_type == v) {
        return PARTITION_VERT_A;
      } else {
        return PARTITION_SPLIT;
      }
    }

    return partition;
#endif  // !CONFIG_EXT_PARTITION_TYPES
  }
}
997
// Set the superblock size and the derived mi-unit width (mib_size) and its
// log2. With CB4X4 the mi unit is 4x4, so the log2 comes from the b-unit
// lookup; otherwise from the 8x8 mi-unit lookup.
static INLINE void set_sb_size(AV1_COMMON *const cm, BLOCK_SIZE sb_size) {
  cm->sb_size = sb_size;
  cm->mib_size = mi_size_wide[cm->sb_size];
#if CONFIG_CB4X4
  cm->mib_size_log2 = b_width_log2_lookup[cm->sb_size];
#else
  cm->mib_size_log2 = mi_width_log2_lookup[cm->sb_size];
#endif
}
1007
1008#ifdef __cplusplus
1009} // extern "C"
1010#endif
1011
Yaowu Xuf883b422016-08-30 14:01:10 -07001012#endif // AV1_COMMON_ONYXC_INT_H_