/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <math.h>

#include "./aom_config.h"
#include "./aom_dsp_rtcd.h"
#include "./aom_scale_rtcd.h"
#include "av1/common/onyxc_int.h"
#include "av1/common/restoration.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"

static int domaintxfmrf_vtable[DOMAINTXFMRF_ITERS][DOMAINTXFMRF_PARAMS][256];

// Whether to filter only y or not
static const int override_y_only[RESTORE_TYPES] = { 1, 1, 1, 1, 1 };

static const int domaintxfmrf_params[DOMAINTXFMRF_PARAMS] = {
  48,  52,  56,  60,  64,  68,  72,  76,  80,  82,  84,  86,  88,
  90,  92,  94,  96,  97,  98,  99,  100, 101, 102, 103, 104, 105,
  106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118,
  119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 130, 132, 134,
  136, 138, 140, 142, 146, 150, 154, 158, 162, 166, 170, 174
};

const sgr_params_type sgr_params[SGRPROJ_PARAMS] = {
  // r1, eps1, r2, eps2
  { 2, 27, 1, 11 }, { 2, 31, 1, 12 }, { 2, 37, 1, 12 }, { 2, 44, 1, 12 },
  { 2, 49, 1, 13 }, { 2, 54, 1, 14 }, { 2, 60, 1, 15 }, { 2, 68, 1, 15 },
};

typedef void (*restore_func_type)(uint8_t *data8, int width, int height,
                                  int stride, RestorationInternal *rst,
                                  uint8_t *dst8, int dst_stride);
#if CONFIG_AOM_HIGHBITDEPTH
typedef void (*restore_func_highbd_type)(uint8_t *data8, int width, int height,
                                         int stride, RestorationInternal *rst,
                                         int bit_depth, uint8_t *dst8,
                                         int dst_stride);
#endif  // CONFIG_AOM_HIGHBITDEPTH

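// Precompute the recursive-filter weight table: for each iteration, each
// sigma_r parameter and each absolute guide-pixel difference k, store
// DOMAINTXFMRF_VTABLE_PREC * pow(A, 1 + k * sigma_s / sigma_r) so the
// per-pixel domain-transform filtering below reduces to a table lookup.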
static void GenDomainTxfmRFVtable() {
  int i, j;
  const double sigma_s = sqrt(2.0);
  for (i = 0; i < DOMAINTXFMRF_ITERS; ++i) {
    const int nm = (1 << (DOMAINTXFMRF_ITERS - i - 1));
    const double A = exp(-DOMAINTXFMRF_MULT / (sigma_s * nm));
    for (j = 0; j < DOMAINTXFMRF_PARAMS; ++j) {
      const double sigma_r =
          (double)domaintxfmrf_params[j] / DOMAINTXFMRF_SIGMA_SCALE;
      const double scale = sigma_s / sigma_r;
      int k;
      for (k = 0; k < 256; ++k) {
        domaintxfmrf_vtable[i][j][k] =
            RINT(DOMAINTXFMRF_VTABLE_PREC * pow(A, 1.0 + k * scale));
      }
    }
  }
}

void av1_loop_restoration_precal() { GenDomainTxfmRFVtable(); }

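// Set up the restoration tiling for the frame and, for Wiener-filtered tiles,
// expand the stored half-window taps into full symmetric kernels whose taps
// sum to RESTORATION_FILT_STEP (the centre tap absorbs the remainder).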
void av1_loop_restoration_init(RestorationInternal *rst, RestorationInfo *rsi,
                               int kf, int width, int height) {
  int i, tile_idx;
  rst->rsi = rsi;
  rst->keyframe = kf;
  rst->subsampling_x = 0;
  rst->subsampling_y = 0;
  rst->ntiles =
      av1_get_rest_ntiles(width, height, &rst->tile_width, &rst->tile_height,
                          &rst->nhtiles, &rst->nvtiles);
  if (rsi->frame_restoration_type == RESTORE_WIENER) {
    for (tile_idx = 0; tile_idx < rst->ntiles; ++tile_idx) {
      if (rsi->wiener_info[tile_idx].level) {
        rsi->wiener_info[tile_idx].vfilter[RESTORATION_HALFWIN] =
            rsi->wiener_info[tile_idx].hfilter[RESTORATION_HALFWIN] =
                RESTORATION_FILT_STEP;
        for (i = 0; i < RESTORATION_HALFWIN; ++i) {
          rsi->wiener_info[tile_idx].vfilter[RESTORATION_WIN - 1 - i] =
              rsi->wiener_info[tile_idx].vfilter[i];
          rsi->wiener_info[tile_idx].hfilter[RESTORATION_WIN - 1 - i] =
              rsi->wiener_info[tile_idx].hfilter[i];
          rsi->wiener_info[tile_idx].vfilter[RESTORATION_HALFWIN] -=
              2 * rsi->wiener_info[tile_idx].vfilter[i];
          rsi->wiener_info[tile_idx].hfilter[RESTORATION_HALFWIN] -=
              2 * rsi->wiener_info[tile_idx].hfilter[i];
        }
      }
    }
  } else if (rsi->frame_restoration_type == RESTORE_SWITCHABLE) {
    for (tile_idx = 0; tile_idx < rst->ntiles; ++tile_idx) {
      if (rsi->restoration_type[tile_idx] == RESTORE_WIENER) {
        rsi->wiener_info[tile_idx].vfilter[RESTORATION_HALFWIN] =
            rsi->wiener_info[tile_idx].hfilter[RESTORATION_HALFWIN] =
                RESTORATION_FILT_STEP;
        for (i = 0; i < RESTORATION_HALFWIN; ++i) {
          rsi->wiener_info[tile_idx].vfilter[RESTORATION_WIN - 1 - i] =
              rsi->wiener_info[tile_idx].vfilter[i];
          rsi->wiener_info[tile_idx].hfilter[RESTORATION_WIN - 1 - i] =
              rsi->wiener_info[tile_idx].hfilter[i];
          rsi->wiener_info[tile_idx].vfilter[RESTORATION_HALFWIN] -=
              2 * rsi->wiener_info[tile_idx].vfilter[i];
          rsi->wiener_info[tile_idx].hfilter[RESTORATION_HALFWIN] -=
              2 * rsi->wiener_info[tile_idx].hfilter[i];
        }
      }
    }
  }
}

// Some filters do not write the outermost RESTORATION_HALFWIN pixels,
// so copy them over explicitly.
static void copy_border(uint8_t *data, int width, int height, int stride,
                        uint8_t *dst, int dst_stride) {
  int i;
  for (i = RESTORATION_HALFWIN; i < height - RESTORATION_HALFWIN; ++i) {
    memcpy(dst + i * dst_stride, data + i * stride, RESTORATION_HALFWIN);
    memcpy(dst + i * dst_stride + (width - RESTORATION_HALFWIN),
           data + i * stride + (width - RESTORATION_HALFWIN),
           RESTORATION_HALFWIN);
  }
  for (i = 0; i < RESTORATION_HALFWIN; ++i) {
    memcpy(dst + i * dst_stride, data + i * stride, width);
  }
  for (i = height - RESTORATION_HALFWIN; i < height; ++i)
    memcpy(dst + i * dst_stride, data + i * stride, width);
}

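// Replicate the outermost rows and columns into a RESTORATION_HALFWIN-pixel
// border around the frame so the filters below can read past the visible
// frame edges.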
static void extend_frame(uint8_t *data, int width, int height, int stride) {
  uint8_t *data_p;
  int i;
  for (i = 0; i < height; ++i) {
    data_p = data + i * stride;
    memset(data_p - RESTORATION_HALFWIN, data_p[0], RESTORATION_HALFWIN);
    memset(data_p + width, data_p[width - 1], RESTORATION_HALFWIN);
  }
  data_p = data - RESTORATION_HALFWIN;
  for (i = -RESTORATION_HALFWIN; i < 0; ++i) {
    memcpy(data_p + i * stride, data_p, width + 2 * RESTORATION_HALFWIN);
  }
  for (i = height; i < height + RESTORATION_HALFWIN; ++i) {
    memcpy(data_p + i * stride, data_p + (height - 1) * stride,
           width + 2 * RESTORATION_HALFWIN);
  }
}

static void loop_copy_tile(uint8_t *data, int tile_idx, int subtile_idx,
                           int subtile_bits, int width, int height, int stride,
                           RestorationInternal *rst, uint8_t *dst,
                           int dst_stride) {
  const int tile_width = rst->tile_width >> rst->subsampling_x;
  const int tile_height = rst->tile_height >> rst->subsampling_y;
  int i;
  int h_start, h_end, v_start, v_end;
  av1_get_rest_tile_limits(tile_idx, subtile_idx, subtile_bits, rst->nhtiles,
                           rst->nvtiles, tile_width, tile_height, width, height,
                           0, 0, &h_start, &h_end, &v_start, &v_end);
  for (i = v_start; i < v_end; ++i)
    memcpy(dst + i * dst_stride + h_start, data + i * stride + h_start,
           h_end - h_start);
}

static void loop_wiener_filter_tile(uint8_t *data, int tile_idx, int width,
                                    int height, int stride,
                                    RestorationInternal *rst, uint8_t *dst,
                                    int dst_stride) {
  const int tile_width = rst->tile_width >> rst->subsampling_x;
  const int tile_height = rst->tile_height >> rst->subsampling_y;
  int i, j;
  int h_start, h_end, v_start, v_end;
  DECLARE_ALIGNED(16, InterpKernel, hkernel);
  DECLARE_ALIGNED(16, InterpKernel, vkernel);

  if (rst->rsi->wiener_info[tile_idx].level == 0) {
    loop_copy_tile(data, tile_idx, 0, 0, width, height, stride, rst, dst,
                   dst_stride);
    return;
  }
  // TODO(david.barker): Store hfilter/vfilter as an InterpKernel
  // instead of the current format. Then this can be removed.
  assert(RESTORATION_WIN == SUBPEL_TAPS - 1);
  for (i = 0; i < RESTORATION_WIN; ++i) {
    hkernel[i] = rst->rsi->wiener_info[tile_idx].hfilter[i];
    vkernel[i] = rst->rsi->wiener_info[tile_idx].vfilter[i];
  }
  hkernel[RESTORATION_WIN] = 0;
  vkernel[RESTORATION_WIN] = 0;
  av1_get_rest_tile_limits(tile_idx, 0, 0, rst->nhtiles, rst->nvtiles,
                           tile_width, tile_height, width, height, 0, 0,
                           &h_start, &h_end, &v_start, &v_end);
  // Convolve the whole tile (done in blocks here to match the requirements
  // of the vectorized convolve functions, but the result is equivalent)
  for (i = v_start; i < v_end; i += MAX_SB_SIZE)
    for (j = h_start; j < h_end; j += MAX_SB_SIZE) {
      int w = AOMMIN(MAX_SB_SIZE, (h_end - j + 15) & ~15);
      int h = AOMMIN(MAX_SB_SIZE, (v_end - i + 15) & ~15);
      const uint8_t *data_p = data + i * stride + j;
      uint8_t *dst_p = dst + i * dst_stride + j;
      aom_convolve8(data_p, stride, dst_p, dst_stride, hkernel, 16, vkernel, 16,
                    w, h);
    }
}

static void loop_wiener_filter(uint8_t *data, int width, int height, int stride,
                               RestorationInternal *rst, uint8_t *dst,
                               int dst_stride) {
  int tile_idx;
  extend_frame(data, width, height, stride);
  for (tile_idx = 0; tile_idx < rst->ntiles; ++tile_idx) {
    loop_wiener_filter_tile(data, tile_idx, width, height, stride, rst, dst,
                            dst_stride);
  }
}

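// For every pixel, compute the sum of the source values (or of their squares
// if sqr is set) over a (2r + 1) x (2r + 1) box centred on the pixel, using
// column-wise then row-wise running sums. Boxes that would extend past the
// frame boundary are clamped to it.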
static void boxsum(int64_t *src, int width, int height, int src_stride, int r,
                   int sqr, int64_t *dst, int dst_stride, int64_t *tmp,
                   int tmp_stride) {
  int i, j;

  if (sqr) {
    for (j = 0; j < width; ++j) tmp[j] = src[j] * src[j];
    for (j = 0; j < width; ++j)
      for (i = 1; i < height; ++i)
        tmp[i * tmp_stride + j] =
            tmp[(i - 1) * tmp_stride + j] +
            src[i * src_stride + j] * src[i * src_stride + j];
  } else {
    memcpy(tmp, src, sizeof(*tmp) * width);
    for (j = 0; j < width; ++j)
      for (i = 1; i < height; ++i)
        tmp[i * tmp_stride + j] =
            tmp[(i - 1) * tmp_stride + j] + src[i * src_stride + j];
  }
  for (i = 0; i <= r; ++i)
    memcpy(&dst[i * dst_stride], &tmp[(i + r) * tmp_stride],
           sizeof(*tmp) * width);
  for (i = r + 1; i < height - r; ++i)
    for (j = 0; j < width; ++j)
      dst[i * dst_stride + j] =
          tmp[(i + r) * tmp_stride + j] - tmp[(i - r - 1) * tmp_stride + j];
  for (i = height - r; i < height; ++i)
    for (j = 0; j < width; ++j)
      dst[i * dst_stride + j] = tmp[(height - 1) * tmp_stride + j] -
                                tmp[(i - r - 1) * tmp_stride + j];

  for (i = 0; i < height; ++i) tmp[i * tmp_stride] = dst[i * dst_stride];
  for (i = 0; i < height; ++i)
    for (j = 1; j < width; ++j)
      tmp[i * tmp_stride + j] =
          tmp[i * tmp_stride + j - 1] + dst[i * src_stride + j];

  for (j = 0; j <= r; ++j)
    for (i = 0; i < height; ++i)
      dst[i * dst_stride + j] = tmp[i * tmp_stride + j + r];
  for (j = r + 1; j < width - r; ++j)
    for (i = 0; i < height; ++i)
      dst[i * dst_stride + j] =
          tmp[i * tmp_stride + j + r] - tmp[i * tmp_stride + j - r - 1];
  for (j = width - r; j < width; ++j)
    for (i = 0; i < height; ++i)
      dst[i * dst_stride + j] =
          tmp[i * tmp_stride + width - 1] - tmp[i * tmp_stride + j - r - 1];
}

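// For every pixel, compute how many source pixels actually fall inside its
// (2r + 1) x (2r + 1) box after clamping to the frame boundary; used to turn
// the box sums above into means.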
static void boxnum(int width, int height, int r, int8_t *num, int num_stride) {
  int i, j;
  for (i = 0; i <= AOMMIN(r, height - 1); ++i) {
    for (j = 0; j <= AOMMIN(r, width - 1); ++j) {
      num[i * num_stride + j] =
          AOMMIN(r + 1 + i, height) * AOMMIN(r + 1 + j, width);
      num[i * num_stride + (width - 1 - j)] = num[i * num_stride + j];
      num[(height - 1 - i) * num_stride + j] = num[i * num_stride + j];
      num[(height - 1 - i) * num_stride + (width - 1 - j)] =
          num[i * num_stride + j];
    }
  }
  for (j = 0; j <= AOMMIN(r, width - 1); ++j) {
    const int val = AOMMIN(2 * r + 1, height) * AOMMIN(r + 1 + j, width);
    for (i = r + 1; i < height - r; ++i) {
      num[i * num_stride + j] = val;
      num[i * num_stride + (width - 1 - j)] = val;
    }
  }
  for (i = 0; i <= AOMMIN(r, height - 1); ++i) {
    const int val = AOMMIN(2 * r + 1, width) * AOMMIN(r + 1 + i, height);
    for (j = r + 1; j < width - r; ++j) {
      num[i * num_stride + j] = val;
      num[(height - 1 - i) * num_stride + j] = val;
    }
  }
  for (i = r + 1; i < height - r; ++i) {
    for (j = r + 1; j < width - r; ++j) {
      num[i * num_stride + j] =
          AOMMIN(2 * r + 1, height) * AOMMIN(2 * r + 1, width);
    }
  }
}

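// Convert the coded values xqd into the projection weights xq applied to the
// two self-guided filter outputs; the remaining weight on the unfiltered
// signal is implicit, so the three weights sum to (1 << SGRPROJ_PRJ_BITS).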
void decode_xq(int *xqd, int *xq) {
  xq[0] = -xqd[0];
  xq[1] = (1 << SGRPROJ_PRJ_BITS) - xq[0] - xqd[1];
}

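// Self-guided filter core. Box statistics of the degraded signal give a
// per-pixel linear model: A (the signal weight, scaled by SGRPROJ_SGR) and
// B = (SGRPROJ_SGR - A) * mean. With APPROXIMATE_SGR the model coefficients
// are smoothed with a small weighted window (corners, edges and interior
// handled separately) instead of a second exact box filter, and the result
// A * x + B overwrites dgd with SGRPROJ_RST_BITS of extra precision.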
#define APPROXIMATE_SGR 1
void av1_selfguided_restoration(int64_t *dgd, int width, int height, int stride,
                                int bit_depth, int r, int eps, void *tmpbuf) {
  int64_t *A = (int64_t *)tmpbuf;
  int64_t *B = A + RESTORATION_TILEPELS_MAX;
  int64_t *T = B + RESTORATION_TILEPELS_MAX;
  int8_t num[RESTORATION_TILEPELS_MAX];
  int i, j;
  eps <<= 2 * (bit_depth - 8);

  boxsum(dgd, width, height, stride, r, 0, B, width, T, width);
  boxsum(dgd, width, height, stride, r, 1, A, width, T, width);
  boxnum(width, height, r, num, width);
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      const int k = i * width + j;
      const int n = num[k];
      int64_t den;
      A[k] = A[k] * n - B[k] * B[k];
      den = A[k] + n * n * eps;
      A[k] = ((A[k] << SGRPROJ_SGR_BITS) + (den >> 1)) / den;
      B[k] = ((SGRPROJ_SGR - A[k]) * B[k] + (n >> 1)) / n;
    }
  }
#if APPROXIMATE_SGR
  i = 0;
  j = 0;
  {
    const int k = i * width + j;
    const int l = i * stride + j;
    const int nb = 3;
    const int64_t a =
        3 * A[k] + 2 * A[k + 1] + 2 * A[k + width] + A[k + width + 1];
    const int64_t b =
        3 * B[k] + 2 * B[k + 1] + 2 * B[k + width] + B[k + width + 1];
    const int64_t v =
        (((a * dgd[l] + b) << SGRPROJ_RST_BITS) + (1 << nb) / 2) >> nb;
    dgd[l] = ROUND_POWER_OF_TWO(v, SGRPROJ_SGR_BITS);
  }
  i = 0;
  j = width - 1;
  {
    const int k = i * width + j;
    const int l = i * stride + j;
    const int nb = 3;
    const int64_t a =
        3 * A[k] + 2 * A[k - 1] + 2 * A[k + width] + A[k + width - 1];
    const int64_t b =
        3 * B[k] + 2 * B[k - 1] + 2 * B[k + width] + B[k + width - 1];
    const int64_t v =
        (((a * dgd[l] + b) << SGRPROJ_RST_BITS) + (1 << nb) / 2) >> nb;
    dgd[l] = ROUND_POWER_OF_TWO(v, SGRPROJ_SGR_BITS);
  }
  i = height - 1;
  j = 0;
  {
    const int k = i * width + j;
    const int l = i * stride + j;
    const int nb = 3;
    const int64_t a =
        3 * A[k] + 2 * A[k + 1] + 2 * A[k - width] + A[k - width + 1];
    const int64_t b =
        3 * B[k] + 2 * B[k + 1] + 2 * B[k - width] + B[k - width + 1];
    const int64_t v =
        (((a * dgd[l] + b) << SGRPROJ_RST_BITS) + (1 << nb) / 2) >> nb;
    dgd[l] = ROUND_POWER_OF_TWO(v, SGRPROJ_SGR_BITS);
  }
  i = height - 1;
  j = width - 1;
  {
    const int k = i * width + j;
    const int l = i * stride + j;
    const int nb = 3;
    const int64_t a =
        3 * A[k] + 2 * A[k - 1] + 2 * A[k - width] + A[k - width - 1];
    const int64_t b =
        3 * B[k] + 2 * B[k - 1] + 2 * B[k - width] + B[k - width - 1];
    const int64_t v =
        (((a * dgd[l] + b) << SGRPROJ_RST_BITS) + (1 << nb) / 2) >> nb;
    dgd[l] = ROUND_POWER_OF_TWO(v, SGRPROJ_SGR_BITS);
  }
  i = 0;
  for (j = 1; j < width - 1; ++j) {
    const int k = i * width + j;
    const int l = i * stride + j;
    const int nb = 3;
    const int64_t a = A[k] + 2 * (A[k - 1] + A[k + 1]) + A[k + width] +
                      A[k + width - 1] + A[k + width + 1];
    const int64_t b = B[k] + 2 * (B[k - 1] + B[k + 1]) + B[k + width] +
                      B[k + width - 1] + B[k + width + 1];
    const int64_t v =
        (((a * dgd[l] + b) << SGRPROJ_RST_BITS) + (1 << nb) / 2) >> nb;
    dgd[l] = ROUND_POWER_OF_TWO(v, SGRPROJ_SGR_BITS);
  }
  i = height - 1;
  for (j = 1; j < width - 1; ++j) {
    const int k = i * width + j;
    const int l = i * stride + j;
    const int nb = 3;
    const int64_t a = A[k] + 2 * (A[k - 1] + A[k + 1]) + A[k - width] +
                      A[k - width - 1] + A[k - width + 1];
    const int64_t b = B[k] + 2 * (B[k - 1] + B[k + 1]) + B[k - width] +
                      B[k - width - 1] + B[k - width + 1];
    const int64_t v =
        (((a * dgd[l] + b) << SGRPROJ_RST_BITS) + (1 << nb) / 2) >> nb;
    dgd[l] = ROUND_POWER_OF_TWO(v, SGRPROJ_SGR_BITS);
  }
  j = 0;
  for (i = 1; i < height - 1; ++i) {
    const int k = i * width + j;
    const int l = i * stride + j;
    const int nb = 3;
    const int64_t a = A[k] + 2 * (A[k - width] + A[k + width]) + A[k + 1] +
                      A[k - width + 1] + A[k + width + 1];
    const int64_t b = B[k] + 2 * (B[k - width] + B[k + width]) + B[k + 1] +
                      B[k - width + 1] + B[k + width + 1];
    const int64_t v =
        (((a * dgd[l] + b) << SGRPROJ_RST_BITS) + (1 << nb) / 2) >> nb;
    dgd[l] = ROUND_POWER_OF_TWO(v, SGRPROJ_SGR_BITS);
  }
  j = width - 1;
  for (i = 1; i < height - 1; ++i) {
    const int k = i * width + j;
    const int l = i * stride + j;
    const int nb = 3;
    const int64_t a = A[k] + 2 * (A[k - width] + A[k + width]) + A[k - 1] +
                      A[k - width - 1] + A[k + width - 1];
    const int64_t b = B[k] + 2 * (B[k - width] + B[k + width]) + B[k - 1] +
                      B[k - width - 1] + B[k + width - 1];
    const int64_t v =
        (((a * dgd[l] + b) << SGRPROJ_RST_BITS) + (1 << nb) / 2) >> nb;
    dgd[l] = ROUND_POWER_OF_TWO(v, SGRPROJ_SGR_BITS);
  }
  for (i = 1; i < height - 1; ++i) {
    for (j = 1; j < width - 1; ++j) {
      const int k = i * width + j;
      const int l = i * stride + j;
      const int nb = 5;
      const int64_t a =
          (A[k] + A[k - 1] + A[k + 1] + A[k - width] + A[k + width]) * 4 +
          (A[k - 1 - width] + A[k - 1 + width] + A[k + 1 - width] +
           A[k + 1 + width]) *
              3;
      const int64_t b =
          (B[k] + B[k - 1] + B[k + 1] + B[k - width] + B[k + width]) * 4 +
          (B[k - 1 - width] + B[k - 1 + width] + B[k + 1 - width] +
           B[k + 1 + width]) *
              3;
      const int64_t v =
          (((a * dgd[l] + b) << SGRPROJ_RST_BITS) + (1 << nb) / 2) >> nb;
      dgd[l] = ROUND_POWER_OF_TWO(v, SGRPROJ_SGR_BITS);
    }
  }
#else
  if (r > 1) boxnum(width, height, r = 1, num, width);
  boxsum(A, width, height, width, r, 0, A, width, T, width);
  boxsum(B, width, height, width, r, 0, B, width, T, width);
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      const int k = i * width + j;
      const int l = i * stride + j;
      const int n = num[k];
      const int64_t v =
          (((A[k] * dgd[l] + B[k]) << SGRPROJ_RST_BITS) + (n >> 1)) / n;
      dgd[l] = ROUND_POWER_OF_TWO(v, SGRPROJ_SGR_BITS);
    }
  }
#endif  // APPROXIMATE_SGR
}

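// Run the self-guided filter twice, with the (r1, e1) and (r2, e2) parameter
// pairs selected by eps, then project: the output is the input plus a
// weighted combination (weights xq) of the two filter residuals, rounded back
// down from SGRPROJ_PRJ_BITS + SGRPROJ_RST_BITS precision.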
static void apply_selfguided_restoration(int64_t *dat, int width, int height,
                                         int stride, int bit_depth, int eps,
                                         int *xqd, void *tmpbuf) {
  int xq[2];
  int64_t *flt1 = (int64_t *)tmpbuf;
  int64_t *flt2 = flt1 + RESTORATION_TILEPELS_MAX;
  uint8_t *tmpbuf2 = (uint8_t *)(flt2 + RESTORATION_TILEPELS_MAX);
  int i, j;
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      assert(i * width + j < RESTORATION_TILEPELS_MAX);
      flt1[i * width + j] = dat[i * stride + j];
      flt2[i * width + j] = dat[i * stride + j];
    }
  }
  av1_selfguided_restoration(flt1, width, height, width, bit_depth,
                             sgr_params[eps].r1, sgr_params[eps].e1, tmpbuf2);
  av1_selfguided_restoration(flt2, width, height, width, bit_depth,
                             sgr_params[eps].r2, sgr_params[eps].e2, tmpbuf2);
  decode_xq(xqd, xq);
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      const int k = i * width + j;
      const int l = i * stride + j;
      const int64_t u = ((int64_t)dat[l] << SGRPROJ_RST_BITS);
      const int64_t f1 = (int64_t)flt1[k] - u;
      const int64_t f2 = (int64_t)flt2[k] - u;
      const int64_t v = xq[0] * f1 + xq[1] * f2 + (u << SGRPROJ_PRJ_BITS);
      const int16_t w =
          (int16_t)ROUND_POWER_OF_TWO(v, SGRPROJ_PRJ_BITS + SGRPROJ_RST_BITS);
      dat[l] = w;
    }
  }
}

static void loop_sgrproj_filter_tile(uint8_t *data, int tile_idx, int width,
                                     int height, int stride,
                                     RestorationInternal *rst, void *tmpbuf,
                                     uint8_t *dst, int dst_stride) {
  const int tile_width = rst->tile_width >> rst->subsampling_x;
  const int tile_height = rst->tile_height >> rst->subsampling_y;
  int i, j;
  int h_start, h_end, v_start, v_end;
  uint8_t *data_p, *dst_p;
  int64_t *dat = (int64_t *)tmpbuf;
  tmpbuf = (uint8_t *)tmpbuf + RESTORATION_TILEPELS_MAX * sizeof(*dat);

  if (rst->rsi->sgrproj_info[tile_idx].level == 0) {
    loop_copy_tile(data, tile_idx, 0, 0, width, height, stride, rst, dst,
                   dst_stride);
    return;
  }
  av1_get_rest_tile_limits(tile_idx, 0, 0, rst->nhtiles, rst->nvtiles,
                           tile_width, tile_height, width, height, 0, 0,
                           &h_start, &h_end, &v_start, &v_end);
  data_p = data + h_start + v_start * stride;
  for (i = 0; i < (v_end - v_start); ++i) {
    for (j = 0; j < (h_end - h_start); ++j) {
      dat[i * (h_end - h_start) + j] = data_p[i * stride + j];
    }
  }
  apply_selfguided_restoration(dat, h_end - h_start, v_end - v_start,
                               h_end - h_start, 8,
                               rst->rsi->sgrproj_info[tile_idx].ep,
                               rst->rsi->sgrproj_info[tile_idx].xqd, tmpbuf);
  dst_p = dst + h_start + v_start * dst_stride;
  for (i = 0; i < (v_end - v_start); ++i) {
    for (j = 0; j < (h_end - h_start); ++j) {
      dst_p[i * dst_stride + j] =
          clip_pixel((int)dat[i * (h_end - h_start) + j]);
    }
  }
}

static void loop_sgrproj_filter(uint8_t *data, int width, int height,
                                int stride, RestorationInternal *rst,
                                uint8_t *dst, int dst_stride) {
  int tile_idx;
  uint8_t *tmpbuf = aom_malloc(SGRPROJ_TMPBUF_SIZE);
  for (tile_idx = 0; tile_idx < rst->ntiles; ++tile_idx) {
    loop_sgrproj_filter_tile(data, tile_idx, width, height, stride, rst, tmpbuf,
                             dst, dst_stride);
  }
  aom_free(tmpbuf);
}

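// One horizontal pass of the domain-transform recursive filter: a forward
// (left-to-right) and a backward (right-to-left) first-order recursion whose
// per-pixel feedback weight is looked up from the vtable using the absolute
// difference between neighbouring guide-image pixels.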
static void apply_domaintxfmrf_hor(int iter, int param, uint8_t *img, int width,
                                   int height, int img_stride, int32_t *dat,
                                   int dat_stride) {
  int i, j;
  for (i = 0; i < height; ++i) {
    uint8_t *ip = &img[i * img_stride];
    int32_t *dp = &dat[i * dat_stride];
    *dp *= DOMAINTXFMRF_VTABLE_PREC;
    dp++;
    ip++;
    // left to right
    for (j = 1; j < width; ++j, dp++, ip++) {
      const int v = domaintxfmrf_vtable[iter][param][abs(ip[0] - ip[-1])];
      dp[0] = dp[0] * (DOMAINTXFMRF_VTABLE_PREC - v) +
              ((v * dp[-1] + DOMAINTXFMRF_VTABLE_PREC / 2) >>
               DOMAINTXFMRF_VTABLE_PRECBITS);
    }
    // right to left
    dp -= 2;
    ip -= 2;
    for (j = width - 2; j >= 0; --j, dp--, ip--) {
      const int v = domaintxfmrf_vtable[iter][param][abs(ip[1] - ip[0])];
      dp[0] = (dp[0] * (DOMAINTXFMRF_VTABLE_PREC - v) + v * dp[1] +
               DOMAINTXFMRF_VTABLE_PREC / 2) >>
              DOMAINTXFMRF_VTABLE_PRECBITS;
    }
  }
}

static void apply_domaintxfmrf_ver(int iter, int param, uint8_t *img, int width,
                                   int height, int img_stride, int32_t *dat,
                                   int dat_stride) {
  int i, j;
  for (j = 0; j < width; ++j) {
    uint8_t *ip = &img[j];
    int32_t *dp = &dat[j];
    dp += dat_stride;
    ip += img_stride;
    // top to bottom
    for (i = 1; i < height; ++i, dp += dat_stride, ip += img_stride) {
      const int v =
          domaintxfmrf_vtable[iter][param][abs(ip[0] - ip[-img_stride])];
      dp[0] = (dp[0] * (DOMAINTXFMRF_VTABLE_PREC - v) +
               (dp[-dat_stride] * v + DOMAINTXFMRF_VTABLE_PREC / 2)) >>
              DOMAINTXFMRF_VTABLE_PRECBITS;
    }
    // bottom to top
    dp -= 2 * dat_stride;
    ip -= 2 * img_stride;
    for (i = height - 2; i >= 0; --i, dp -= dat_stride, ip -= img_stride) {
      const int v =
          domaintxfmrf_vtable[iter][param][abs(ip[img_stride] - ip[0])];
      dp[0] = (dp[0] * (DOMAINTXFMRF_VTABLE_PREC - v) + dp[dat_stride] * v +
               DOMAINTXFMRF_VTABLE_PREC / 2) >>
              DOMAINTXFMRF_VTABLE_PRECBITS;
    }
  }
}

static void apply_domaintxfmrf_reduce_prec(int32_t *dat, int width, int height,
                                           int dat_stride) {
  int i, j;
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      dat[i * dat_stride + j] = ROUND_POWER_OF_TWO_SIGNED(
          dat[i * dat_stride + j], DOMAINTXFMRF_VTABLE_PRECBITS);
    }
  }
}

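// Full domain-transform recursive filter: alternate horizontal and vertical
// passes for DOMAINTXFMRF_ITERS iterations over a fixed-point working copy of
// the tile, using dgd itself as the guide image, then clip back to 8 bits.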
void av1_domaintxfmrf_restoration(uint8_t *dgd, int width, int height,
                                  int stride, int param, uint8_t *dst,
                                  int dst_stride) {
  int32_t dat[RESTORATION_TILEPELS_MAX];
  int i, j, t;
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      dat[i * width + j] = dgd[i * stride + j];
    }
  }
  for (t = 0; t < DOMAINTXFMRF_ITERS; ++t) {
    apply_domaintxfmrf_hor(t, param, dgd, width, height, stride, dat, width);
    apply_domaintxfmrf_ver(t, param, dgd, width, height, stride, dat, width);
    apply_domaintxfmrf_reduce_prec(dat, width, height, width);
  }
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      dst[i * dst_stride + j] = clip_pixel(dat[i * width + j]);
    }
  }
}

static void loop_domaintxfmrf_filter_tile(uint8_t *data, int tile_idx,
                                          int width, int height, int stride,
                                          RestorationInternal *rst,
                                          uint8_t *dst, int dst_stride) {
  const int tile_width = rst->tile_width >> rst->subsampling_x;
  const int tile_height = rst->tile_height >> rst->subsampling_y;
  int h_start, h_end, v_start, v_end;

  if (rst->rsi->domaintxfmrf_info[tile_idx].level == 0) {
    loop_copy_tile(data, tile_idx, 0, 0, width, height, stride, rst, dst,
                   dst_stride);
    return;
  }
  av1_get_rest_tile_limits(tile_idx, 0, 0, rst->nhtiles, rst->nvtiles,
                           tile_width, tile_height, width, height, 0, 0,
                           &h_start, &h_end, &v_start, &v_end);
  av1_domaintxfmrf_restoration(
      data + h_start + v_start * stride, h_end - h_start, v_end - v_start,
      stride, rst->rsi->domaintxfmrf_info[tile_idx].sigma_r,
      dst + h_start + v_start * dst_stride, dst_stride);
}

static void loop_domaintxfmrf_filter(uint8_t *data, int width, int height,
                                     int stride, RestorationInternal *rst,
                                     uint8_t *dst, int dst_stride) {
  int tile_idx;
  for (tile_idx = 0; tile_idx < rst->ntiles; ++tile_idx) {
    loop_domaintxfmrf_filter_tile(data, tile_idx, width, height, stride, rst,
                                  dst, dst_stride);
  }
}

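// Per-tile dispatch for RESTORE_SWITCHABLE frames: each tile applies the
// restoration type signalled for it (none, Wiener, self-guided projection or
// domain transform RF).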
static void loop_switchable_filter(uint8_t *data, int width, int height,
                                   int stride, RestorationInternal *rst,
                                   uint8_t *dst, int dst_stride) {
  int tile_idx;
  uint8_t *tmpbuf = aom_malloc(SGRPROJ_TMPBUF_SIZE);
  extend_frame(data, width, height, stride);
  copy_border(data, width, height, stride, dst, dst_stride);
  for (tile_idx = 0; tile_idx < rst->ntiles; ++tile_idx) {
    if (rst->rsi->restoration_type[tile_idx] == RESTORE_NONE) {
      loop_copy_tile(data, tile_idx, 0, 0, width, height, stride, rst, dst,
                     dst_stride);
    } else if (rst->rsi->restoration_type[tile_idx] == RESTORE_WIENER) {
      loop_wiener_filter_tile(data, tile_idx, width, height, stride, rst, dst,
                              dst_stride);
    } else if (rst->rsi->restoration_type[tile_idx] == RESTORE_SGRPROJ) {
      loop_sgrproj_filter_tile(data, tile_idx, width, height, stride, rst,
                               tmpbuf, dst, dst_stride);
    } else if (rst->rsi->restoration_type[tile_idx] == RESTORE_DOMAINTXFMRF) {
      loop_domaintxfmrf_filter_tile(data, tile_idx, width, height, stride, rst,
                                    dst, dst_stride);
    }
  }
  aom_free(tmpbuf);
}

#if CONFIG_AOM_HIGHBITDEPTH
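// High bitdepth variants of the functions above, operating on uint16_t
// samples accessed via CONVERT_TO_SHORTPTR/CONVERT_TO_BYTEPTR.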
static void copy_border_highbd(uint16_t *data, int width, int height,
                               int stride, uint16_t *dst, int dst_stride) {
  int i;
  for (i = RESTORATION_HALFWIN; i < height - RESTORATION_HALFWIN; ++i) {
    memcpy(dst + i * dst_stride, data + i * stride,
           RESTORATION_HALFWIN * sizeof(*dst));
    memcpy(dst + i * dst_stride + (width - RESTORATION_HALFWIN),
           data + i * stride + (width - RESTORATION_HALFWIN),
           RESTORATION_HALFWIN * sizeof(*dst));
  }
  for (i = 0; i < RESTORATION_HALFWIN; ++i) {
    memcpy(dst + i * dst_stride, data + i * stride, width * sizeof(*dst));
  }
  for (i = height - RESTORATION_HALFWIN; i < height; ++i)
    memcpy(dst + i * dst_stride, data + i * stride, width * sizeof(*dst));
}

static void extend_frame_highbd(uint16_t *data, int width, int height,
                                int stride) {
  uint16_t *data_p;
  int i, j;
  for (i = 0; i < height; ++i) {
    data_p = data + i * stride;
    for (j = -RESTORATION_HALFWIN; j < 0; ++j) data_p[j] = data_p[0];
    for (j = width; j < width + RESTORATION_HALFWIN; ++j)
      data_p[j] = data_p[width - 1];
  }
  data_p = data - RESTORATION_HALFWIN;
  for (i = -RESTORATION_HALFWIN; i < 0; ++i) {
    memcpy(data_p + i * stride, data_p,
           (width + 2 * RESTORATION_HALFWIN) * sizeof(uint16_t));
  }
  for (i = height; i < height + RESTORATION_HALFWIN; ++i) {
    memcpy(data_p + i * stride, data_p + (height - 1) * stride,
           (width + 2 * RESTORATION_HALFWIN) * sizeof(uint16_t));
  }
}

static void loop_copy_tile_highbd(uint16_t *data, int tile_idx, int subtile_idx,
                                  int subtile_bits, int width, int height,
                                  int stride, RestorationInternal *rst,
                                  uint16_t *dst, int dst_stride) {
  const int tile_width = rst->tile_width >> rst->subsampling_x;
  const int tile_height = rst->tile_height >> rst->subsampling_y;
  int i;
  int h_start, h_end, v_start, v_end;
  av1_get_rest_tile_limits(tile_idx, subtile_idx, subtile_bits, rst->nhtiles,
                           rst->nvtiles, tile_width, tile_height, width, height,
                           0, 0, &h_start, &h_end, &v_start, &v_end);
  for (i = v_start; i < v_end; ++i)
    memcpy(dst + i * dst_stride + h_start, data + i * stride + h_start,
           (h_end - h_start) * sizeof(*dst));
}

static void loop_wiener_filter_tile_highbd(uint16_t *data, int tile_idx,
                                           int width, int height, int stride,
                                           RestorationInternal *rst,
                                           int bit_depth, uint16_t *dst,
                                           int dst_stride) {
  const int tile_width = rst->tile_width >> rst->subsampling_x;
  const int tile_height = rst->tile_height >> rst->subsampling_y;
  int h_start, h_end, v_start, v_end;
  int i, j;
  DECLARE_ALIGNED(16, InterpKernel, hkernel);
  DECLARE_ALIGNED(16, InterpKernel, vkernel);

  if (rst->rsi->wiener_info[tile_idx].level == 0) {
    loop_copy_tile_highbd(data, tile_idx, 0, 0, width, height, stride, rst, dst,
                          dst_stride);
    return;
  }
  // TODO(david.barker): Store hfilter/vfilter as an InterpKernel
  // instead of the current format. Then this can be removed.
  assert(RESTORATION_WIN == SUBPEL_TAPS - 1);
  for (i = 0; i < RESTORATION_WIN; ++i) {
    hkernel[i] = rst->rsi->wiener_info[tile_idx].hfilter[i];
    vkernel[i] = rst->rsi->wiener_info[tile_idx].vfilter[i];
  }
  hkernel[RESTORATION_WIN] = 0;
  vkernel[RESTORATION_WIN] = 0;
  av1_get_rest_tile_limits(tile_idx, 0, 0, rst->nhtiles, rst->nvtiles,
                           tile_width, tile_height, width, height, 0, 0,
                           &h_start, &h_end, &v_start, &v_end);
  // Convolve the whole tile (done in blocks here to match the requirements
  // of the vectorized convolve functions, but the result is equivalent)
  for (i = v_start; i < v_end; i += MAX_SB_SIZE)
    for (j = h_start; j < h_end; j += MAX_SB_SIZE) {
      int w = AOMMIN(MAX_SB_SIZE, (h_end - j + 15) & ~15);
      int h = AOMMIN(MAX_SB_SIZE, (v_end - i + 15) & ~15);
      const uint16_t *data_p = data + i * stride + j;
      uint16_t *dst_p = dst + i * dst_stride + j;
      aom_highbd_convolve8_c(CONVERT_TO_BYTEPTR(data_p), stride,
                             CONVERT_TO_BYTEPTR(dst_p), dst_stride, hkernel, 16,
                             vkernel, 16, w, h, bit_depth);
    }
}

static void loop_wiener_filter_highbd(uint8_t *data8, int width, int height,
                                      int stride, RestorationInternal *rst,
                                      int bit_depth, uint8_t *dst8,
                                      int dst_stride) {
  uint16_t *data = CONVERT_TO_SHORTPTR(data8);
  uint16_t *dst = CONVERT_TO_SHORTPTR(dst8);
  int tile_idx, i;
  copy_border_highbd(data, width, height, stride, dst, dst_stride);
  extend_frame_highbd(data, width, height, stride);
  for (tile_idx = 0; tile_idx < rst->ntiles; ++tile_idx) {
    loop_wiener_filter_tile_highbd(data, tile_idx, width, height, stride, rst,
                                   bit_depth, dst, dst_stride);
  }
}

static void loop_sgrproj_filter_tile_highbd(uint16_t *data, int tile_idx,
                                            int width, int height, int stride,
                                            RestorationInternal *rst,
                                            int bit_depth, void *tmpbuf,
                                            uint16_t *dst, int dst_stride) {
  const int tile_width = rst->tile_width >> rst->subsampling_x;
  const int tile_height = rst->tile_height >> rst->subsampling_y;
  int i, j;
  int h_start, h_end, v_start, v_end;
  uint16_t *data_p, *dst_p;
  int64_t *dat = (int64_t *)tmpbuf;
  tmpbuf = (uint8_t *)tmpbuf + RESTORATION_TILEPELS_MAX * sizeof(*dat);

  if (rst->rsi->sgrproj_info[tile_idx].level == 0) {
    loop_copy_tile_highbd(data, tile_idx, 0, 0, width, height, stride, rst, dst,
                          dst_stride);
    return;
  }
  av1_get_rest_tile_limits(tile_idx, 0, 0, rst->nhtiles, rst->nvtiles,
                           tile_width, tile_height, width, height, 0, 0,
                           &h_start, &h_end, &v_start, &v_end);
  data_p = data + h_start + v_start * stride;
  for (i = 0; i < (v_end - v_start); ++i) {
    for (j = 0; j < (h_end - h_start); ++j) {
      dat[i * (h_end - h_start) + j] = data_p[i * stride + j];
    }
  }
  apply_selfguided_restoration(dat, h_end - h_start, v_end - v_start,
                               h_end - h_start, bit_depth,
                               rst->rsi->sgrproj_info[tile_idx].ep,
                               rst->rsi->sgrproj_info[tile_idx].xqd, tmpbuf);
  dst_p = dst + h_start + v_start * dst_stride;
  for (i = 0; i < (v_end - v_start); ++i) {
    for (j = 0; j < (h_end - h_start); ++j) {
      dst_p[i * dst_stride + j] =
          clip_pixel_highbd((int)dat[i * (h_end - h_start) + j], bit_depth);
    }
  }
}

static void loop_sgrproj_filter_highbd(uint8_t *data8, int width, int height,
                                       int stride, RestorationInternal *rst,
                                       int bit_depth, uint8_t *dst8,
                                       int dst_stride) {
  int tile_idx;
  uint16_t *data = CONVERT_TO_SHORTPTR(data8);
  uint8_t *tmpbuf = aom_malloc(SGRPROJ_TMPBUF_SIZE);
  uint16_t *dst = CONVERT_TO_SHORTPTR(dst8);
  for (tile_idx = 0; tile_idx < rst->ntiles; ++tile_idx) {
    loop_sgrproj_filter_tile_highbd(data, tile_idx, width, height, stride, rst,
                                    bit_depth, tmpbuf, dst, dst_stride);
  }
  aom_free(tmpbuf);
}

static void apply_domaintxfmrf_hor_highbd(int iter, int param, uint16_t *img,
                                          int width, int height, int img_stride,
                                          int32_t *dat, int dat_stride,
                                          int bd) {
  const int shift = (bd - 8);
  int i, j;
  for (i = 0; i < height; ++i) {
    uint16_t *ip = &img[i * img_stride];
    int32_t *dp = &dat[i * dat_stride];
    *dp *= DOMAINTXFMRF_VTABLE_PREC;
    dp++;
    ip++;
    // left to right
    for (j = 1; j < width; ++j, dp++, ip++) {
      const int v =
          domaintxfmrf_vtable[iter][param]
                             [abs((ip[0] >> shift) - (ip[-1] >> shift))];
      dp[0] = dp[0] * (DOMAINTXFMRF_VTABLE_PREC - v) +
              ((v * dp[-1] + DOMAINTXFMRF_VTABLE_PREC / 2) >>
               DOMAINTXFMRF_VTABLE_PRECBITS);
    }
    // right to left
    dp -= 2;
    ip -= 2;
    for (j = width - 2; j >= 0; --j, dp--, ip--) {
      const int v =
          domaintxfmrf_vtable[iter][param]
                             [abs((ip[1] >> shift) - (ip[0] >> shift))];
      dp[0] = (dp[0] * (DOMAINTXFMRF_VTABLE_PREC - v) + v * dp[1] +
               DOMAINTXFMRF_VTABLE_PREC / 2) >>
              DOMAINTXFMRF_VTABLE_PRECBITS;
    }
  }
}

static void apply_domaintxfmrf_ver_highbd(int iter, int param, uint16_t *img,
                                          int width, int height, int img_stride,
                                          int32_t *dat, int dat_stride,
                                          int bd) {
  int i, j;
  const int shift = (bd - 8);
  for (j = 0; j < width; ++j) {
    uint16_t *ip = &img[j];
    int32_t *dp = &dat[j];
    dp += dat_stride;
    ip += img_stride;
    // top to bottom
    for (i = 1; i < height; ++i, dp += dat_stride, ip += img_stride) {
      const int v = domaintxfmrf_vtable[iter][param][abs(
          (ip[0] >> shift) - (ip[-img_stride] >> shift))];
      dp[0] = (dp[0] * (DOMAINTXFMRF_VTABLE_PREC - v) +
               (dp[-dat_stride] * v + DOMAINTXFMRF_VTABLE_PREC / 2)) >>
              DOMAINTXFMRF_VTABLE_PRECBITS;
    }
    // bottom to top
    dp -= 2 * dat_stride;
    ip -= 2 * img_stride;
    for (i = height - 2; i >= 0; --i, dp -= dat_stride, ip -= img_stride) {
      const int v = domaintxfmrf_vtable[iter][param][abs(
          (ip[img_stride] >> shift) - (ip[0] >> shift))];
      dp[0] = (dp[0] * (DOMAINTXFMRF_VTABLE_PREC - v) + dp[dat_stride] * v +
               DOMAINTXFMRF_VTABLE_PREC / 2) >>
              DOMAINTXFMRF_VTABLE_PRECBITS;
    }
  }
}

void av1_domaintxfmrf_restoration_highbd(uint16_t *dgd, int width, int height,
                                         int stride, int param, int bit_depth,
                                         uint16_t *dst, int dst_stride) {
  int32_t dat[RESTORATION_TILEPELS_MAX];
  int i, j, t;
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      dat[i * width + j] = dgd[i * stride + j];
    }
  }
  for (t = 0; t < DOMAINTXFMRF_ITERS; ++t) {
    apply_domaintxfmrf_hor_highbd(t, param, dgd, width, height, stride, dat,
                                  width, bit_depth);
    apply_domaintxfmrf_ver_highbd(t, param, dgd, width, height, stride, dat,
                                  width, bit_depth);
    apply_domaintxfmrf_reduce_prec(dat, width, height, width);
  }
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      dst[i * dst_stride + j] =
          clip_pixel_highbd(dat[i * width + j], bit_depth);
    }
  }
}

static void loop_domaintxfmrf_filter_tile_highbd(
    uint16_t *data, int tile_idx, int width, int height, int stride,
    RestorationInternal *rst, int bit_depth, uint16_t *dst, int dst_stride) {
  const int tile_width = rst->tile_width >> rst->subsampling_x;
  const int tile_height = rst->tile_height >> rst->subsampling_y;
  int h_start, h_end, v_start, v_end;

  if (rst->rsi->domaintxfmrf_info[tile_idx].level == 0) {
    loop_copy_tile_highbd(data, tile_idx, 0, 0, width, height, stride, rst, dst,
                          dst_stride);
    return;
  }
  av1_get_rest_tile_limits(tile_idx, 0, 0, rst->nhtiles, rst->nvtiles,
                           tile_width, tile_height, width, height, 0, 0,
                           &h_start, &h_end, &v_start, &v_end);
  av1_domaintxfmrf_restoration_highbd(
      data + h_start + v_start * stride, h_end - h_start, v_end - v_start,
      stride, rst->rsi->domaintxfmrf_info[tile_idx].sigma_r, bit_depth,
      dst + h_start + v_start * dst_stride, dst_stride);
}

static void loop_domaintxfmrf_filter_highbd(uint8_t *data8, int width,
                                            int height, int stride,
                                            RestorationInternal *rst,
                                            int bit_depth, uint8_t *dst8,
                                            int dst_stride) {
  int tile_idx;
  uint16_t *data = CONVERT_TO_SHORTPTR(data8);
  uint16_t *dst = CONVERT_TO_SHORTPTR(dst8);
  for (tile_idx = 0; tile_idx < rst->ntiles; ++tile_idx) {
    loop_domaintxfmrf_filter_tile_highbd(data, tile_idx, width, height, stride,
                                         rst, bit_depth, dst, dst_stride);
  }
}

static void loop_switchable_filter_highbd(uint8_t *data8, int width, int height,
                                          int stride, RestorationInternal *rst,
                                          int bit_depth, uint8_t *dst8,
                                          int dst_stride) {
  uint16_t *data = CONVERT_TO_SHORTPTR(data8);
  uint8_t *tmpbuf = aom_malloc(SGRPROJ_TMPBUF_SIZE);
  uint16_t *dst = CONVERT_TO_SHORTPTR(dst8);
  int i, tile_idx;
  copy_border_highbd(data, width, height, stride, dst, dst_stride);
  extend_frame_highbd(data, width, height, stride);
  for (tile_idx = 0; tile_idx < rst->ntiles; ++tile_idx) {
    if (rst->rsi->restoration_type[tile_idx] == RESTORE_NONE) {
      loop_copy_tile_highbd(data, tile_idx, 0, 0, width, height, stride, rst,
                            dst, dst_stride);
    } else if (rst->rsi->restoration_type[tile_idx] == RESTORE_WIENER) {
      loop_wiener_filter_tile_highbd(data, tile_idx, width, height, stride, rst,
                                     bit_depth, dst, dst_stride);
    } else if (rst->rsi->restoration_type[tile_idx] == RESTORE_SGRPROJ) {
      loop_sgrproj_filter_tile_highbd(data, tile_idx, width, height, stride,
                                      rst, bit_depth, tmpbuf, dst, dst_stride);
    } else if (rst->rsi->restoration_type[tile_idx] == RESTORE_DOMAINTXFMRF) {
      loop_domaintxfmrf_filter_tile_highbd(data, tile_idx, width, height,
                                           stride, rst, bit_depth, dst,
                                           dst_stride);
    }
  }
  aom_free(tmpbuf);
}
#endif  // CONFIG_AOM_HIGHBITDEPTH

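// Apply loop restoration to the rows [start_mi_row, end_mi_row) of the frame.
// If dst is NULL the restoration is done in place via a temporary frame
// buffer; otherwise the restored rows are written into dst.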
void av1_loop_restoration_rows(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
                               int start_mi_row, int end_mi_row, int y_only,
                               YV12_BUFFER_CONFIG *dst) {
  const int ywidth = frame->y_crop_width;
  const int ystride = frame->y_stride;
  const int uvwidth = frame->uv_crop_width;
  const int uvstride = frame->uv_stride;
  const int ystart = start_mi_row << MI_SIZE_LOG2;
  const int uvstart = ystart >> cm->subsampling_y;
  int yend = end_mi_row << MI_SIZE_LOG2;
  int uvend = yend >> cm->subsampling_y;
  restore_func_type restore_funcs[RESTORE_TYPES] = { NULL, loop_wiener_filter,
                                                     loop_sgrproj_filter,
                                                     loop_domaintxfmrf_filter,
                                                     loop_switchable_filter };
#if CONFIG_AOM_HIGHBITDEPTH
  restore_func_highbd_type restore_funcs_highbd[RESTORE_TYPES] = {
    NULL, loop_wiener_filter_highbd, loop_sgrproj_filter_highbd,
    loop_domaintxfmrf_filter_highbd, loop_switchable_filter_highbd
  };
#endif  // CONFIG_AOM_HIGHBITDEPTH
  restore_func_type restore_func =
      restore_funcs[cm->rst_internal.rsi->frame_restoration_type];
#if CONFIG_AOM_HIGHBITDEPTH
  restore_func_highbd_type restore_func_highbd =
      restore_funcs_highbd[cm->rst_internal.rsi->frame_restoration_type];
#endif  // CONFIG_AOM_HIGHBITDEPTH
  YV12_BUFFER_CONFIG dst_;

  yend = AOMMIN(yend, cm->height);
  uvend = AOMMIN(uvend, cm->subsampling_y ? (cm->height + 1) >> 1 : cm->height);

  if (cm->rst_internal.rsi->frame_restoration_type == RESTORE_NONE) {
    if (dst) {
      if (y_only)
        aom_yv12_copy_y(frame, dst);
      else
        aom_yv12_copy_frame(frame, dst);
    }
    return;
  }

  if (y_only == 0)
    y_only = override_y_only[cm->rst_internal.rsi->frame_restoration_type];
  if (!dst) {
    dst = &dst_;
    memset(dst, 0, sizeof(YV12_BUFFER_CONFIG));
    if (aom_realloc_frame_buffer(
            dst, cm->width, cm->height, cm->subsampling_x, cm->subsampling_y,
#if CONFIG_AOM_HIGHBITDEPTH
            cm->use_highbitdepth,
#endif
            AOM_BORDER_IN_PIXELS, cm->byte_alignment, NULL, NULL, NULL) < 0)
      aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                         "Failed to allocate restoration dst buffer");
  }

#if CONFIG_AOM_HIGHBITDEPTH
  if (cm->use_highbitdepth)
    restore_func_highbd(frame->y_buffer + ystart * ystride, ywidth,
                        yend - ystart, ystride, &cm->rst_internal,
                        cm->bit_depth, dst->y_buffer + ystart * dst->y_stride,
                        dst->y_stride);
  else
#endif  // CONFIG_AOM_HIGHBITDEPTH
    restore_func(frame->y_buffer + ystart * ystride, ywidth, yend - ystart,
                 ystride, &cm->rst_internal,
                 dst->y_buffer + ystart * dst->y_stride, dst->y_stride);
  if (!y_only) {
    cm->rst_internal.subsampling_x = cm->subsampling_x;
    cm->rst_internal.subsampling_y = cm->subsampling_y;
#if CONFIG_AOM_HIGHBITDEPTH
    if (cm->use_highbitdepth) {
      restore_func_highbd(
          frame->u_buffer + uvstart * uvstride, uvwidth, uvend - uvstart,
          uvstride, &cm->rst_internal, cm->bit_depth,
          dst->u_buffer + uvstart * dst->uv_stride, dst->uv_stride);
      restore_func_highbd(
          frame->v_buffer + uvstart * uvstride, uvwidth, uvend - uvstart,
          uvstride, &cm->rst_internal, cm->bit_depth,
          dst->v_buffer + uvstart * dst->uv_stride, dst->uv_stride);
    } else {
#endif  // CONFIG_AOM_HIGHBITDEPTH
      restore_func(frame->u_buffer + uvstart * uvstride, uvwidth,
                   uvend - uvstart, uvstride, &cm->rst_internal,
                   dst->u_buffer + uvstart * dst->uv_stride, dst->uv_stride);
      restore_func(frame->v_buffer + uvstart * uvstride, uvwidth,
                   uvend - uvstart, uvstride, &cm->rst_internal,
                   dst->v_buffer + uvstart * dst->uv_stride, dst->uv_stride);
#if CONFIG_AOM_HIGHBITDEPTH
    }
#endif  // CONFIG_AOM_HIGHBITDEPTH
  }

  if (dst == &dst_) {
    if (y_only)
      aom_yv12_copy_y(dst, frame);
    else
      aom_yv12_copy_frame(dst, frame);
    aom_free_frame_buffer(dst);
  }
}

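// Top-level entry point: optionally restrict filtering to a band of rows in
// the middle of the frame (partial_frame), initialise the internal restoration
// state from rsi and run the row-based restoration above.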
void av1_loop_restoration_frame(YV12_BUFFER_CONFIG *frame, AV1_COMMON *cm,
                                RestorationInfo *rsi, int y_only,
                                int partial_frame, YV12_BUFFER_CONFIG *dst) {
  int start_mi_row, end_mi_row, mi_rows_to_filter;
  if (rsi->frame_restoration_type != RESTORE_NONE) {
    start_mi_row = 0;
    mi_rows_to_filter = cm->mi_rows;
    if (partial_frame && cm->mi_rows > 8) {
      start_mi_row = cm->mi_rows >> 1;
      start_mi_row &= 0xfffffff8;
      mi_rows_to_filter = AOMMAX(cm->mi_rows / 8, 8);
    }
    end_mi_row = start_mi_row + mi_rows_to_filter;
    av1_loop_restoration_init(&cm->rst_internal, rsi,
                              cm->frame_type == KEY_FRAME, cm->width,
                              cm->height);
    av1_loop_restoration_rows(frame, cm, start_mi_row, end_mi_row, y_only, dst);
  }
}