/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <float.h>
#include <limits.h>
#include <math.h>

#include "./aom_scale_rtcd.h"

#include "aom_dsp/psnr.h"
#include "aom_dsp/aom_dsp_common.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/mem.h"

#include "av1/common/onyxc_int.h"
#include "av1/common/quant_common.h"
#include "av1/common/restoration.h"

#include "av1/encoder/av1_quantize.h"
#include "av1/encoder/encoder.h"
#include "av1/encoder/picklpf.h"
#include "av1/encoder/pickrst.h"

typedef double (*search_restore_type)(const YV12_BUFFER_CONFIG *src,
                                      AV1_COMP *cpi, int partial_frame,
                                      RestorationInfo *info,
                                      RestorationType *rest_level,
                                      double *best_tile_cost,
                                      YV12_BUFFER_CONFIG *dst_frame);

#if USE_DOMAINTXFMRF
const int frame_level_restore_bits[RESTORE_TYPES] = { 2, 2, 3, 3, 2 };
#else
const int frame_level_restore_bits[RESTORE_TYPES] = { 2, 2, 2, 2 };
#endif  // USE_DOMAINTXFMRF

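// Returns the sum of squared errors between src and dst over the rectangle
// [h_start, h_start + width) x [v_start, v_start + height), restricted to
// the planes selected by components_pattern.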
static int64_t sse_restoration_tile(const YV12_BUFFER_CONFIG *src,
                                    const YV12_BUFFER_CONFIG *dst,
                                    const AV1_COMMON *cm, int h_start,
                                    int width, int v_start, int height,
                                    int components_pattern) {
  int64_t filt_err = 0;
  (void)cm;
  // Y and UV components cannot be mixed
  assert(components_pattern == 1 || components_pattern == 2 ||
         components_pattern == 4 || components_pattern == 6);
#if CONFIG_AOM_HIGHBITDEPTH
  if (cm->use_highbitdepth) {
    if ((components_pattern >> AOM_PLANE_Y) & 1) {
      filt_err +=
          aom_highbd_get_y_sse_part(src, dst, h_start, width, v_start, height);
    }
    if ((components_pattern >> AOM_PLANE_U) & 1) {
      filt_err +=
          aom_highbd_get_u_sse_part(src, dst, h_start, width, v_start, height);
    }
    if ((components_pattern >> AOM_PLANE_V) & 1) {
      filt_err +=
          aom_highbd_get_v_sse_part(src, dst, h_start, width, v_start, height);
    }
    return filt_err;
  }
#endif  // CONFIG_AOM_HIGHBITDEPTH
  if ((components_pattern >> AOM_PLANE_Y) & 1) {
    filt_err += aom_get_y_sse_part(src, dst, h_start, width, v_start, height);
  }
  if ((components_pattern >> AOM_PLANE_U) & 1) {
    filt_err += aom_get_u_sse_part(src, dst, h_start, width, v_start, height);
  }
  if ((components_pattern >> AOM_PLANE_V) & 1) {
    filt_err += aom_get_v_sse_part(src, dst, h_start, width, v_start, height);
  }
  return filt_err;
}

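// Returns the whole-frame sum of squared errors between src and dst for the
// planes selected by components_pattern.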
static int64_t sse_restoration_frame(AV1_COMMON *const cm,
                                     const YV12_BUFFER_CONFIG *src,
                                     const YV12_BUFFER_CONFIG *dst,
                                     int components_pattern) {
  int64_t filt_err = 0;
#if CONFIG_AOM_HIGHBITDEPTH
  if (cm->use_highbitdepth) {
    if ((components_pattern >> AOM_PLANE_Y) & 1) {
      filt_err += aom_highbd_get_y_sse(src, dst);
    }
    if ((components_pattern >> AOM_PLANE_U) & 1) {
      filt_err += aom_highbd_get_u_sse(src, dst);
    }
    if ((components_pattern >> AOM_PLANE_V) & 1) {
      filt_err += aom_highbd_get_v_sse(src, dst);
    }
    return filt_err;
  }
#else
  (void)cm;
#endif  // CONFIG_AOM_HIGHBITDEPTH
  if ((components_pattern >> AOM_PLANE_Y) & 1) {
    filt_err = aom_get_y_sse(src, dst);
  }
  if ((components_pattern >> AOM_PLANE_U) & 1) {
    filt_err += aom_get_u_sse(src, dst);
  }
  if ((components_pattern >> AOM_PLANE_V) & 1) {
    filt_err += aom_get_v_sse(src, dst);
  }
  return filt_err;
}

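// Applies the restoration described by rsi to the current frame_to_show and
// returns the SSE against src measured over a single restoration (sub)tile.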
static int64_t try_restoration_tile(const YV12_BUFFER_CONFIG *src,
                                    AV1_COMP *const cpi, RestorationInfo *rsi,
                                    int components_pattern, int partial_frame,
                                    int tile_idx, int subtile_idx,
                                    int subtile_bits,
                                    YV12_BUFFER_CONFIG *dst_frame) {
  AV1_COMMON *const cm = &cpi->common;
  int64_t filt_err;
  int tile_width, tile_height, nhtiles, nvtiles;
  int h_start, h_end, v_start, v_end;
  int ntiles, width, height;

  // Y and UV components cannot be mixed
  assert(components_pattern == 1 || components_pattern == 2 ||
         components_pattern == 4 || components_pattern == 6);

  if (components_pattern == 1) {  // Y only
    width = src->y_crop_width;
    height = src->y_crop_height;
  } else {  // Color
    width = src->uv_crop_width;
    height = src->uv_crop_height;
  }
  ntiles = av1_get_rest_ntiles(width, height, &tile_width, &tile_height,
                               &nhtiles, &nvtiles);
  (void)ntiles;

  av1_loop_restoration_frame(cm->frame_to_show, cm, rsi, components_pattern,
                             partial_frame, dst_frame);
  av1_get_rest_tile_limits(tile_idx, subtile_idx, subtile_bits, nhtiles,
                           nvtiles, tile_width, tile_height, width, height, 0,
                           0, &h_start, &h_end, &v_start, &v_end);
  filt_err = sse_restoration_tile(src, dst_frame, cm, h_start, h_end - h_start,
                                  v_start, v_end - v_start, components_pattern);

  return filt_err;
}

static int64_t try_restoration_frame(const YV12_BUFFER_CONFIG *src,
                                     AV1_COMP *const cpi, RestorationInfo *rsi,
                                     int components_pattern, int partial_frame,
                                     YV12_BUFFER_CONFIG *dst_frame) {
  AV1_COMMON *const cm = &cpi->common;
  int64_t filt_err;
  av1_loop_restoration_frame(cm->frame_to_show, cm, rsi, components_pattern,
                             partial_frame, dst_frame);
  filt_err = sse_restoration_frame(cm, src, dst_frame, components_pattern);
  return filt_err;
}

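// Returns the SSE of the self-guided projection: each reconstructed pixel is
// formed by mixing the two filtered versions flt1 and flt2 with the
// projection coefficients encoded in xqd, then compared against the source.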
static int64_t get_pixel_proj_error(uint8_t *src8, int width, int height,
                                    int src_stride, uint8_t *dat8,
                                    int dat_stride, int bit_depth,
                                    int32_t *flt1, int flt1_stride,
                                    int32_t *flt2, int flt2_stride, int *xqd) {
  int i, j;
  int64_t err = 0;
  int xq[2];
  decode_xq(xqd, xq);
  if (bit_depth == 8) {
    const uint8_t *src = src8;
    const uint8_t *dat = dat8;
    for (i = 0; i < height; ++i) {
      for (j = 0; j < width; ++j) {
        const int32_t u =
            (int32_t)(dat[i * dat_stride + j] << SGRPROJ_RST_BITS);
        const int32_t f1 = (int32_t)flt1[i * flt1_stride + j] - u;
        const int32_t f2 = (int32_t)flt2[i * flt2_stride + j] - u;
        const int32_t v = xq[0] * f1 + xq[1] * f2 + (u << SGRPROJ_PRJ_BITS);
        const int32_t e =
            ROUND_POWER_OF_TWO(v, SGRPROJ_RST_BITS + SGRPROJ_PRJ_BITS) -
            src[i * src_stride + j];
        err += e * e;
      }
    }
  } else {
    const uint16_t *src = CONVERT_TO_SHORTPTR(src8);
    const uint16_t *dat = CONVERT_TO_SHORTPTR(dat8);
    for (i = 0; i < height; ++i) {
      for (j = 0; j < width; ++j) {
        const int32_t u =
            (int32_t)(dat[i * dat_stride + j] << SGRPROJ_RST_BITS);
        const int32_t f1 = (int32_t)flt1[i * flt1_stride + j] - u;
        const int32_t f2 = (int32_t)flt2[i * flt2_stride + j] - u;
        const int32_t v = xq[0] * f1 + xq[1] * f2 + (u << SGRPROJ_PRJ_BITS);
        const int32_t e =
            ROUND_POWER_OF_TWO(v, SGRPROJ_RST_BITS + SGRPROJ_PRJ_BITS) -
            src[i * src_stride + j];
        err += e * e;
      }
    }
  }
  return err;
}

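// Solves the 2x2 normal equations for the projection coefficients xq that
// best combine the two self-guided filter outputs (flt1, flt2) towards the
// source; leaves the default coefficients if the system is ill-conditioned.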
static void get_proj_subspace(uint8_t *src8, int width, int height,
                              int src_stride, uint8_t *dat8, int dat_stride,
                              int bit_depth, int32_t *flt1, int flt1_stride,
                              int32_t *flt2, int flt2_stride, int *xq) {
  int i, j;
  double H[2][2] = { { 0, 0 }, { 0, 0 } };
  double C[2] = { 0, 0 };
  double Det;
  double x[2];
  const int size = width * height;

  xq[0] = -(1 << SGRPROJ_PRJ_BITS) / 4;
  xq[1] = (1 << SGRPROJ_PRJ_BITS) - xq[0];
  if (bit_depth == 8) {
    const uint8_t *src = src8;
    const uint8_t *dat = dat8;
    for (i = 0; i < height; ++i) {
      for (j = 0; j < width; ++j) {
        const double u = (double)(dat[i * dat_stride + j] << SGRPROJ_RST_BITS);
        const double s =
            (double)(src[i * src_stride + j] << SGRPROJ_RST_BITS) - u;
        const double f1 = (double)flt1[i * flt1_stride + j] - u;
        const double f2 = (double)flt2[i * flt2_stride + j] - u;
        H[0][0] += f1 * f1;
        H[1][1] += f2 * f2;
        H[0][1] += f1 * f2;
        C[0] += f1 * s;
        C[1] += f2 * s;
      }
    }
  } else {
    const uint16_t *src = CONVERT_TO_SHORTPTR(src8);
    const uint16_t *dat = CONVERT_TO_SHORTPTR(dat8);
    for (i = 0; i < height; ++i) {
      for (j = 0; j < width; ++j) {
        const double u = (double)(dat[i * dat_stride + j] << SGRPROJ_RST_BITS);
        const double s =
            (double)(src[i * src_stride + j] << SGRPROJ_RST_BITS) - u;
        const double f1 = (double)flt1[i * flt1_stride + j] - u;
        const double f2 = (double)flt2[i * flt2_stride + j] - u;
        H[0][0] += f1 * f1;
        H[1][1] += f2 * f2;
        H[0][1] += f1 * f2;
        C[0] += f1 * s;
        C[1] += f2 * s;
      }
    }
  }
  H[0][0] /= size;
  H[0][1] /= size;
  H[1][1] /= size;
  H[1][0] = H[0][1];
  C[0] /= size;
  C[1] /= size;
  Det = (H[0][0] * H[1][1] - H[0][1] * H[1][0]);
  if (Det < 1e-8) return;  // ill-posed, return default values
  x[0] = (H[1][1] * C[0] - H[0][1] * C[1]) / Det;
  x[1] = (H[0][0] * C[1] - H[1][0] * C[0]) / Det;
  xq[0] = (int)rint(x[0] * (1 << SGRPROJ_PRJ_BITS));
  xq[1] = (int)rint(x[1] * (1 << SGRPROJ_PRJ_BITS));
}

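// Converts the projection coefficients xq into the clamped form xqd that is
// signalled in the bitstream.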
void encode_xq(int *xq, int *xqd) {
  xqd[0] = -xq[0];
  xqd[0] = clamp(xqd[0], SGRPROJ_PRJ_MIN0, SGRPROJ_PRJ_MAX0);
  xqd[1] = (1 << SGRPROJ_PRJ_BITS) + xqd[0] - xq[1];
  xqd[1] = clamp(xqd[1], SGRPROJ_PRJ_MIN1, SGRPROJ_PRJ_MAX1);
}

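// For each of the SGRPROJ_PARAMS parameter sets, runs the self-guided filter
// with the two (radius, eps) pairs of that set, fits projection coefficients,
// and keeps the set and coefficients that give the lowest pixel error.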
static void search_selfguided_restoration(uint8_t *dat8, int width, int height,
                                          int dat_stride, uint8_t *src8,
                                          int src_stride, int bit_depth,
                                          int *eps, int *xqd, int32_t *rstbuf) {
  int32_t *flt1 = rstbuf;
  int32_t *flt2 = flt1 + RESTORATION_TILEPELS_MAX;
  int32_t *tmpbuf2 = flt2 + RESTORATION_TILEPELS_MAX;
  int ep, bestep = 0;
  int64_t err, besterr = -1;
  int exqd[2], bestxqd[2] = { 0, 0 };

  for (ep = 0; ep < SGRPROJ_PARAMS; ep++) {
    int exq[2];
#if CONFIG_AOM_HIGHBITDEPTH
    if (bit_depth > 8) {
      uint16_t *dat = CONVERT_TO_SHORTPTR(dat8);
      av1_selfguided_restoration_highbd(dat, width, height, dat_stride, flt1,
                                        width, bit_depth, sgr_params[ep].r1,
                                        sgr_params[ep].e1, tmpbuf2);
      av1_selfguided_restoration_highbd(dat, width, height, dat_stride, flt2,
                                        width, bit_depth, sgr_params[ep].r2,
                                        sgr_params[ep].e2, tmpbuf2);
    } else {
#endif
      av1_selfguided_restoration(dat8, width, height, dat_stride, flt1, width,
                                 bit_depth, sgr_params[ep].r1,
                                 sgr_params[ep].e1, tmpbuf2);
      av1_selfguided_restoration(dat8, width, height, dat_stride, flt2, width,
                                 bit_depth, sgr_params[ep].r2,
                                 sgr_params[ep].e2, tmpbuf2);
#if CONFIG_AOM_HIGHBITDEPTH
    }
#endif
    get_proj_subspace(src8, width, height, src_stride, dat8, dat_stride,
                      bit_depth, flt1, width, flt2, width, exq);
    encode_xq(exq, exqd);
    err =
        get_pixel_proj_error(src8, width, height, src_stride, dat8, dat_stride,
                             bit_depth, flt1, width, flt2, width, exqd);
    if (besterr == -1 || err < besterr) {
      bestep = ep;
      besterr = err;
      bestxqd[0] = exqd[0];
      bestxqd[1] = exqd[1];
    }
  }
  *eps = bestep;
  xqd[0] = bestxqd[0];
  xqd[1] = bestxqd[1];
}

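// Rate-distortion search for self-guided projection restoration: decides
// RESTORE_SGRPROJ vs RESTORE_NONE per tile and returns the RD cost of the
// resulting frame-level configuration.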
static double search_sgrproj(const YV12_BUFFER_CONFIG *src, AV1_COMP *cpi,
                             int partial_frame, RestorationInfo *info,
                             RestorationType *type, double *best_tile_cost,
                             YV12_BUFFER_CONFIG *dst_frame) {
  SgrprojInfo *sgrproj_info = info->sgrproj_info;
  double err, cost_norestore, cost_sgrproj;
  int bits;
  MACROBLOCK *x = &cpi->td.mb;
  AV1_COMMON *const cm = &cpi->common;
  const YV12_BUFFER_CONFIG *dgd = cm->frame_to_show;
  RestorationInfo *rsi = &cpi->rst_search[0];
  int tile_idx, tile_width, tile_height, nhtiles, nvtiles;
  int h_start, h_end, v_start, v_end;
  // Allocate for the src buffer at high precision
  const int ntiles = av1_get_rest_ntiles(cm->width, cm->height, &tile_width,
                                         &tile_height, &nhtiles, &nvtiles);
  rsi->frame_restoration_type = RESTORE_SGRPROJ;

  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    rsi->restoration_type[tile_idx] = RESTORE_NONE;
  }
  // Compute best Sgrproj filters for each tile
  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    av1_get_rest_tile_limits(tile_idx, 0, 0, nhtiles, nvtiles, tile_width,
                             tile_height, cm->width, cm->height, 0, 0, &h_start,
                             &h_end, &v_start, &v_end);
    err = sse_restoration_tile(src, cm->frame_to_show, cm, h_start,
                               h_end - h_start, v_start, v_end - v_start, 1);
    // #bits when a tile is not restored
    bits = av1_cost_bit(RESTORE_NONE_SGRPROJ_PROB, 0);
    cost_norestore = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);
    best_tile_cost[tile_idx] = DBL_MAX;
    search_selfguided_restoration(
        dgd->y_buffer + v_start * dgd->y_stride + h_start, h_end - h_start,
        v_end - v_start, dgd->y_stride,
        src->y_buffer + v_start * src->y_stride + h_start, src->y_stride,
#if CONFIG_AOM_HIGHBITDEPTH
        cm->bit_depth,
#else
        8,
#endif  // CONFIG_AOM_HIGHBITDEPTH
        &rsi->sgrproj_info[tile_idx].ep, rsi->sgrproj_info[tile_idx].xqd,
        cm->rst_internal.tmpbuf);
    rsi->restoration_type[tile_idx] = RESTORE_SGRPROJ;
    err = try_restoration_tile(src, cpi, rsi, 1, partial_frame, tile_idx, 0, 0,
                               dst_frame);
    bits = SGRPROJ_BITS << AV1_PROB_COST_SHIFT;
    bits += av1_cost_bit(RESTORE_NONE_SGRPROJ_PROB, 1);
    cost_sgrproj = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);
    if (cost_sgrproj >= cost_norestore) {
      type[tile_idx] = RESTORE_NONE;
    } else {
      type[tile_idx] = RESTORE_SGRPROJ;
      memcpy(&sgrproj_info[tile_idx], &rsi->sgrproj_info[tile_idx],
             sizeof(sgrproj_info[tile_idx]));
      bits = SGRPROJ_BITS << AV1_PROB_COST_SHIFT;
      best_tile_cost[tile_idx] = RDCOST_DBL(
          x->rdmult, x->rddiv,
          (bits + cpi->switchable_restore_cost[RESTORE_SGRPROJ]) >> 4, err);
    }
    rsi->restoration_type[tile_idx] = RESTORE_NONE;
  }
  // Cost for Sgrproj filtering
  bits = frame_level_restore_bits[rsi->frame_restoration_type]
         << AV1_PROB_COST_SHIFT;
  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    bits +=
        av1_cost_bit(RESTORE_NONE_SGRPROJ_PROB, type[tile_idx] != RESTORE_NONE);
    memcpy(&rsi->sgrproj_info[tile_idx], &sgrproj_info[tile_idx],
           sizeof(sgrproj_info[tile_idx]));
    if (type[tile_idx] == RESTORE_SGRPROJ) {
      bits += (SGRPROJ_BITS << AV1_PROB_COST_SHIFT);
    }
    rsi->restoration_type[tile_idx] = type[tile_idx];
  }
  err = try_restoration_frame(src, cpi, rsi, 1, partial_frame, dst_frame);
  cost_sgrproj = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);

  return cost_sgrproj;
}

#if USE_DOMAINTXFMRF
static int64_t compute_sse(uint8_t *dgd, int width, int height, int dgd_stride,
                           uint8_t *src, int src_stride) {
  int64_t sse = 0;
  int i, j;
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      const int diff =
          (int)dgd[i * dgd_stride + j] - (int)src[i * src_stride + j];
      sse += diff * diff;
    }
  }
  return sse;
}

#if CONFIG_AOM_HIGHBITDEPTH
static int64_t compute_sse_highbd(uint16_t *dgd, int width, int height,
                                  int dgd_stride, uint16_t *src,
                                  int src_stride) {
  int64_t sse = 0;
  int i, j;
  for (i = 0; i < height; ++i) {
    for (j = 0; j < width; ++j) {
      const int diff =
          (int)dgd[i * dgd_stride + j] - (int)src[i * src_stride + j];
      sse += diff * diff;
    }
  }
  return sse;
}
#endif  // CONFIG_AOM_HIGHBITDEPTH

static void search_domaintxfmrf_restoration(uint8_t *dgd8, int width,
                                            int height, int dgd_stride,
                                            uint8_t *src8, int src_stride,
                                            int bit_depth, int *sigma_r,
                                            uint8_t *fltbuf, int32_t *tmpbuf) {
  const int first_p_step = 8;
  const int second_p_range = first_p_step >> 1;
  const int second_p_step = 2;
  const int third_p_range = second_p_step >> 1;
  const int third_p_step = 1;
  int p, best_p0, best_p = -1;
  int64_t best_sse = INT64_MAX, sse;
  if (bit_depth == 8) {
    uint8_t *flt = fltbuf;
    uint8_t *dgd = dgd8;
    uint8_t *src = src8;
    // First phase
    for (p = first_p_step / 2; p < DOMAINTXFMRF_PARAMS; p += first_p_step) {
      av1_domaintxfmrf_restoration(dgd, width, height, dgd_stride, p, flt,
                                   width, tmpbuf);
      sse = compute_sse(flt, width, height, width, src, src_stride);
      if (sse < best_sse || best_p == -1) {
        best_p = p;
        best_sse = sse;
      }
    }
    // Second Phase
    best_p0 = best_p;
    for (p = best_p0 - second_p_range; p <= best_p0 + second_p_range;
         p += second_p_step) {
      if (p < 0 || p == best_p || p >= DOMAINTXFMRF_PARAMS) continue;
      av1_domaintxfmrf_restoration(dgd, width, height, dgd_stride, p, flt,
                                   width, tmpbuf);
      sse = compute_sse(flt, width, height, width, src, src_stride);
      if (sse < best_sse) {
        best_p = p;
        best_sse = sse;
      }
    }
    // Third Phase
    best_p0 = best_p;
    for (p = best_p0 - third_p_range; p <= best_p0 + third_p_range;
         p += third_p_step) {
      if (p < 0 || p == best_p || p >= DOMAINTXFMRF_PARAMS) continue;
      av1_domaintxfmrf_restoration(dgd, width, height, dgd_stride, p, flt,
                                   width, tmpbuf);
      sse = compute_sse(flt, width, height, width, src, src_stride);
      if (sse < best_sse) {
        best_p = p;
        best_sse = sse;
      }
    }
  } else {
#if CONFIG_AOM_HIGHBITDEPTH
    uint16_t *flt = (uint16_t *)fltbuf;
    uint16_t *dgd = CONVERT_TO_SHORTPTR(dgd8);
    uint16_t *src = CONVERT_TO_SHORTPTR(src8);
    // First phase
    for (p = first_p_step / 2; p < DOMAINTXFMRF_PARAMS; p += first_p_step) {
      av1_domaintxfmrf_restoration_highbd(dgd, width, height, dgd_stride, p,
                                          bit_depth, flt, width, tmpbuf);
      sse = compute_sse_highbd(flt, width, height, width, src, src_stride);
      if (sse < best_sse || best_p == -1) {
        best_p = p;
        best_sse = sse;
      }
    }
    // Second Phase
    best_p0 = best_p;
    for (p = best_p0 - second_p_range; p <= best_p0 + second_p_range;
         p += second_p_step) {
      if (p < 0 || p == best_p || p >= DOMAINTXFMRF_PARAMS) continue;
      av1_domaintxfmrf_restoration_highbd(dgd, width, height, dgd_stride, p,
                                          bit_depth, flt, width, tmpbuf);
      sse = compute_sse_highbd(flt, width, height, width, src, src_stride);
      if (sse < best_sse) {
        best_p = p;
        best_sse = sse;
      }
    }
    // Third Phase
    best_p0 = best_p;
    for (p = best_p0 - third_p_range; p <= best_p0 + third_p_range;
         p += third_p_step) {
      if (p < 0 || p == best_p || p >= DOMAINTXFMRF_PARAMS) continue;
      av1_domaintxfmrf_restoration_highbd(dgd, width, height, dgd_stride, p,
                                          bit_depth, flt, width, tmpbuf);
      sse = compute_sse_highbd(flt, width, height, width, src, src_stride);
      if (sse < best_sse) {
        best_p = p;
        best_sse = sse;
      }
    }
#else
    assert(0);
#endif  // CONFIG_AOM_HIGHBITDEPTH
  }
  *sigma_r = best_p;
}

static double search_domaintxfmrf(const YV12_BUFFER_CONFIG *src, AV1_COMP *cpi,
                                  int partial_frame, RestorationInfo *info,
                                  RestorationType *type, double *best_tile_cost,
                                  YV12_BUFFER_CONFIG *dst_frame) {
  DomaintxfmrfInfo *domaintxfmrf_info = info->domaintxfmrf_info;
  double cost_norestore, cost_domaintxfmrf;
  int64_t err;
  int bits;
  MACROBLOCK *x = &cpi->td.mb;
  AV1_COMMON *const cm = &cpi->common;
  const YV12_BUFFER_CONFIG *dgd = cm->frame_to_show;
  RestorationInfo *rsi = &cpi->rst_search[0];
  int tile_idx, tile_width, tile_height, nhtiles, nvtiles;
  int h_start, h_end, v_start, v_end;
  const int ntiles = av1_get_rest_ntiles(cm->width, cm->height, &tile_width,
                                         &tile_height, &nhtiles, &nvtiles);

  rsi->frame_restoration_type = RESTORE_DOMAINTXFMRF;

  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    rsi->restoration_type[tile_idx] = RESTORE_NONE;
  }
  // Compute best Domaintxfm filters for each tile
  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    av1_get_rest_tile_limits(tile_idx, 0, 0, nhtiles, nvtiles, tile_width,
                             tile_height, cm->width, cm->height, 0, 0, &h_start,
                             &h_end, &v_start, &v_end);
    err = sse_restoration_tile(src, cm->frame_to_show, cm, h_start,
                               h_end - h_start, v_start, v_end - v_start, 1);
    // #bits when a tile is not restored
    bits = av1_cost_bit(RESTORE_NONE_DOMAINTXFMRF_PROB, 0);
    cost_norestore = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);
    best_tile_cost[tile_idx] = DBL_MAX;

    search_domaintxfmrf_restoration(
        dgd->y_buffer + v_start * dgd->y_stride + h_start, h_end - h_start,
        v_end - v_start, dgd->y_stride,
        src->y_buffer + v_start * src->y_stride + h_start, src->y_stride,
#if CONFIG_AOM_HIGHBITDEPTH
        cm->bit_depth,
#else
        8,
#endif  // CONFIG_AOM_HIGHBITDEPTH
        &rsi->domaintxfmrf_info[tile_idx].sigma_r, cpi->extra_rstbuf,
        cm->rst_internal.tmpbuf);

    rsi->restoration_type[tile_idx] = RESTORE_DOMAINTXFMRF;
    err = try_restoration_tile(src, cpi, rsi, 1, partial_frame, tile_idx, 0, 0,
                               dst_frame);
    bits = DOMAINTXFMRF_PARAMS_BITS << AV1_PROB_COST_SHIFT;
    bits += av1_cost_bit(RESTORE_NONE_DOMAINTXFMRF_PROB, 1);
    cost_domaintxfmrf = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);
    if (cost_domaintxfmrf >= cost_norestore) {
      type[tile_idx] = RESTORE_NONE;
    } else {
      type[tile_idx] = RESTORE_DOMAINTXFMRF;
      memcpy(&domaintxfmrf_info[tile_idx], &rsi->domaintxfmrf_info[tile_idx],
             sizeof(domaintxfmrf_info[tile_idx]));
      bits = DOMAINTXFMRF_PARAMS_BITS << AV1_PROB_COST_SHIFT;
      best_tile_cost[tile_idx] = RDCOST_DBL(
          x->rdmult, x->rddiv,
          (bits + cpi->switchable_restore_cost[RESTORE_DOMAINTXFMRF]) >> 4,
          err);
    }
    rsi->restoration_type[tile_idx] = RESTORE_NONE;
  }
  // Cost for Domaintxfmrf filtering
  bits = frame_level_restore_bits[rsi->frame_restoration_type]
         << AV1_PROB_COST_SHIFT;
  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    bits += av1_cost_bit(RESTORE_NONE_DOMAINTXFMRF_PROB,
                         type[tile_idx] != RESTORE_NONE);
    memcpy(&rsi->domaintxfmrf_info[tile_idx], &domaintxfmrf_info[tile_idx],
           sizeof(domaintxfmrf_info[tile_idx]));
    if (type[tile_idx] == RESTORE_DOMAINTXFMRF) {
      bits += (DOMAINTXFMRF_PARAMS_BITS << AV1_PROB_COST_SHIFT);
    }
    rsi->restoration_type[tile_idx] = type[tile_idx];
  }
  err = try_restoration_frame(src, cpi, rsi, 1, partial_frame, dst_frame);
  cost_domaintxfmrf = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);

  return cost_domaintxfmrf;
}
#endif  // USE_DOMAINTXFMRF

static double find_average(uint8_t *src, int h_start, int h_end, int v_start,
                           int v_end, int stride) {
  uint64_t sum = 0;
  double avg = 0;
  int i, j;
  for (i = v_start; i < v_end; i++)
    for (j = h_start; j < h_end; j++) sum += src[i * stride + j];
  avg = (double)sum / ((v_end - v_start) * (h_end - h_start));
  return avg;
}

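// Accumulates the cross-correlation vector M and autocorrelation matrix H of
// WIENER_WIN x WIENER_WIN windows of the degraded frame against the source
// pixels. These statistics drive the Wiener filter computation below.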
static void compute_stats(uint8_t *dgd, uint8_t *src, int h_start, int h_end,
                          int v_start, int v_end, int dgd_stride,
                          int src_stride, double *M, double *H) {
  int i, j, k, l;
  double Y[WIENER_WIN2];
  const double avg =
      find_average(dgd, h_start, h_end, v_start, v_end, dgd_stride);

  memset(M, 0, sizeof(*M) * WIENER_WIN2);
  memset(H, 0, sizeof(*H) * WIENER_WIN2 * WIENER_WIN2);
  for (i = v_start; i < v_end; i++) {
    for (j = h_start; j < h_end; j++) {
      const double X = (double)src[i * src_stride + j] - avg;
      int idx = 0;
      for (k = -WIENER_HALFWIN; k <= WIENER_HALFWIN; k++) {
        for (l = -WIENER_HALFWIN; l <= WIENER_HALFWIN; l++) {
          Y[idx] = (double)dgd[(i + l) * dgd_stride + (j + k)] - avg;
          idx++;
        }
      }
      for (k = 0; k < WIENER_WIN2; ++k) {
        M[k] += Y[k] * X;
        H[k * WIENER_WIN2 + k] += Y[k] * Y[k];
        for (l = k + 1; l < WIENER_WIN2; ++l) {
          // H is a symmetric matrix, so we only need to fill out the upper
          // triangle here. We can copy it down to the lower triangle outside
          // the (i, j) loops.
          H[k * WIENER_WIN2 + l] += Y[k] * Y[l];
        }
      }
    }
  }
  for (k = 0; k < WIENER_WIN2; ++k) {
    for (l = k + 1; l < WIENER_WIN2; ++l) {
      H[l * WIENER_WIN2 + k] = H[k * WIENER_WIN2 + l];
    }
  }
}

#if CONFIG_AOM_HIGHBITDEPTH
static double find_average_highbd(uint16_t *src, int h_start, int h_end,
                                  int v_start, int v_end, int stride) {
  uint64_t sum = 0;
  double avg = 0;
  int i, j;
  for (i = v_start; i < v_end; i++)
    for (j = h_start; j < h_end; j++) sum += src[i * stride + j];
  avg = (double)sum / ((v_end - v_start) * (h_end - h_start));
  return avg;
}

static void compute_stats_highbd(uint8_t *dgd8, uint8_t *src8, int h_start,
                                 int h_end, int v_start, int v_end,
                                 int dgd_stride, int src_stride, double *M,
                                 double *H) {
  int i, j, k, l;
  double Y[WIENER_WIN2];
  uint16_t *src = CONVERT_TO_SHORTPTR(src8);
  uint16_t *dgd = CONVERT_TO_SHORTPTR(dgd8);
  const double avg =
      find_average_highbd(dgd, h_start, h_end, v_start, v_end, dgd_stride);

  memset(M, 0, sizeof(*M) * WIENER_WIN2);
  memset(H, 0, sizeof(*H) * WIENER_WIN2 * WIENER_WIN2);
  for (i = v_start; i < v_end; i++) {
    for (j = h_start; j < h_end; j++) {
      const double X = (double)src[i * src_stride + j] - avg;
      int idx = 0;
      for (k = -WIENER_HALFWIN; k <= WIENER_HALFWIN; k++) {
        for (l = -WIENER_HALFWIN; l <= WIENER_HALFWIN; l++) {
          Y[idx] = (double)dgd[(i + l) * dgd_stride + (j + k)] - avg;
          idx++;
        }
      }
      for (k = 0; k < WIENER_WIN2; ++k) {
        M[k] += Y[k] * X;
        H[k * WIENER_WIN2 + k] += Y[k] * Y[k];
        for (l = k + 1; l < WIENER_WIN2; ++l) {
          // H is a symmetric matrix, so we only need to fill out the upper
          // triangle here. We can copy it down to the lower triangle outside
          // the (i, j) loops.
          H[k * WIENER_WIN2 + l] += Y[k] * Y[l];
        }
      }
    }
  }
  for (k = 0; k < WIENER_WIN2; ++k) {
    for (l = k + 1; l < WIENER_WIN2; ++l) {
      H[l * WIENER_WIN2 + k] = H[k * WIENER_WIN2 + l];
    }
  }
}
#endif  // CONFIG_AOM_HIGHBITDEPTH

// Solves Ax = b, where x and b are column vectors
static int linsolve(int n, double *A, int stride, double *b, double *x) {
  int i, j, k;
  double c;
  // Partial pivoting
  for (i = n - 1; i > 0; i--) {
    if (A[(i - 1) * stride] < A[i * stride]) {
      for (j = 0; j < n; j++) {
        c = A[i * stride + j];
        A[i * stride + j] = A[(i - 1) * stride + j];
        A[(i - 1) * stride + j] = c;
      }
      c = b[i];
      b[i] = b[i - 1];
      b[i - 1] = c;
    }
  }
  // Forward elimination
  for (k = 0; k < n - 1; k++) {
    for (i = k; i < n - 1; i++) {
      c = A[(i + 1) * stride + k] / A[k * stride + k];
      for (j = 0; j < n; j++) A[(i + 1) * stride + j] -= c * A[k * stride + j];
      b[i + 1] -= c * b[k];
    }
  }
  // Backward substitution
  for (i = n - 1; i >= 0; i--) {
    if (fabs(A[i * stride + i]) < 1e-10) return 0;
    c = 0;
    for (j = i + 1; j <= n - 1; j++) c += A[i * stride + j] * x[j];
    x[i] = (b[i] - c) / A[i * stride + i];
  }
  return 1;
}

static INLINE int wrap_index(int i) {
  return (i >= WIENER_HALFWIN1 ? WIENER_WIN - 1 - i : i);
}

// Fix vector b, update vector a
static void update_a_sep_sym(double **Mc, double **Hc, double *a, double *b) {
  int i, j;
  double S[WIENER_WIN];
  double A[WIENER_WIN], B[WIENER_WIN2];
  int w, w2;
  memset(A, 0, sizeof(A));
  memset(B, 0, sizeof(B));
  for (i = 0; i < WIENER_WIN; i++) {
    for (j = 0; j < WIENER_WIN; ++j) {
      const int jj = wrap_index(j);
      A[jj] += Mc[i][j] * b[i];
    }
  }
  for (i = 0; i < WIENER_WIN; i++) {
    for (j = 0; j < WIENER_WIN; j++) {
      int k, l;
      for (k = 0; k < WIENER_WIN; ++k)
        for (l = 0; l < WIENER_WIN; ++l) {
          const int kk = wrap_index(k);
          const int ll = wrap_index(l);
          B[ll * WIENER_HALFWIN1 + kk] +=
              Hc[j * WIENER_WIN + i][k * WIENER_WIN2 + l] * b[i] * b[j];
        }
    }
  }
  // Normalization enforcement in the system of equations itself
  w = WIENER_WIN;
  w2 = (w >> 1) + 1;
  for (i = 0; i < w2 - 1; ++i)
    A[i] -=
        A[w2 - 1] * 2 + B[i * w2 + w2 - 1] - 2 * B[(w2 - 1) * w2 + (w2 - 1)];
  for (i = 0; i < w2 - 1; ++i)
    for (j = 0; j < w2 - 1; ++j)
      B[i * w2 + j] -= 2 * (B[i * w2 + (w2 - 1)] + B[(w2 - 1) * w2 + j] -
                            2 * B[(w2 - 1) * w2 + (w2 - 1)]);
  if (linsolve(w2 - 1, B, w2, A, S)) {
    S[w2 - 1] = 1.0;
    for (i = w2; i < w; ++i) {
      S[i] = S[w - 1 - i];
      S[w2 - 1] -= 2 * S[i];
    }
    memcpy(a, S, w * sizeof(*a));
  }
}

// Fix vector a, update vector b
static void update_b_sep_sym(double **Mc, double **Hc, double *a, double *b) {
  int i, j;
  double S[WIENER_WIN];
  double A[WIENER_WIN], B[WIENER_WIN2];
  int w, w2;
  memset(A, 0, sizeof(A));
  memset(B, 0, sizeof(B));
  for (i = 0; i < WIENER_WIN; i++) {
    const int ii = wrap_index(i);
    for (j = 0; j < WIENER_WIN; j++) A[ii] += Mc[i][j] * a[j];
  }

  for (i = 0; i < WIENER_WIN; i++) {
    for (j = 0; j < WIENER_WIN; j++) {
      const int ii = wrap_index(i);
      const int jj = wrap_index(j);
      int k, l;
      for (k = 0; k < WIENER_WIN; ++k)
        for (l = 0; l < WIENER_WIN; ++l)
          B[jj * WIENER_HALFWIN1 + ii] +=
              Hc[i * WIENER_WIN + j][k * WIENER_WIN2 + l] * a[k] * a[l];
    }
  }
  // Normalization enforcement in the system of equations itself
  w = WIENER_WIN;
  w2 = WIENER_HALFWIN1;
  for (i = 0; i < w2 - 1; ++i)
    A[i] -=
        A[w2 - 1] * 2 + B[i * w2 + w2 - 1] - 2 * B[(w2 - 1) * w2 + (w2 - 1)];
  for (i = 0; i < w2 - 1; ++i)
    for (j = 0; j < w2 - 1; ++j)
      B[i * w2 + j] -= 2 * (B[i * w2 + (w2 - 1)] + B[(w2 - 1) * w2 + j] -
                            2 * B[(w2 - 1) * w2 + (w2 - 1)]);
  if (linsolve(w2 - 1, B, w2, A, S)) {
    S[w2 - 1] = 1.0;
    for (i = w2; i < w; ++i) {
      S[i] = S[w - 1 - i];
      S[w2 - 1] -= 2 * S[i];
    }
    memcpy(b, S, w * sizeof(*b));
  }
}

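// Alternately refines the vertical (a) and horizontal (b) taps of a
// separable symmetric Wiener filter from the statistics M and H, starting
// from a fixed initial filter.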
static int wiener_decompose_sep_sym(double *M, double *H, double *a,
                                    double *b) {
  static const double init_filt[WIENER_WIN] = {
    0.035623, -0.127154, 0.211436, 0.760190, 0.211436, -0.127154, 0.035623,
  };
  int i, j, iter;
  double *Hc[WIENER_WIN2];
  double *Mc[WIENER_WIN];
  for (i = 0; i < WIENER_WIN; i++) {
    Mc[i] = M + i * WIENER_WIN;
    for (j = 0; j < WIENER_WIN; j++) {
      Hc[i * WIENER_WIN + j] =
          H + i * WIENER_WIN * WIENER_WIN2 + j * WIENER_WIN;
    }
  }
  memcpy(a, init_filt, sizeof(*a) * WIENER_WIN);
  memcpy(b, init_filt, sizeof(*b) * WIENER_WIN);

  iter = 1;
  while (iter < 10) {
    update_a_sep_sym(Mc, Hc, a, b);
    update_b_sep_sym(Mc, Hc, a, b);
    iter++;
  }
  return 1;
}

// Computes the function x'*H*x - x'*M for the learned 2D filter x, and
// compares against identity filters; Final score is defined as the difference
// between the function values
static double compute_score(double *M, double *H, InterpKernel vfilt,
                            InterpKernel hfilt) {
  double ab[WIENER_WIN * WIENER_WIN];
  int i, k, l;
  double P = 0, Q = 0;
  double iP = 0, iQ = 0;
  double Score, iScore;
  double a[WIENER_WIN], b[WIENER_WIN];
  a[WIENER_HALFWIN] = b[WIENER_HALFWIN] = 1.0;
  for (i = 0; i < WIENER_HALFWIN; ++i) {
    a[i] = a[WIENER_WIN - i - 1] = (double)vfilt[i] / WIENER_FILT_STEP;
    b[i] = b[WIENER_WIN - i - 1] = (double)hfilt[i] / WIENER_FILT_STEP;
    a[WIENER_HALFWIN] -= 2 * a[i];
    b[WIENER_HALFWIN] -= 2 * b[i];
  }
  for (k = 0; k < WIENER_WIN; ++k) {
    for (l = 0; l < WIENER_WIN; ++l) ab[k * WIENER_WIN + l] = a[l] * b[k];
  }
  for (k = 0; k < WIENER_WIN2; ++k) {
    P += ab[k] * M[k];
    for (l = 0; l < WIENER_WIN2; ++l)
      Q += ab[k] * H[k * WIENER_WIN2 + l] * ab[l];
  }
  Score = Q - 2 * P;

  iP = M[WIENER_WIN2 >> 1];
  iQ = H[(WIENER_WIN2 >> 1) * WIENER_WIN2 + (WIENER_WIN2 >> 1)];
  iScore = iQ - 2 * iP;

  return Score - iScore;
}

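// Quantizes the floating-point symmetric filter taps to the integer
// InterpKernel representation, clamping each tap to its allowed range and
// re-imposing the symmetry and unit-gain constraints.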
static void quantize_sym_filter(double *f, InterpKernel fi) {
  int i;
  for (i = 0; i < WIENER_HALFWIN; ++i) {
    fi[i] = RINT(f[i] * WIENER_FILT_STEP);
  }
  // Specialize for 7-tap filter
  fi[0] = CLIP(fi[0], WIENER_FILT_TAP0_MINV, WIENER_FILT_TAP0_MAXV);
  fi[1] = CLIP(fi[1], WIENER_FILT_TAP1_MINV, WIENER_FILT_TAP1_MAXV);
  fi[2] = CLIP(fi[2], WIENER_FILT_TAP2_MINV, WIENER_FILT_TAP2_MAXV);
  // Satisfy filter constraints
  fi[WIENER_WIN - 1] = fi[0];
  fi[WIENER_WIN - 2] = fi[1];
  fi[WIENER_WIN - 3] = fi[2];
  // The central element has an implicit +WIENER_FILT_STEP
  fi[3] = -2 * (fi[0] + fi[1] + fi[2]);
}

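// Wiener filter search for one chroma plane: estimates a separable symmetric
// filter per tile, keeps it only when it beats no restoration in RD cost,
// then makes a frame-level RESTORE_WIENER vs RESTORE_NONE decision.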
static double search_wiener_uv(const YV12_BUFFER_CONFIG *src, AV1_COMP *cpi,
                               int partial_frame, int plane,
                               RestorationInfo *info, RestorationType *type,
                               YV12_BUFFER_CONFIG *dst_frame) {
  WienerInfo *wiener_info = info->wiener_info;
  AV1_COMMON *const cm = &cpi->common;
  RestorationInfo *rsi = cpi->rst_search;
  int64_t err;
  int bits;
  double cost_wiener, cost_norestore, cost_wiener_frame, cost_norestore_frame;
  MACROBLOCK *x = &cpi->td.mb;
  double M[WIENER_WIN2];
  double H[WIENER_WIN2 * WIENER_WIN2];
  double vfilterd[WIENER_WIN], hfilterd[WIENER_WIN];
  const YV12_BUFFER_CONFIG *dgd = cm->frame_to_show;
  const int width = src->uv_crop_width;
  const int height = src->uv_crop_height;
  const int src_stride = src->uv_stride;
  const int dgd_stride = dgd->uv_stride;
  double score;
  int tile_idx, tile_width, tile_height, nhtiles, nvtiles;
  int h_start, h_end, v_start, v_end;
  const int ntiles = av1_get_rest_ntiles(width, height, &tile_width,
                                         &tile_height, &nhtiles, &nvtiles);
  assert(width == dgd->uv_crop_width);
  assert(height == dgd->uv_crop_height);

  rsi[plane].frame_restoration_type = RESTORE_NONE;
  err = sse_restoration_frame(cm, src, cm->frame_to_show, (1 << plane));
  bits = 0;
  cost_norestore_frame = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);

  rsi[plane].frame_restoration_type = RESTORE_WIENER;

  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    rsi[plane].restoration_type[tile_idx] = RESTORE_NONE;
  }

  // Compute best Wiener filters for each tile
  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    av1_get_rest_tile_limits(tile_idx, 0, 0, nhtiles, nvtiles, tile_width,
                             tile_height, width, height, 0, 0, &h_start, &h_end,
                             &v_start, &v_end);
    err = sse_restoration_tile(src, cm->frame_to_show, cm, h_start,
                               h_end - h_start, v_start, v_end - v_start,
                               1 << plane);
    // #bits when a tile is not restored
    bits = av1_cost_bit(RESTORE_NONE_WIENER_PROB, 0);
    cost_norestore = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);
    // best_tile_cost[tile_idx] = DBL_MAX;

    av1_get_rest_tile_limits(tile_idx, 0, 0, nhtiles, nvtiles, tile_width,
                             tile_height, width, height, WIENER_HALFWIN,
                             WIENER_HALFWIN, &h_start, &h_end, &v_start,
                             &v_end);
    if (plane == AOM_PLANE_U) {
#if CONFIG_AOM_HIGHBITDEPTH
      if (cm->use_highbitdepth)
        compute_stats_highbd(dgd->u_buffer, src->u_buffer, h_start, h_end,
                             v_start, v_end, dgd_stride, src_stride, M, H);
      else
#endif  // CONFIG_AOM_HIGHBITDEPTH
        compute_stats(dgd->u_buffer, src->u_buffer, h_start, h_end, v_start,
                      v_end, dgd_stride, src_stride, M, H);
    } else if (plane == AOM_PLANE_V) {
#if CONFIG_AOM_HIGHBITDEPTH
      if (cm->use_highbitdepth)
        compute_stats_highbd(dgd->v_buffer, src->v_buffer, h_start, h_end,
                             v_start, v_end, dgd_stride, src_stride, M, H);
      else
#endif  // CONFIG_AOM_HIGHBITDEPTH
        compute_stats(dgd->v_buffer, src->v_buffer, h_start, h_end, v_start,
                      v_end, dgd_stride, src_stride, M, H);
    } else {
      assert(0);
    }

    type[tile_idx] = RESTORE_WIENER;

    if (!wiener_decompose_sep_sym(M, H, vfilterd, hfilterd)) {
      type[tile_idx] = RESTORE_NONE;
      continue;
    }
    quantize_sym_filter(vfilterd, rsi[plane].wiener_info[tile_idx].vfilter);
    quantize_sym_filter(hfilterd, rsi[plane].wiener_info[tile_idx].hfilter);

    // Filter score computes the value of the function x'*A*x - x'*b for the
    // learned filter and compares it against the identity filter. If there is
    // no reduction in the function, the filter is reverted back to identity
    score = compute_score(M, H, rsi[plane].wiener_info[tile_idx].vfilter,
                          rsi[plane].wiener_info[tile_idx].hfilter);
    if (score > 0.0) {
      type[tile_idx] = RESTORE_NONE;
      continue;
    }

    rsi[plane].restoration_type[tile_idx] = RESTORE_WIENER;
    err = try_restoration_tile(src, cpi, rsi, 1 << plane, partial_frame,
                               tile_idx, 0, 0, dst_frame);
    bits = WIENER_FILT_BITS << AV1_PROB_COST_SHIFT;
    bits += av1_cost_bit(RESTORE_NONE_WIENER_PROB, 1);
    cost_wiener = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);
    if (cost_wiener >= cost_norestore) {
      type[tile_idx] = RESTORE_NONE;
    } else {
      type[tile_idx] = RESTORE_WIENER;
      memcpy(&wiener_info[tile_idx], &rsi[plane].wiener_info[tile_idx],
             sizeof(wiener_info[tile_idx]));
    }
    rsi[plane].restoration_type[tile_idx] = RESTORE_NONE;
  }
  // Cost for Wiener filtering
  bits = 0;
  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    bits +=
        av1_cost_bit(RESTORE_NONE_WIENER_PROB, type[tile_idx] != RESTORE_NONE);
    memcpy(&rsi[plane].wiener_info[tile_idx], &wiener_info[tile_idx],
           sizeof(wiener_info[tile_idx]));
    if (type[tile_idx] == RESTORE_WIENER) {
      bits += (WIENER_FILT_BITS << AV1_PROB_COST_SHIFT);
    }
    rsi[plane].restoration_type[tile_idx] = type[tile_idx];
  }
  err = try_restoration_frame(src, cpi, rsi, 1 << plane, partial_frame,
                              dst_frame);
  cost_wiener_frame = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);

  if (cost_wiener_frame < cost_norestore_frame) {
    info->frame_restoration_type = RESTORE_WIENER;
  } else {
    info->frame_restoration_type = RESTORE_NONE;
  }

  return info->frame_restoration_type == RESTORE_WIENER ? cost_wiener_frame
                                                        : cost_norestore_frame;
}

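// Wiener filter search for the luma plane: estimates a separable symmetric
// filter per tile, keeps it only when it beats no restoration in RD cost,
// and accumulates the per-tile and frame-level costs.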
Debargha Mukherjee5d89a632016-09-17 13:16:58 -07001082static double search_wiener(const YV12_BUFFER_CONFIG *src, AV1_COMP *cpi,
Debargha Mukherjee00c54332017-03-03 15:44:17 -08001083 int partial_frame, RestorationInfo *info,
1084 RestorationType *type, double *best_tile_cost,
David Barker9666e752016-12-08 11:25:47 +00001085 YV12_BUFFER_CONFIG *dst_frame) {
Debargha Mukherjee5d89a632016-09-17 13:16:58 -07001086 WienerInfo *wiener_info = info->wiener_info;
Yaowu Xuf883b422016-08-30 14:01:10 -07001087 AV1_COMMON *const cm = &cpi->common;
Debargha Mukherjeea43a2d92017-01-03 15:14:57 -08001088 RestorationInfo *rsi = cpi->rst_search;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001089 int64_t err;
1090 int bits;
Debargha Mukherjee8f209a82016-10-12 10:47:01 -07001091 double cost_wiener, cost_norestore;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001092 MACROBLOCK *x = &cpi->td.mb;
Debargha Mukherjee999d2f62016-12-15 13:23:21 -08001093 double M[WIENER_WIN2];
1094 double H[WIENER_WIN2 * WIENER_WIN2];
1095 double vfilterd[WIENER_WIN], hfilterd[WIENER_WIN];
Yaowu Xuc27fc142016-08-22 16:08:15 -07001096 const YV12_BUFFER_CONFIG *dgd = cm->frame_to_show;
1097 const int width = cm->width;
1098 const int height = cm->height;
1099 const int src_stride = src->y_stride;
1100 const int dgd_stride = dgd->y_stride;
1101 double score;
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07001102 int tile_idx, tile_width, tile_height, nhtiles, nvtiles;
Yaowu Xuc27fc142016-08-22 16:08:15 -07001103 int h_start, h_end, v_start, v_end;
Debargha Mukherjee5cd2ab92016-09-08 15:15:17 -07001104 const int ntiles = av1_get_rest_ntiles(width, height, &tile_width,
1105 &tile_height, &nhtiles, &nvtiles);
Yaowu Xuc27fc142016-08-22 16:08:15 -07001106 assert(width == dgd->y_crop_width);
1107 assert(height == dgd->y_crop_height);
1108 assert(width == src->y_crop_width);
1109 assert(height == src->y_crop_height);
1110
Debargha Mukherjee999d2f62016-12-15 13:23:21 -08001111 rsi->frame_restoration_type = RESTORE_WIENER;
Debargha Mukherjee5d89a632016-09-17 13:16:58 -07001112
Debargha Mukherjee994ccd72017-01-06 11:18:23 -08001113 for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
1114 rsi->restoration_type[tile_idx] = RESTORE_NONE;
1115 }
Yaowu Xuc27fc142016-08-22 16:08:15 -07001116
David Barker33f3bfd2017-01-06 15:34:50 +00001117// Construct a (WIENER_HALFWIN)-pixel border around the frame
1118#if CONFIG_AOM_HIGHBITDEPTH
1119 if (cm->use_highbitdepth)
1120 extend_frame_highbd(CONVERT_TO_SHORTPTR(dgd->y_buffer), width, height,
1121 dgd_stride);
1122 else
1123#endif
1124 extend_frame(dgd->y_buffer, width, height, dgd_stride);

  // Compute best Wiener filters for each tile
  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    av1_get_rest_tile_limits(tile_idx, 0, 0, nhtiles, nvtiles, tile_width,
                             tile_height, width, height, 0, 0, &h_start, &h_end,
                             &v_start, &v_end);
    err = sse_restoration_tile(src, cm->frame_to_show, cm, h_start,
                               h_end - h_start, v_start, v_end - v_start, 1);
    // #bits when a tile is not restored
    bits = av1_cost_bit(RESTORE_NONE_WIENER_PROB, 0);
    cost_norestore = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);
    best_tile_cost[tile_idx] = DBL_MAX;

    av1_get_rest_tile_limits(tile_idx, 0, 0, nhtiles, nvtiles, tile_width,
                             tile_height, width, height, 0, 0, &h_start, &h_end,
                             &v_start, &v_end);
#if CONFIG_AOM_HIGHBITDEPTH
    if (cm->use_highbitdepth)
      compute_stats_highbd(dgd->y_buffer, src->y_buffer, h_start, h_end,
                           v_start, v_end, dgd_stride, src_stride, M, H);
    else
#endif  // CONFIG_AOM_HIGHBITDEPTH
      compute_stats(dgd->y_buffer, src->y_buffer, h_start, h_end, v_start,
                    v_end, dgd_stride, src_stride, M, H);
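    // Roughly, M holds the cross-correlation between source and degraded
    // pixels and H the autocorrelation of the degraded pixels, so the
    // unconstrained Wiener taps x solve the normal equations H * x = M.
    // wiener_decompose_sep_sym() below factors that solution into the
    // separable vertical/horizontal filter pair used by loop restoration.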

    type[tile_idx] = RESTORE_WIENER;

    if (!wiener_decompose_sep_sym(M, H, vfilterd, hfilterd)) {
      type[tile_idx] = RESTORE_NONE;
      continue;
    }
    quantize_sym_filter(vfilterd, rsi->wiener_info[tile_idx].vfilter);
    quantize_sym_filter(hfilterd, rsi->wiener_info[tile_idx].hfilter);
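    // quantize_sym_filter() snaps the real-valued solution to the symmetric
    // fixed-point taps used by the bitstream; compute_score() below then
    // checks the quantized filter against the identity filter using the
    // accumulated statistics before any actual filtering is tried.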

    // Filter score computes the value of the function x' * A * x - 2 * x' * b
    // for the learned filter and compares it against the identity filter. If
    // there is no reduction in the function, the filter is reverted to
    // identity.
    score = compute_score(M, H, rsi->wiener_info[tile_idx].vfilter,
                          rsi->wiener_info[tile_idx].hfilter);
    if (score > 0.0) {
      type[tile_idx] = RESTORE_NONE;
      continue;
    }

    rsi->restoration_type[tile_idx] = RESTORE_WIENER;
    err = try_restoration_tile(src, cpi, rsi, 1, partial_frame, tile_idx, 0, 0,
                               dst_frame);
    bits = WIENER_FILT_BITS << AV1_PROB_COST_SHIFT;
    bits += av1_cost_bit(RESTORE_NONE_WIENER_PROB, 1);
    cost_wiener = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);
    if (cost_wiener >= cost_norestore) {
      type[tile_idx] = RESTORE_NONE;
    } else {
      type[tile_idx] = RESTORE_WIENER;
      memcpy(&wiener_info[tile_idx], &rsi->wiener_info[tile_idx],
             sizeof(wiener_info[tile_idx]));
      bits = WIENER_FILT_BITS << AV1_PROB_COST_SHIFT;
      best_tile_cost[tile_idx] = RDCOST_DBL(
          x->rdmult, x->rddiv,
          (bits + cpi->switchable_restore_cost[RESTORE_WIENER]) >> 4, err);
    }
    rsi->restoration_type[tile_idx] = RESTORE_NONE;
  }
  // Frame-level cost of coding this frame with RESTORE_WIENER: signaling bits
  // for every tile plus the error after applying the filters chosen above
  bits = frame_level_restore_bits[rsi->frame_restoration_type]
         << AV1_PROB_COST_SHIFT;
  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    bits +=
        av1_cost_bit(RESTORE_NONE_WIENER_PROB, type[tile_idx] != RESTORE_NONE);
    memcpy(&rsi->wiener_info[tile_idx], &wiener_info[tile_idx],
           sizeof(wiener_info[tile_idx]));
    if (type[tile_idx] == RESTORE_WIENER) {
      bits += (WIENER_FILT_BITS << AV1_PROB_COST_SHIFT);
    }
    rsi->restoration_type[tile_idx] = type[tile_idx];
  }
  err = try_restoration_frame(src, cpi, rsi, 1, partial_frame, dst_frame);
  cost_wiener = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);

  return cost_wiener;
}

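// Computes the RD cost of leaving every restoration tile unfiltered. The
// per-tile costs stored in 'best_tile_cost' include the cost of signaling
// RESTORE_NONE in switchable mode, and the return value is the frame-level
// cost of the RESTORE_NONE frame type; both serve as the baseline that the
// other searches are compared against.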
static double search_norestore(const YV12_BUFFER_CONFIG *src, AV1_COMP *cpi,
                               int partial_frame, RestorationInfo *info,
                               RestorationType *type, double *best_tile_cost,
                               YV12_BUFFER_CONFIG *dst_frame) {
  double err, cost_norestore;
  int bits;
  MACROBLOCK *x = &cpi->td.mb;
  AV1_COMMON *const cm = &cpi->common;
  int tile_idx, tile_width, tile_height, nhtiles, nvtiles;
  int h_start, h_end, v_start, v_end;
  const int ntiles = av1_get_rest_ntiles(cm->width, cm->height, &tile_width,
                                         &tile_height, &nhtiles, &nvtiles);
  (void)info;
  (void)dst_frame;
  (void)partial_frame;

  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    av1_get_rest_tile_limits(tile_idx, 0, 0, nhtiles, nvtiles, tile_width,
                             tile_height, cm->width, cm->height, 0, 0, &h_start,
                             &h_end, &v_start, &v_end);
    err = sse_restoration_tile(src, cm->frame_to_show, cm, h_start,
                               h_end - h_start, v_start, v_end - v_start, 1);
    best_tile_cost[tile_idx] =
        RDCOST_DBL(x->rdmult, x->rddiv,
                   (cpi->switchable_restore_cost[RESTORE_NONE] >> 4), err);
    type[tile_idx] = RESTORE_NONE;
  }
  // RD cost associated with no restoration
  err = sse_restoration_tile(src, cm->frame_to_show, cm, 0, cm->width, 0,
                             cm->height, 1);
  bits = frame_level_restore_bits[RESTORE_NONE] << AV1_PROB_COST_SHIFT;
  cost_norestore = RDCOST_DBL(x->rdmult, x->rddiv, (bits >> 4), err);
  return cost_norestore;
}

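// Evaluates the RESTORE_SWITCHABLE frame type: for each tile it keeps the
// cheapest of the per-tile costs computed by the searches above and writes
// the winning type into rsi->restoration_type. The per-tile entries in
// 'tile_cost' are expected to already include the switchable signaling cost
// for their type, so only the frame-level bits are added here.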
static double search_switchable_restoration(
    AV1_COMP *cpi, int partial_frame, RestorationInfo *rsi,
    double *tile_cost[RESTORE_SWITCHABLE_TYPES]) {
  AV1_COMMON *const cm = &cpi->common;
  MACROBLOCK *x = &cpi->td.mb;
  double cost_switchable = 0;
  int r, bits, tile_idx;
  const int ntiles =
      av1_get_rest_ntiles(cm->width, cm->height, NULL, NULL, NULL, NULL);
  (void)partial_frame;

  rsi->frame_restoration_type = RESTORE_SWITCHABLE;
  bits = frame_level_restore_bits[rsi->frame_restoration_type]
         << AV1_PROB_COST_SHIFT;
  cost_switchable = RDCOST_DBL(x->rdmult, x->rddiv, bits >> 4, 0);
  for (tile_idx = 0; tile_idx < ntiles; ++tile_idx) {
    double best_cost = tile_cost[RESTORE_NONE][tile_idx];
    rsi->restoration_type[tile_idx] = RESTORE_NONE;
    for (r = 1; r < RESTORE_SWITCHABLE_TYPES; r++) {
      if (tile_cost[r][tile_idx] < best_cost) {
        rsi->restoration_type[tile_idx] = r;
        best_cost = tile_cost[r][tile_idx];
      }
    }
    cost_switchable += best_cost;
  }
  return cost_switchable;
}

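// Top-level loop-restoration search: runs each single-type search, then the
// switchable search, keeps the frame restoration type with the lowest RD cost
// for luma, and finally searches the chroma planes. A sketch of the expected
// call site (an assumption; the actual wiring lives elsewhere in the encoder
// and the field names may differ):
//
//   av1_pick_filter_restoration(cpi->source, cpi, cpi->sf.lpf_pick);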
void av1_pick_filter_restoration(const YV12_BUFFER_CONFIG *src, AV1_COMP *cpi,
                                 LPF_PICK_METHOD method) {
  static search_restore_type search_restore_fun[RESTORE_SWITCHABLE_TYPES] = {
    search_norestore,
    search_wiener,
    search_sgrproj,
#if USE_DOMAINTXFMRF
    search_domaintxfmrf,
#endif  // USE_DOMAINTXFMRF
  };
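  // This table is indexed by RestorationType, so the entries above must stay
  // in the same order as the first RESTORE_SWITCHABLE_TYPES values of that
  // enum.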
  AV1_COMMON *const cm = &cpi->common;
  double cost_restore[RESTORE_TYPES];
  double *tile_cost[RESTORE_SWITCHABLE_TYPES];
  RestorationType *restore_types[RESTORE_SWITCHABLE_TYPES];
  double best_cost_restore;
  RestorationType r, best_restore;

  const int ntiles =
      av1_get_rest_ntiles(cm->width, cm->height, NULL, NULL, NULL, NULL);

  for (r = 0; r < RESTORE_SWITCHABLE_TYPES; r++) {
    tile_cost[r] = (double *)aom_malloc(sizeof(*tile_cost[0]) * ntiles);
    restore_types[r] =
        (RestorationType *)aom_malloc(sizeof(*restore_types[0]) * ntiles);
  }
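  // Note: aom_malloc() failures are not checked here; the per-type buffers
  // are released at the end of this function.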

  for (r = 0; r < RESTORE_SWITCHABLE_TYPES; ++r) {
    cost_restore[r] = search_restore_fun[r](
        src, cpi, method == LPF_PICK_FROM_SUBIMAGE, &cm->rst_info[0],
        restore_types[r], tile_cost[r], &cpi->trial_frame_rst);
  }
  cost_restore[RESTORE_SWITCHABLE] = search_switchable_restoration(
      cpi, method == LPF_PICK_FROM_SUBIMAGE, &cm->rst_info[0], tile_cost);

  best_cost_restore = DBL_MAX;
  best_restore = RESTORE_NONE;
  for (r = 0; r < RESTORE_TYPES; ++r) {
    if (cost_restore[r] < best_cost_restore) {
      best_restore = r;
      best_cost_restore = cost_restore[r];
    }
  }
  cm->rst_info[0].frame_restoration_type = best_restore;
  if (best_restore != RESTORE_SWITCHABLE) {
    memcpy(cm->rst_info[0].restoration_type, restore_types[best_restore],
           ntiles * sizeof(restore_types[best_restore][0]));
  }
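  // When RESTORE_SWITCHABLE wins, the per-tile types were already written
  // directly into cm->rst_info[0] by search_switchable_restoration(), so no
  // copy is needed in that case.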

  // Color components
  search_wiener_uv(src, cpi, method == LPF_PICK_FROM_SUBIMAGE, AOM_PLANE_U,
                   &cm->rst_info[AOM_PLANE_U],
                   cm->rst_info[AOM_PLANE_U].restoration_type,
                   &cpi->trial_frame_rst);
  search_wiener_uv(src, cpi, method == LPF_PICK_FROM_SUBIMAGE, AOM_PLANE_V,
                   &cm->rst_info[AOM_PLANE_V],
                   cm->rst_info[AOM_PLANE_V].restoration_type,
                   &cpi->trial_frame_rst);
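  // The chroma planes are currently limited to a Wiener-vs-none decision;
  // search_wiener_uv() fills rst_info[AOM_PLANE_U] and rst_info[AOM_PLANE_V]
  // and sets their frame restoration type accordingly.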
  /*
  printf("Frame %d/%d restore types: %d %d %d\n",
         cm->current_video_frame, cm->show_frame,
         cm->rst_info[0].frame_restoration_type,
         cm->rst_info[1].frame_restoration_type,
         cm->rst_info[2].frame_restoration_type);
  */
  /*
#if USE_DOMAINTXFMRF
  printf("Frame %d/%d frame_restore_type %d : %f %f %f %f %f\n",
         cm->current_video_frame, cm->show_frame,
         cm->rst_info[0].frame_restoration_type, cost_restore[0],
         cost_restore[1], cost_restore[2], cost_restore[3], cost_restore[4]);
#else
  printf("Frame %d/%d frame_restore_type %d : %f %f %f %f\n",
         cm->current_video_frame, cm->show_frame,
         cm->rst_info[0].frame_restoration_type, cost_restore[0],
         cost_restore[1], cost_restore[2], cost_restore[3]);
#endif  // USE_DOMAINTXFMRF
  */

  for (r = 0; r < RESTORE_SWITCHABLE_TYPES; r++) {
    aom_free(tile_cost[r]);
    aom_free(restore_types[r]);
  }
}