/*
 *  Copyright (c) 2012 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include <stdlib.h>
#include <new>

#include "third_party/googletest/src/include/gtest/gtest.h"

#include "test/clear_system_state.h"
#include "test/register_state_check.h"

#include "vpx/vpx_integer.h"
#include "./vpx_config.h"
#include "vpx_mem/vpx_mem.h"
#if CONFIG_VP8_ENCODER
# include "./vp8_rtcd.h"
# include "vp8/common/variance.h"
#endif
#if CONFIG_VP9_ENCODER
# include "./vp9_rtcd.h"
# include "vp9/encoder/vp9_variance.h"
#endif
#include "test/acm_random.h"

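// This file checks the block variance kernels used by the VP8 and VP9
// encoders: plain variance, sub-pixel variance and, for VP9, sub-pixel
// average variance.  Every implementation instantiated below (C, MMX, SSE2,
// SSSE3, AVX2, NEON) is compared against the simple reference routines
// defined in this file on identical inputs.
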
namespace {

using ::std::tr1::get;
using ::std::tr1::make_tuple;
using ::std::tr1::tuple;
using libvpx_test::ACMRandom;

static unsigned int variance_ref(const uint8_t *ref, const uint8_t *src,
                                 int l2w, int l2h, unsigned int *sse_ptr) {
  int se = 0;
  unsigned int sse = 0;
  const int w = 1 << l2w, h = 1 << l2h;
  for (int y = 0; y < h; y++) {
    for (int x = 0; x < w; x++) {
      int diff = ref[w * y + x] - src[w * y + x];
      se += diff;
      sse += diff * diff;
    }
  }
  *sse_ptr = sse;
  return sse - (((int64_t) se * se) >> (l2w + l2h));
}
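
// Note on variance_ref(): for an N = 2^(l2w + l2h) pixel block, the value
// returned (and the value the functions under test return) is
//   sse - se^2 / N,
// i.e. N times the statistical variance of the per-pixel differences, with
// the division by N done as a shift.  The int64_t cast keeps se * se from
// overflowing for the larger (up to 64x64) block sizes.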

static unsigned int subpel_variance_ref(const uint8_t *ref, const uint8_t *src,
                                        int l2w, int l2h, int xoff, int yoff,
                                        unsigned int *sse_ptr) {
  int se = 0;
  unsigned int sse = 0;
  const int w = 1 << l2w, h = 1 << l2h;
  for (int y = 0; y < h; y++) {
    for (int x = 0; x < w; x++) {
      // bilinear interpolation at a 16th pel step
      const int a1 = ref[(w + 1) * (y + 0) + x + 0];
      const int a2 = ref[(w + 1) * (y + 0) + x + 1];
      const int b1 = ref[(w + 1) * (y + 1) + x + 0];
      const int b2 = ref[(w + 1) * (y + 1) + x + 1];
      const int a = a1 + (((a2 - a1) * xoff + 8) >> 4);
      const int b = b1 + (((b2 - b1) * xoff + 8) >> 4);
      const int r = a + (((b - a) * yoff + 8) >> 4);
      int diff = r - src[w * y + x];
      se += diff;
      sse += diff * diff;
    }
  }
  *sse_ptr = sse;
  return sse - (((int64_t) se * se) >> (l2w + l2h));
}
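
// subpel_variance_ref() builds each reference pixel by bilinear
// interpolation before differencing: xoff and yoff are 1/16th-pel offsets in
// [0, 15], each tap is rounded to nearest via the "+ 8" before the ">> 4",
// and the horizontal pass feeds the vertical one.  The reference block is
// read with a stride of w + 1, so the interpolation needs one extra column
// and row; the tests below allocate ref_ with that extra padding.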

template<typename VarianceFunctionType>
class VarianceTest
    : public ::testing::TestWithParam<tuple<int, int, VarianceFunctionType> > {
 public:
  virtual void SetUp() {
    const tuple<int, int, VarianceFunctionType>& params = this->GetParam();
    log2width_ = get<0>(params);
    width_ = 1 << log2width_;
    log2height_ = get<1>(params);
    height_ = 1 << log2height_;
    variance_ = get<2>(params);

    rnd(ACMRandom::DeterministicSeed());
    block_size_ = width_ * height_;
    src_ = reinterpret_cast<uint8_t *>(vpx_memalign(16, block_size_));
    ref_ = new uint8_t[block_size_];
    ASSERT_TRUE(src_ != NULL);
    ASSERT_TRUE(ref_ != NULL);
  }

  virtual void TearDown() {
    vpx_free(src_);
    delete[] ref_;
    libvpx_test::ClearSystemState();
  }

 protected:
  void ZeroTest();
  void RefTest();
  void OneQuarterTest();

  ACMRandom rnd;
  uint8_t* src_;
  uint8_t* ref_;
  int width_, log2width_;
  int height_, log2height_;
  int block_size_;
  VarianceFunctionType variance_;
};

template<typename VarianceFunctionType>
void VarianceTest<VarianceFunctionType>::ZeroTest() {
  for (int i = 0; i <= 255; ++i) {
    memset(src_, i, block_size_);
    for (int j = 0; j <= 255; ++j) {
      memset(ref_, j, block_size_);
      unsigned int sse;
      unsigned int var;
      ASM_REGISTER_STATE_CHECK(
          var = variance_(src_, width_, ref_, width_, &sse));
      EXPECT_EQ(0u, var) << "src values: " << i << " ref values: " << j;
    }
  }
}
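
// ZeroTest() exercises every pair of constant-valued blocks: when src is
// filled with i and ref with j, every per-pixel difference is j - i, so
// sse == N * (j - i)^2 and se^2 / N is the same quantity, and the variance
// must come out as exactly 0 for all 256 * 256 combinations.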

template<typename VarianceFunctionType>
void VarianceTest<VarianceFunctionType>::RefTest() {
  for (int i = 0; i < 10; ++i) {
    for (int j = 0; j < block_size_; j++) {
      src_[j] = rnd.Rand8();
      ref_[j] = rnd.Rand8();
    }
    unsigned int sse1, sse2;
    unsigned int var1;
    ASM_REGISTER_STATE_CHECK(
        var1 = variance_(src_, width_, ref_, width_, &sse1));
    const unsigned int var2 = variance_ref(src_, ref_, log2width_,
                                           log2height_, &sse2);
    EXPECT_EQ(sse1, sse2);
    EXPECT_EQ(var1, var2);
  }
}
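
// RefTest() runs ten random blocks through both the function under test and
// variance_ref() and requires bit-exact agreement on the SSE and variance
// outputs.  ASM_REGISTER_STATE_CHECK (from test/register_state_check.h)
// additionally flags assembly implementations that clobber callee-saved
// registers, on the platforms where that check is wired up.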

template<typename VarianceFunctionType>
void VarianceTest<VarianceFunctionType>::OneQuarterTest() {
  memset(src_, 255, block_size_);
  const int half = block_size_ / 2;
  memset(ref_, 255, half);
  memset(ref_ + half, 0, half);
  unsigned int sse;
  unsigned int var;
  ASM_REGISTER_STATE_CHECK(var = variance_(src_, width_, ref_, width_, &sse));
  const unsigned int expected = block_size_ * 255 * 255 / 4;
  EXPECT_EQ(expected, var);
}
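
// OneQuarterTest() checks a closed-form case: src is all 255 while half of
// ref is 255 and half is 0, so half of the N pixels differ by 255.  Then
//   sse      = (N / 2) * 255^2
//   se^2 / N = ((N / 2) * 255)^2 / N = (N / 4) * 255^2
// and the variance is sse - se^2 / N = N * 255 * 255 / 4, the "expected"
// value computed above.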

#if CONFIG_VP9_ENCODER

unsigned int subpel_avg_variance_ref(const uint8_t *ref,
                                     const uint8_t *src,
                                     const uint8_t *second_pred,
                                     int l2w, int l2h,
                                     int xoff, int yoff,
                                     unsigned int *sse_ptr) {
  int se = 0;
  unsigned int sse = 0;
  const int w = 1 << l2w, h = 1 << l2h;
  for (int y = 0; y < h; y++) {
    for (int x = 0; x < w; x++) {
      // bilinear interpolation at a 16th pel step
      const int a1 = ref[(w + 1) * (y + 0) + x + 0];
      const int a2 = ref[(w + 1) * (y + 0) + x + 1];
      const int b1 = ref[(w + 1) * (y + 1) + x + 0];
      const int b2 = ref[(w + 1) * (y + 1) + x + 1];
      const int a = a1 + (((a2 - a1) * xoff + 8) >> 4);
      const int b = b1 + (((b2 - b1) * xoff + 8) >> 4);
      const int r = a + (((b - a) * yoff + 8) >> 4);
      int diff = ((r + second_pred[w * y + x] + 1) >> 1) - src[w * y + x];
      se += diff;
      sse += diff * diff;
    }
  }
  *sse_ptr = sse;
  return sse - (((int64_t) se * se) >> (l2w + l2h));
}
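
// subpel_avg_variance_ref() differs from subpel_variance_ref() only in that
// the interpolated pixel is averaged with the matching second_pred pixel
// (with +1 rounding) before the difference is taken, mirroring the second
// predictor handling of the vp9_sub_pixel_avg_variance functions under test.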

template<typename SubpelVarianceFunctionType>
class SubpelVarianceTest
    : public ::testing::TestWithParam<tuple<int, int,
                                            SubpelVarianceFunctionType> > {
 public:
  virtual void SetUp() {
    const tuple<int, int, SubpelVarianceFunctionType>& params =
        this->GetParam();
    log2width_ = get<0>(params);
    width_ = 1 << log2width_;
    log2height_ = get<1>(params);
    height_ = 1 << log2height_;
    subpel_variance_ = get<2>(params);

    rnd(ACMRandom::DeterministicSeed());
    block_size_ = width_ * height_;
    src_ = reinterpret_cast<uint8_t *>(vpx_memalign(16, block_size_));
    sec_ = reinterpret_cast<uint8_t *>(vpx_memalign(16, block_size_));
    ref_ = new uint8_t[block_size_ + width_ + height_ + 1];
    ASSERT_TRUE(src_ != NULL);
    ASSERT_TRUE(sec_ != NULL);
    ASSERT_TRUE(ref_ != NULL);
  }

  virtual void TearDown() {
    vpx_free(src_);
    delete[] ref_;
    vpx_free(sec_);
    libvpx_test::ClearSystemState();
  }

 protected:
  void RefTest();

  ACMRandom rnd;
  uint8_t *src_;
  uint8_t *ref_;
  uint8_t *sec_;
  int width_, log2width_;
  int height_, log2height_;
  int block_size_;
  SubpelVarianceFunctionType subpel_variance_;
};

template<typename SubpelVarianceFunctionType>
void SubpelVarianceTest<SubpelVarianceFunctionType>::RefTest() {
  for (int x = 0; x < 16; ++x) {
    for (int y = 0; y < 16; ++y) {
      for (int j = 0; j < block_size_; j++) {
        src_[j] = rnd.Rand8();
      }
      for (int j = 0; j < block_size_ + width_ + height_ + 1; j++) {
        ref_[j] = rnd.Rand8();
      }
      unsigned int sse1, sse2;
      unsigned int var1;
      ASM_REGISTER_STATE_CHECK(var1 = subpel_variance_(ref_, width_ + 1, x, y,
                                                       src_, width_, &sse1));
      const unsigned int var2 = subpel_variance_ref(ref_, src_, log2width_,
                                                    log2height_, x, y, &sse2);
      EXPECT_EQ(sse1, sse2) << "at position " << x << ", " << y;
      EXPECT_EQ(var1, var2) << "at position " << x << ", " << y;
    }
  }
}

template<>
void SubpelVarianceTest<vp9_subp_avg_variance_fn_t>::RefTest() {
  for (int x = 0; x < 16; ++x) {
    for (int y = 0; y < 16; ++y) {
      for (int j = 0; j < block_size_; j++) {
        src_[j] = rnd.Rand8();
        sec_[j] = rnd.Rand8();
      }
      for (int j = 0; j < block_size_ + width_ + height_ + 1; j++) {
        ref_[j] = rnd.Rand8();
      }
      unsigned int sse1, sse2;
      unsigned int var1;
      ASM_REGISTER_STATE_CHECK(
          var1 = subpel_variance_(ref_, width_ + 1, x, y,
                                  src_, width_, &sse1, sec_));
      const unsigned int var2 = subpel_avg_variance_ref(ref_, src_, sec_,
                                                        log2width_, log2height_,
                                                        x, y, &sse2);
      EXPECT_EQ(sse1, sse2) << "at position " << x << ", " << y;
      EXPECT_EQ(var1, var2) << "at position " << x << ", " << y;
    }
  }
}

#endif  // CONFIG_VP9_ENCODER

// -----------------------------------------------------------------------------
// VP8 test cases.

namespace vp8 {

#if CONFIG_VP8_ENCODER
typedef VarianceTest<vp8_variance_fn_t> VP8VarianceTest;

TEST_P(VP8VarianceTest, Zero) { ZeroTest(); }
TEST_P(VP8VarianceTest, Ref) { RefTest(); }
TEST_P(VP8VarianceTest, OneQuarter) { OneQuarterTest(); }

const vp8_variance_fn_t variance4x4_c = vp8_variance4x4_c;
const vp8_variance_fn_t variance8x8_c = vp8_variance8x8_c;
const vp8_variance_fn_t variance8x16_c = vp8_variance8x16_c;
const vp8_variance_fn_t variance16x8_c = vp8_variance16x8_c;
const vp8_variance_fn_t variance16x16_c = vp8_variance16x16_c;
INSTANTIATE_TEST_CASE_P(
    C, VP8VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_c),
                      make_tuple(3, 3, variance8x8_c),
                      make_tuple(3, 4, variance8x16_c),
                      make_tuple(4, 3, variance16x8_c),
                      make_tuple(4, 4, variance16x16_c)));

#if HAVE_NEON
const vp8_variance_fn_t variance8x8_neon = vp8_variance8x8_neon;
const vp8_variance_fn_t variance8x16_neon = vp8_variance8x16_neon;
const vp8_variance_fn_t variance16x8_neon = vp8_variance16x8_neon;
const vp8_variance_fn_t variance16x16_neon = vp8_variance16x16_neon;
INSTANTIATE_TEST_CASE_P(
    NEON, VP8VarianceTest,
    ::testing::Values(make_tuple(3, 3, variance8x8_neon),
                      make_tuple(3, 4, variance8x16_neon),
                      make_tuple(4, 3, variance16x8_neon),
                      make_tuple(4, 4, variance16x16_neon)));
#endif

#if HAVE_MMX
const vp8_variance_fn_t variance4x4_mmx = vp8_variance4x4_mmx;
const vp8_variance_fn_t variance8x8_mmx = vp8_variance8x8_mmx;
const vp8_variance_fn_t variance8x16_mmx = vp8_variance8x16_mmx;
const vp8_variance_fn_t variance16x8_mmx = vp8_variance16x8_mmx;
const vp8_variance_fn_t variance16x16_mmx = vp8_variance16x16_mmx;
INSTANTIATE_TEST_CASE_P(
    MMX, VP8VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_mmx),
                      make_tuple(3, 3, variance8x8_mmx),
                      make_tuple(3, 4, variance8x16_mmx),
                      make_tuple(4, 3, variance16x8_mmx),
                      make_tuple(4, 4, variance16x16_mmx)));
#endif

#if HAVE_SSE2
const vp8_variance_fn_t variance4x4_wmt = vp8_variance4x4_wmt;
const vp8_variance_fn_t variance8x8_wmt = vp8_variance8x8_wmt;
const vp8_variance_fn_t variance8x16_wmt = vp8_variance8x16_wmt;
const vp8_variance_fn_t variance16x8_wmt = vp8_variance16x8_wmt;
const vp8_variance_fn_t variance16x16_wmt = vp8_variance16x16_wmt;
INSTANTIATE_TEST_CASE_P(
    SSE2, VP8VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_wmt),
                      make_tuple(3, 3, variance8x8_wmt),
                      make_tuple(3, 4, variance8x16_wmt),
                      make_tuple(4, 3, variance16x8_wmt),
                      make_tuple(4, 4, variance16x16_wmt)));
#endif
#endif  // CONFIG_VP8_ENCODER

}  // namespace vp8

// -----------------------------------------------------------------------------
// VP9 test cases.

namespace vp9 {

#if CONFIG_VP9_ENCODER
typedef VarianceTest<vp9_variance_fn_t> VP9VarianceTest;
typedef SubpelVarianceTest<vp9_subpixvariance_fn_t> VP9SubpelVarianceTest;
typedef SubpelVarianceTest<vp9_subp_avg_variance_fn_t> VP9SubpelAvgVarianceTest;

TEST_P(VP9VarianceTest, Zero) { ZeroTest(); }
TEST_P(VP9VarianceTest, Ref) { RefTest(); }
TEST_P(VP9SubpelVarianceTest, Ref) { RefTest(); }
TEST_P(VP9SubpelAvgVarianceTest, Ref) { RefTest(); }
TEST_P(VP9VarianceTest, OneQuarter) { OneQuarterTest(); }

const vp9_variance_fn_t variance4x4_c = vp9_variance4x4_c;
const vp9_variance_fn_t variance4x8_c = vp9_variance4x8_c;
const vp9_variance_fn_t variance8x4_c = vp9_variance8x4_c;
const vp9_variance_fn_t variance8x8_c = vp9_variance8x8_c;
const vp9_variance_fn_t variance8x16_c = vp9_variance8x16_c;
const vp9_variance_fn_t variance16x8_c = vp9_variance16x8_c;
const vp9_variance_fn_t variance16x16_c = vp9_variance16x16_c;
const vp9_variance_fn_t variance16x32_c = vp9_variance16x32_c;
const vp9_variance_fn_t variance32x16_c = vp9_variance32x16_c;
const vp9_variance_fn_t variance32x32_c = vp9_variance32x32_c;
const vp9_variance_fn_t variance32x64_c = vp9_variance32x64_c;
const vp9_variance_fn_t variance64x32_c = vp9_variance64x32_c;
const vp9_variance_fn_t variance64x64_c = vp9_variance64x64_c;
INSTANTIATE_TEST_CASE_P(
    C, VP9VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_c),
                      make_tuple(2, 3, variance4x8_c),
                      make_tuple(3, 2, variance8x4_c),
                      make_tuple(3, 3, variance8x8_c),
                      make_tuple(3, 4, variance8x16_c),
                      make_tuple(4, 3, variance16x8_c),
                      make_tuple(4, 4, variance16x16_c),
                      make_tuple(4, 5, variance16x32_c),
                      make_tuple(5, 4, variance32x16_c),
                      make_tuple(5, 5, variance32x32_c),
                      make_tuple(5, 6, variance32x64_c),
                      make_tuple(6, 5, variance64x32_c),
                      make_tuple(6, 6, variance64x64_c)));

const vp9_subpixvariance_fn_t subpel_variance4x4_c =
    vp9_sub_pixel_variance4x4_c;
const vp9_subpixvariance_fn_t subpel_variance4x8_c =
    vp9_sub_pixel_variance4x8_c;
const vp9_subpixvariance_fn_t subpel_variance8x4_c =
    vp9_sub_pixel_variance8x4_c;
const vp9_subpixvariance_fn_t subpel_variance8x8_c =
    vp9_sub_pixel_variance8x8_c;
const vp9_subpixvariance_fn_t subpel_variance8x16_c =
    vp9_sub_pixel_variance8x16_c;
const vp9_subpixvariance_fn_t subpel_variance16x8_c =
    vp9_sub_pixel_variance16x8_c;
const vp9_subpixvariance_fn_t subpel_variance16x16_c =
    vp9_sub_pixel_variance16x16_c;
const vp9_subpixvariance_fn_t subpel_variance16x32_c =
    vp9_sub_pixel_variance16x32_c;
const vp9_subpixvariance_fn_t subpel_variance32x16_c =
    vp9_sub_pixel_variance32x16_c;
const vp9_subpixvariance_fn_t subpel_variance32x32_c =
    vp9_sub_pixel_variance32x32_c;
const vp9_subpixvariance_fn_t subpel_variance32x64_c =
    vp9_sub_pixel_variance32x64_c;
const vp9_subpixvariance_fn_t subpel_variance64x32_c =
    vp9_sub_pixel_variance64x32_c;
const vp9_subpixvariance_fn_t subpel_variance64x64_c =
    vp9_sub_pixel_variance64x64_c;
INSTANTIATE_TEST_CASE_P(
    C, VP9SubpelVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_variance4x4_c),
                      make_tuple(2, 3, subpel_variance4x8_c),
                      make_tuple(3, 2, subpel_variance8x4_c),
                      make_tuple(3, 3, subpel_variance8x8_c),
                      make_tuple(3, 4, subpel_variance8x16_c),
                      make_tuple(4, 3, subpel_variance16x8_c),
                      make_tuple(4, 4, subpel_variance16x16_c),
                      make_tuple(4, 5, subpel_variance16x32_c),
                      make_tuple(5, 4, subpel_variance32x16_c),
                      make_tuple(5, 5, subpel_variance32x32_c),
                      make_tuple(5, 6, subpel_variance32x64_c),
                      make_tuple(6, 5, subpel_variance64x32_c),
                      make_tuple(6, 6, subpel_variance64x64_c)));

const vp9_subp_avg_variance_fn_t subpel_avg_variance4x4_c =
    vp9_sub_pixel_avg_variance4x4_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance4x8_c =
    vp9_sub_pixel_avg_variance4x8_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x4_c =
    vp9_sub_pixel_avg_variance8x4_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x8_c =
    vp9_sub_pixel_avg_variance8x8_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x16_c =
    vp9_sub_pixel_avg_variance8x16_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x8_c =
    vp9_sub_pixel_avg_variance16x8_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x16_c =
    vp9_sub_pixel_avg_variance16x16_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x32_c =
    vp9_sub_pixel_avg_variance16x32_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x16_c =
    vp9_sub_pixel_avg_variance32x16_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x32_c =
    vp9_sub_pixel_avg_variance32x32_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x64_c =
    vp9_sub_pixel_avg_variance32x64_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x32_c =
    vp9_sub_pixel_avg_variance64x32_c;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x64_c =
    vp9_sub_pixel_avg_variance64x64_c;
INSTANTIATE_TEST_CASE_P(
    C, VP9SubpelAvgVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_avg_variance4x4_c),
                      make_tuple(2, 3, subpel_avg_variance4x8_c),
                      make_tuple(3, 2, subpel_avg_variance8x4_c),
                      make_tuple(3, 3, subpel_avg_variance8x8_c),
                      make_tuple(3, 4, subpel_avg_variance8x16_c),
                      make_tuple(4, 3, subpel_avg_variance16x8_c),
                      make_tuple(4, 4, subpel_avg_variance16x16_c),
                      make_tuple(4, 5, subpel_avg_variance16x32_c),
                      make_tuple(5, 4, subpel_avg_variance32x16_c),
                      make_tuple(5, 5, subpel_avg_variance32x32_c),
                      make_tuple(5, 6, subpel_avg_variance32x64_c),
                      make_tuple(6, 5, subpel_avg_variance64x32_c),
                      make_tuple(6, 6, subpel_avg_variance64x64_c)));

#if HAVE_MMX
const vp9_variance_fn_t variance4x4_mmx = vp9_variance4x4_mmx;
const vp9_variance_fn_t variance8x8_mmx = vp9_variance8x8_mmx;
const vp9_variance_fn_t variance8x16_mmx = vp9_variance8x16_mmx;
const vp9_variance_fn_t variance16x8_mmx = vp9_variance16x8_mmx;
const vp9_variance_fn_t variance16x16_mmx = vp9_variance16x16_mmx;
INSTANTIATE_TEST_CASE_P(
    MMX, VP9VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_mmx),
                      make_tuple(3, 3, variance8x8_mmx),
                      make_tuple(3, 4, variance8x16_mmx),
                      make_tuple(4, 3, variance16x8_mmx),
                      make_tuple(4, 4, variance16x16_mmx)));
#endif

#if HAVE_SSE2
#if CONFIG_USE_X86INC
const vp9_variance_fn_t variance4x4_sse2 = vp9_variance4x4_sse2;
const vp9_variance_fn_t variance4x8_sse2 = vp9_variance4x8_sse2;
const vp9_variance_fn_t variance8x4_sse2 = vp9_variance8x4_sse2;
const vp9_variance_fn_t variance8x8_sse2 = vp9_variance8x8_sse2;
const vp9_variance_fn_t variance8x16_sse2 = vp9_variance8x16_sse2;
const vp9_variance_fn_t variance16x8_sse2 = vp9_variance16x8_sse2;
const vp9_variance_fn_t variance16x16_sse2 = vp9_variance16x16_sse2;
const vp9_variance_fn_t variance16x32_sse2 = vp9_variance16x32_sse2;
const vp9_variance_fn_t variance32x16_sse2 = vp9_variance32x16_sse2;
const vp9_variance_fn_t variance32x32_sse2 = vp9_variance32x32_sse2;
const vp9_variance_fn_t variance32x64_sse2 = vp9_variance32x64_sse2;
const vp9_variance_fn_t variance64x32_sse2 = vp9_variance64x32_sse2;
const vp9_variance_fn_t variance64x64_sse2 = vp9_variance64x64_sse2;
INSTANTIATE_TEST_CASE_P(
    SSE2, VP9VarianceTest,
    ::testing::Values(make_tuple(2, 2, variance4x4_sse2),
                      make_tuple(2, 3, variance4x8_sse2),
                      make_tuple(3, 2, variance8x4_sse2),
                      make_tuple(3, 3, variance8x8_sse2),
                      make_tuple(3, 4, variance8x16_sse2),
                      make_tuple(4, 3, variance16x8_sse2),
                      make_tuple(4, 4, variance16x16_sse2),
                      make_tuple(4, 5, variance16x32_sse2),
                      make_tuple(5, 4, variance32x16_sse2),
                      make_tuple(5, 5, variance32x32_sse2),
                      make_tuple(5, 6, variance32x64_sse2),
                      make_tuple(6, 5, variance64x32_sse2),
                      make_tuple(6, 6, variance64x64_sse2)));

const vp9_subpixvariance_fn_t subpel_variance4x4_sse =
    vp9_sub_pixel_variance4x4_sse;
const vp9_subpixvariance_fn_t subpel_variance4x8_sse =
    vp9_sub_pixel_variance4x8_sse;
const vp9_subpixvariance_fn_t subpel_variance8x4_sse2 =
    vp9_sub_pixel_variance8x4_sse2;
const vp9_subpixvariance_fn_t subpel_variance8x8_sse2 =
    vp9_sub_pixel_variance8x8_sse2;
const vp9_subpixvariance_fn_t subpel_variance8x16_sse2 =
    vp9_sub_pixel_variance8x16_sse2;
const vp9_subpixvariance_fn_t subpel_variance16x8_sse2 =
    vp9_sub_pixel_variance16x8_sse2;
const vp9_subpixvariance_fn_t subpel_variance16x16_sse2 =
    vp9_sub_pixel_variance16x16_sse2;
const vp9_subpixvariance_fn_t subpel_variance16x32_sse2 =
    vp9_sub_pixel_variance16x32_sse2;
const vp9_subpixvariance_fn_t subpel_variance32x16_sse2 =
    vp9_sub_pixel_variance32x16_sse2;
const vp9_subpixvariance_fn_t subpel_variance32x32_sse2 =
    vp9_sub_pixel_variance32x32_sse2;
const vp9_subpixvariance_fn_t subpel_variance32x64_sse2 =
    vp9_sub_pixel_variance32x64_sse2;
const vp9_subpixvariance_fn_t subpel_variance64x32_sse2 =
    vp9_sub_pixel_variance64x32_sse2;
const vp9_subpixvariance_fn_t subpel_variance64x64_sse2 =
    vp9_sub_pixel_variance64x64_sse2;
INSTANTIATE_TEST_CASE_P(
    SSE2, VP9SubpelVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_variance4x4_sse),
                      make_tuple(2, 3, subpel_variance4x8_sse),
                      make_tuple(3, 2, subpel_variance8x4_sse2),
                      make_tuple(3, 3, subpel_variance8x8_sse2),
                      make_tuple(3, 4, subpel_variance8x16_sse2),
                      make_tuple(4, 3, subpel_variance16x8_sse2),
                      make_tuple(4, 4, subpel_variance16x16_sse2),
                      make_tuple(4, 5, subpel_variance16x32_sse2),
                      make_tuple(5, 4, subpel_variance32x16_sse2),
                      make_tuple(5, 5, subpel_variance32x32_sse2),
                      make_tuple(5, 6, subpel_variance32x64_sse2),
                      make_tuple(6, 5, subpel_variance64x32_sse2),
                      make_tuple(6, 6, subpel_variance64x64_sse2)));

const vp9_subp_avg_variance_fn_t subpel_avg_variance4x4_sse =
    vp9_sub_pixel_avg_variance4x4_sse;
const vp9_subp_avg_variance_fn_t subpel_avg_variance4x8_sse =
    vp9_sub_pixel_avg_variance4x8_sse;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x4_sse2 =
    vp9_sub_pixel_avg_variance8x4_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x8_sse2 =
    vp9_sub_pixel_avg_variance8x8_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x16_sse2 =
    vp9_sub_pixel_avg_variance8x16_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x8_sse2 =
    vp9_sub_pixel_avg_variance16x8_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x16_sse2 =
    vp9_sub_pixel_avg_variance16x16_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x32_sse2 =
    vp9_sub_pixel_avg_variance16x32_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x16_sse2 =
    vp9_sub_pixel_avg_variance32x16_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x32_sse2 =
    vp9_sub_pixel_avg_variance32x32_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x64_sse2 =
    vp9_sub_pixel_avg_variance32x64_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x32_sse2 =
    vp9_sub_pixel_avg_variance64x32_sse2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x64_sse2 =
    vp9_sub_pixel_avg_variance64x64_sse2;
INSTANTIATE_TEST_CASE_P(
    SSE2, VP9SubpelAvgVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_avg_variance4x4_sse),
                      make_tuple(2, 3, subpel_avg_variance4x8_sse),
                      make_tuple(3, 2, subpel_avg_variance8x4_sse2),
                      make_tuple(3, 3, subpel_avg_variance8x8_sse2),
                      make_tuple(3, 4, subpel_avg_variance8x16_sse2),
                      make_tuple(4, 3, subpel_avg_variance16x8_sse2),
                      make_tuple(4, 4, subpel_avg_variance16x16_sse2),
                      make_tuple(4, 5, subpel_avg_variance16x32_sse2),
                      make_tuple(5, 4, subpel_avg_variance32x16_sse2),
                      make_tuple(5, 5, subpel_avg_variance32x32_sse2),
                      make_tuple(5, 6, subpel_avg_variance32x64_sse2),
                      make_tuple(6, 5, subpel_avg_variance64x32_sse2),
                      make_tuple(6, 6, subpel_avg_variance64x64_sse2)));
#endif
#endif

#if HAVE_SSSE3
#if CONFIG_USE_X86INC

const vp9_subpixvariance_fn_t subpel_variance4x4_ssse3 =
    vp9_sub_pixel_variance4x4_ssse3;
const vp9_subpixvariance_fn_t subpel_variance4x8_ssse3 =
    vp9_sub_pixel_variance4x8_ssse3;
const vp9_subpixvariance_fn_t subpel_variance8x4_ssse3 =
    vp9_sub_pixel_variance8x4_ssse3;
const vp9_subpixvariance_fn_t subpel_variance8x8_ssse3 =
    vp9_sub_pixel_variance8x8_ssse3;
const vp9_subpixvariance_fn_t subpel_variance8x16_ssse3 =
    vp9_sub_pixel_variance8x16_ssse3;
const vp9_subpixvariance_fn_t subpel_variance16x8_ssse3 =
    vp9_sub_pixel_variance16x8_ssse3;
const vp9_subpixvariance_fn_t subpel_variance16x16_ssse3 =
    vp9_sub_pixel_variance16x16_ssse3;
const vp9_subpixvariance_fn_t subpel_variance16x32_ssse3 =
    vp9_sub_pixel_variance16x32_ssse3;
const vp9_subpixvariance_fn_t subpel_variance32x16_ssse3 =
    vp9_sub_pixel_variance32x16_ssse3;
const vp9_subpixvariance_fn_t subpel_variance32x32_ssse3 =
    vp9_sub_pixel_variance32x32_ssse3;
const vp9_subpixvariance_fn_t subpel_variance32x64_ssse3 =
    vp9_sub_pixel_variance32x64_ssse3;
const vp9_subpixvariance_fn_t subpel_variance64x32_ssse3 =
    vp9_sub_pixel_variance64x32_ssse3;
const vp9_subpixvariance_fn_t subpel_variance64x64_ssse3 =
    vp9_sub_pixel_variance64x64_ssse3;
INSTANTIATE_TEST_CASE_P(
    SSSE3, VP9SubpelVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_variance4x4_ssse3),
                      make_tuple(2, 3, subpel_variance4x8_ssse3),
                      make_tuple(3, 2, subpel_variance8x4_ssse3),
                      make_tuple(3, 3, subpel_variance8x8_ssse3),
                      make_tuple(3, 4, subpel_variance8x16_ssse3),
                      make_tuple(4, 3, subpel_variance16x8_ssse3),
                      make_tuple(4, 4, subpel_variance16x16_ssse3),
                      make_tuple(4, 5, subpel_variance16x32_ssse3),
                      make_tuple(5, 4, subpel_variance32x16_ssse3),
                      make_tuple(5, 5, subpel_variance32x32_ssse3),
                      make_tuple(5, 6, subpel_variance32x64_ssse3),
                      make_tuple(6, 5, subpel_variance64x32_ssse3),
                      make_tuple(6, 6, subpel_variance64x64_ssse3)));

const vp9_subp_avg_variance_fn_t subpel_avg_variance4x4_ssse3 =
    vp9_sub_pixel_avg_variance4x4_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance4x8_ssse3 =
    vp9_sub_pixel_avg_variance4x8_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x4_ssse3 =
    vp9_sub_pixel_avg_variance8x4_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x8_ssse3 =
    vp9_sub_pixel_avg_variance8x8_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance8x16_ssse3 =
    vp9_sub_pixel_avg_variance8x16_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x8_ssse3 =
    vp9_sub_pixel_avg_variance16x8_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x16_ssse3 =
    vp9_sub_pixel_avg_variance16x16_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance16x32_ssse3 =
    vp9_sub_pixel_avg_variance16x32_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x16_ssse3 =
    vp9_sub_pixel_avg_variance32x16_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x32_ssse3 =
    vp9_sub_pixel_avg_variance32x32_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance32x64_ssse3 =
    vp9_sub_pixel_avg_variance32x64_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x32_ssse3 =
    vp9_sub_pixel_avg_variance64x32_ssse3;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x64_ssse3 =
    vp9_sub_pixel_avg_variance64x64_ssse3;
INSTANTIATE_TEST_CASE_P(
    SSSE3, VP9SubpelAvgVarianceTest,
    ::testing::Values(make_tuple(2, 2, subpel_avg_variance4x4_ssse3),
                      make_tuple(2, 3, subpel_avg_variance4x8_ssse3),
                      make_tuple(3, 2, subpel_avg_variance8x4_ssse3),
                      make_tuple(3, 3, subpel_avg_variance8x8_ssse3),
                      make_tuple(3, 4, subpel_avg_variance8x16_ssse3),
                      make_tuple(4, 3, subpel_avg_variance16x8_ssse3),
                      make_tuple(4, 4, subpel_avg_variance16x16_ssse3),
                      make_tuple(4, 5, subpel_avg_variance16x32_ssse3),
                      make_tuple(5, 4, subpel_avg_variance32x16_ssse3),
                      make_tuple(5, 5, subpel_avg_variance32x32_ssse3),
                      make_tuple(5, 6, subpel_avg_variance32x64_ssse3),
                      make_tuple(6, 5, subpel_avg_variance64x32_ssse3),
                      make_tuple(6, 6, subpel_avg_variance64x64_ssse3)));
#endif
#endif

#if HAVE_AVX2

const vp9_variance_fn_t variance16x16_avx2 = vp9_variance16x16_avx2;
const vp9_variance_fn_t variance32x16_avx2 = vp9_variance32x16_avx2;
const vp9_variance_fn_t variance32x32_avx2 = vp9_variance32x32_avx2;
const vp9_variance_fn_t variance64x32_avx2 = vp9_variance64x32_avx2;
const vp9_variance_fn_t variance64x64_avx2 = vp9_variance64x64_avx2;
INSTANTIATE_TEST_CASE_P(
    AVX2, VP9VarianceTest,
    ::testing::Values(make_tuple(4, 4, variance16x16_avx2),
                      make_tuple(5, 4, variance32x16_avx2),
                      make_tuple(5, 5, variance32x32_avx2),
                      make_tuple(6, 5, variance64x32_avx2),
                      make_tuple(6, 6, variance64x64_avx2)));

const vp9_subpixvariance_fn_t subpel_variance32x32_avx2 =
    vp9_sub_pixel_variance32x32_avx2;
const vp9_subpixvariance_fn_t subpel_variance64x64_avx2 =
    vp9_sub_pixel_variance64x64_avx2;
INSTANTIATE_TEST_CASE_P(
    AVX2, VP9SubpelVarianceTest,
    ::testing::Values(make_tuple(5, 5, subpel_variance32x32_avx2),
                      make_tuple(6, 6, subpel_variance64x64_avx2)));

const vp9_subp_avg_variance_fn_t subpel_avg_variance32x32_avx2 =
    vp9_sub_pixel_avg_variance32x32_avx2;
const vp9_subp_avg_variance_fn_t subpel_avg_variance64x64_avx2 =
    vp9_sub_pixel_avg_variance64x64_avx2;
INSTANTIATE_TEST_CASE_P(
    AVX2, VP9SubpelAvgVarianceTest,
    ::testing::Values(make_tuple(5, 5, subpel_avg_variance32x32_avx2),
                      make_tuple(6, 6, subpel_avg_variance64x64_avx2)));
#endif  // HAVE_AVX2
#if HAVE_NEON
const vp9_variance_fn_t variance8x8_neon = vp9_variance8x8_neon;
const vp9_variance_fn_t variance16x16_neon = vp9_variance16x16_neon;
const vp9_variance_fn_t variance32x32_neon = vp9_variance32x32_neon;
INSTANTIATE_TEST_CASE_P(
    NEON, VP9VarianceTest,
    ::testing::Values(make_tuple(3, 3, variance8x8_neon),
                      make_tuple(4, 4, variance16x16_neon),
                      make_tuple(5, 5, variance32x32_neon)));

const vp9_subpixvariance_fn_t subpel_variance8x8_neon =
    vp9_sub_pixel_variance8x8_neon;
const vp9_subpixvariance_fn_t subpel_variance16x16_neon =
    vp9_sub_pixel_variance16x16_neon;
const vp9_subpixvariance_fn_t subpel_variance32x32_neon =
    vp9_sub_pixel_variance32x32_neon;
INSTANTIATE_TEST_CASE_P(
    NEON, VP9SubpelVarianceTest,
    ::testing::Values(make_tuple(3, 3, subpel_variance8x8_neon),
                      make_tuple(4, 4, subpel_variance16x16_neon),
                      make_tuple(5, 5, subpel_variance32x32_neon)));
#endif  // HAVE_NEON
#endif  // CONFIG_VP9_ENCODER

}  // namespace vp9

}  // namespace