Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (c) 2015 The WebM project authors. All Rights Reserved. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license |
| 5 | * that can be found in the LICENSE file in the root of the source |
| 6 | * tree. An additional intellectual property rights grant can be found |
| 7 | * in the file PATENTS. All contributing project authors may |
| 8 | * be found in the AUTHORS file in the root of the source tree. |
| 9 | */ |
| 10 | |
| 11 | #include <math.h> |
| 12 | #include <stdlib.h> |
| 13 | #include <string.h> |
| 14 | |
| 15 | #include "third_party/googletest/src/include/gtest/gtest.h" |
| 16 | #include "test/acm_random.h" |
| 17 | #include "test/clear_system_state.h" |
| 18 | #include "test/register_state_check.h" |
| 19 | #include "test/util.h" |
| 20 | |
| 21 | #include "./vpx_config.h" |
| 22 | #include "./vpx_dsp_rtcd.h" |
Geza Lore | 697bf5b | 2016-03-02 11:12:52 +0000 | [diff] [blame] | 23 | #include "vpx/vpx_codec.h" |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 24 | #include "vpx/vpx_integer.h" |
| 25 | #include "vpx_dsp/vpx_filter.h" |
Geza Lore | 697bf5b | 2016-03-02 11:12:52 +0000 | [diff] [blame] | 26 | #include "vpx_mem/vpx_mem.h" |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 27 | |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 28 | using libvpx_test::ACMRandom; |
| 29 | |
| 30 | namespace { |
| 31 | const int number_of_iterations = 500; |
| 32 | |
| 33 | typedef unsigned int (*MaskedVarianceFunc)(const uint8_t *a, int a_stride, |
| 34 | const uint8_t *b, int b_stride, |
| 35 | const uint8_t *m, int m_stride, |
| 36 | unsigned int *sse); |
| 37 | |
| 38 | typedef std::tr1::tuple<MaskedVarianceFunc, |
| 39 | MaskedVarianceFunc> MaskedVarianceParam; |
| 40 | |
// Fixture for whole-pixel masked-variance tests.  GET_PARAM(0) is the
// optimized kernel under test, GET_PARAM(1) the C reference it must match.
class MaskedVarianceTest :
    public ::testing::TestWithParam<MaskedVarianceParam> {
 public:
  virtual ~MaskedVarianceTest() {}
  virtual void SetUp() {
    opt_func_ = GET_PARAM(0);
    ref_func_ = GET_PARAM(1);
  }

  virtual void TearDown() { libvpx_test::ClearSystemState(); }

 protected:
  MaskedVarianceFunc opt_func_;  // optimized kernel under test
  MaskedVarianceFunc ref_func_;  // C reference implementation
};
| 56 | |
| 57 | TEST_P(MaskedVarianceTest, OperationCheck) { |
| 58 | unsigned int ref_ret, opt_ret; |
| 59 | unsigned int ref_sse, opt_sse; |
| 60 | ACMRandom rnd(ACMRandom::DeterministicSeed()); |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 61 | DECLARE_ALIGNED(16, uint8_t, src_ptr[MAX_SB_SIZE*MAX_SB_SIZE]); |
| 62 | DECLARE_ALIGNED(16, uint8_t, ref_ptr[MAX_SB_SIZE*MAX_SB_SIZE]); |
| 63 | DECLARE_ALIGNED(16, uint8_t, msk_ptr[MAX_SB_SIZE*MAX_SB_SIZE]); |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 64 | int err_count = 0; |
| 65 | int first_failure = -1; |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 66 | int src_stride = MAX_SB_SIZE; |
| 67 | int ref_stride = MAX_SB_SIZE; |
| 68 | int msk_stride = MAX_SB_SIZE; |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 69 | |
| 70 | for (int i = 0; i < number_of_iterations; ++i) { |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 71 | for (int j = 0; j < MAX_SB_SIZE*MAX_SB_SIZE; j++) { |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 72 | src_ptr[j] = rnd.Rand8(); |
| 73 | ref_ptr[j] = rnd.Rand8(); |
| 74 | msk_ptr[j] = rnd(65); |
| 75 | } |
| 76 | |
| 77 | ref_ret = ref_func_(src_ptr, src_stride, |
| 78 | ref_ptr, ref_stride, |
| 79 | msk_ptr, msk_stride, |
| 80 | &ref_sse); |
| 81 | ASM_REGISTER_STATE_CHECK(opt_ret = opt_func_(src_ptr, src_stride, |
| 82 | ref_ptr, ref_stride, |
| 83 | msk_ptr, msk_stride, |
| 84 | &opt_sse)); |
| 85 | |
| 86 | if (opt_ret != ref_ret || opt_sse != ref_sse) { |
| 87 | err_count++; |
| 88 | if (first_failure == -1) |
| 89 | first_failure = i; |
| 90 | } |
| 91 | } |
| 92 | |
| 93 | EXPECT_EQ(0, err_count) |
| 94 | << "Error: Masked Variance Test OperationCheck," |
| 95 | << "C output doesn't match SSSE3 output. " |
| 96 | << "First failed at test case " << first_failure; |
| 97 | } |
| 98 | |
| 99 | TEST_P(MaskedVarianceTest, ExtremeValues) { |
| 100 | unsigned int ref_ret, opt_ret; |
| 101 | unsigned int ref_sse, opt_sse; |
| 102 | ACMRandom rnd(ACMRandom::DeterministicSeed()); |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 103 | DECLARE_ALIGNED(16, uint8_t, src_ptr[MAX_SB_SIZE*MAX_SB_SIZE]); |
| 104 | DECLARE_ALIGNED(16, uint8_t, ref_ptr[MAX_SB_SIZE*MAX_SB_SIZE]); |
| 105 | DECLARE_ALIGNED(16, uint8_t, msk_ptr[MAX_SB_SIZE*MAX_SB_SIZE]); |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 106 | int err_count = 0; |
| 107 | int first_failure = -1; |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 108 | int src_stride = MAX_SB_SIZE; |
| 109 | int ref_stride = MAX_SB_SIZE; |
| 110 | int msk_stride = MAX_SB_SIZE; |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 111 | |
| 112 | for (int i = 0; i < 8; ++i) { |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 113 | memset(src_ptr, (i & 0x1) ? 255 : 0, MAX_SB_SIZE*MAX_SB_SIZE); |
| 114 | memset(ref_ptr, (i & 0x2) ? 255 : 0, MAX_SB_SIZE*MAX_SB_SIZE); |
| 115 | memset(msk_ptr, (i & 0x4) ? 64 : 0, MAX_SB_SIZE*MAX_SB_SIZE); |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 116 | |
| 117 | ref_ret = ref_func_(src_ptr, src_stride, |
| 118 | ref_ptr, ref_stride, |
| 119 | msk_ptr, msk_stride, |
| 120 | &ref_sse); |
| 121 | ASM_REGISTER_STATE_CHECK(opt_ret = opt_func_(src_ptr, src_stride, |
| 122 | ref_ptr, ref_stride, |
| 123 | msk_ptr, msk_stride, |
| 124 | &opt_sse)); |
| 125 | |
| 126 | if (opt_ret != ref_ret || opt_sse != ref_sse) { |
| 127 | err_count++; |
| 128 | if (first_failure == -1) |
| 129 | first_failure = i; |
| 130 | } |
| 131 | } |
| 132 | |
| 133 | EXPECT_EQ(0, err_count) |
| 134 | << "Error: Masked Variance Test ExtremeValues," |
| 135 | << "C output doesn't match SSSE3 output. " |
| 136 | << "First failed at test case " << first_failure; |
| 137 | } |
| 138 | |
// Signature of the sub-pixel masked-variance kernels; xoffset/yoffset select
// the bilinear interpolation phase before the masked variance is computed.
typedef unsigned int (*MaskedSubPixelVarianceFunc)(
    const uint8_t *a, int a_stride,
    int xoffset, int yoffset,
    const uint8_t *b, int b_stride,
    const uint8_t *m, int m_stride,
    unsigned int *sse);

// Test parameter: <optimized implementation, C reference implementation>.
typedef std::tr1::tuple<MaskedSubPixelVarianceFunc,
                        MaskedSubPixelVarianceFunc> MaskedSubPixelVarianceParam;
| 148 | |
// Fixture for sub-pixel masked-variance tests.  GET_PARAM(0) is the
// optimized kernel under test, GET_PARAM(1) the C reference it must match.
class MaskedSubPixelVarianceTest :
    public ::testing::TestWithParam<MaskedSubPixelVarianceParam> {
 public:
  virtual ~MaskedSubPixelVarianceTest() {}
  virtual void SetUp() {
    opt_func_ = GET_PARAM(0);
    ref_func_ = GET_PARAM(1);
  }

  virtual void TearDown() { libvpx_test::ClearSystemState(); }

 protected:
  MaskedSubPixelVarianceFunc opt_func_;  // optimized kernel under test
  MaskedSubPixelVarianceFunc ref_func_;  // C reference implementation
};
| 164 | |
| 165 | TEST_P(MaskedSubPixelVarianceTest, OperationCheck) { |
| 166 | unsigned int ref_ret, opt_ret; |
| 167 | unsigned int ref_sse, opt_sse; |
| 168 | ACMRandom rnd(ACMRandom::DeterministicSeed()); |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 169 | DECLARE_ALIGNED(16, uint8_t, src_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]); |
| 170 | DECLARE_ALIGNED(16, uint8_t, ref_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]); |
| 171 | DECLARE_ALIGNED(16, uint8_t, msk_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]); |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 172 | int err_count = 0; |
| 173 | int first_failure = -1; |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 174 | int src_stride = (MAX_SB_SIZE+1); |
| 175 | int ref_stride = (MAX_SB_SIZE+1); |
| 176 | int msk_stride = (MAX_SB_SIZE+1); |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 177 | int xoffset; |
| 178 | int yoffset; |
| 179 | |
| 180 | for (int i = 0; i < number_of_iterations; ++i) { |
| 181 | int xoffsets[] = {0, 4, rnd(BIL_SUBPEL_SHIFTS)}; |
| 182 | int yoffsets[] = {0, 4, rnd(BIL_SUBPEL_SHIFTS)}; |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 183 | for (int j = 0; j < (MAX_SB_SIZE+1)*(MAX_SB_SIZE+1); j++) { |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 184 | src_ptr[j] = rnd.Rand8(); |
| 185 | ref_ptr[j] = rnd.Rand8(); |
| 186 | msk_ptr[j] = rnd(65); |
| 187 | } |
| 188 | for (int k = 0; k < 3; k++) { |
| 189 | xoffset = xoffsets[k]; |
| 190 | for (int l = 0; l < 3; l++) { |
| 191 | xoffset = xoffsets[k]; |
| 192 | yoffset = yoffsets[l]; |
| 193 | |
| 194 | ref_ret = ref_func_(src_ptr, src_stride, |
| 195 | xoffset, yoffset, |
| 196 | ref_ptr, ref_stride, |
| 197 | msk_ptr, msk_stride, |
| 198 | &ref_sse); |
| 199 | ASM_REGISTER_STATE_CHECK(opt_ret = opt_func_(src_ptr, src_stride, |
| 200 | xoffset, yoffset, |
| 201 | ref_ptr, ref_stride, |
| 202 | msk_ptr, msk_stride, |
| 203 | &opt_sse)); |
| 204 | |
| 205 | if (opt_ret != ref_ret || opt_sse != ref_sse) { |
| 206 | err_count++; |
| 207 | if (first_failure == -1) |
| 208 | first_failure = i; |
| 209 | } |
| 210 | } |
| 211 | } |
| 212 | } |
| 213 | |
| 214 | EXPECT_EQ(0, err_count) |
| 215 | << "Error: Masked Sub Pixel Variance Test OperationCheck," |
| 216 | << "C output doesn't match SSSE3 output. " |
| 217 | << "First failed at test case " << first_failure; |
| 218 | } |
| 219 | |
| 220 | TEST_P(MaskedSubPixelVarianceTest, ExtremeValues) { |
| 221 | unsigned int ref_ret, opt_ret; |
| 222 | unsigned int ref_sse, opt_sse; |
| 223 | ACMRandom rnd(ACMRandom::DeterministicSeed()); |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 224 | DECLARE_ALIGNED(16, uint8_t, src_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]); |
| 225 | DECLARE_ALIGNED(16, uint8_t, ref_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]); |
| 226 | DECLARE_ALIGNED(16, uint8_t, msk_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]); |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 227 | int first_failure_x = -1; |
| 228 | int first_failure_y = -1; |
| 229 | int err_count = 0; |
| 230 | int first_failure = -1; |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 231 | int src_stride = (MAX_SB_SIZE+1); |
| 232 | int ref_stride = (MAX_SB_SIZE+1); |
| 233 | int msk_stride = (MAX_SB_SIZE+1); |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 234 | |
| 235 | for (int xoffset = 0 ; xoffset < BIL_SUBPEL_SHIFTS ; xoffset++) { |
| 236 | for (int yoffset = 0 ; yoffset < BIL_SUBPEL_SHIFTS ; yoffset++) { |
| 237 | for (int i = 0; i < 8; ++i) { |
Geza Lore | 552d5cd | 2016-03-07 13:46:39 +0000 | [diff] [blame^] | 238 | memset(src_ptr, (i & 0x1) ? 255 : 0, (MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)); |
| 239 | memset(ref_ptr, (i & 0x2) ? 255 : 0, (MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)); |
| 240 | memset(msk_ptr, (i & 0x4) ? 64 : 0, (MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)); |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 241 | |
| 242 | ref_ret = ref_func_(src_ptr, src_stride, |
| 243 | xoffset, yoffset, |
| 244 | ref_ptr, ref_stride, |
| 245 | msk_ptr, msk_stride, |
| 246 | &ref_sse); |
| 247 | ASM_REGISTER_STATE_CHECK(opt_ret = opt_func_(src_ptr, src_stride, |
| 248 | xoffset, yoffset, |
| 249 | ref_ptr, ref_stride, |
| 250 | msk_ptr, msk_stride, |
| 251 | &opt_sse)); |
| 252 | |
| 253 | if (opt_ret != ref_ret || opt_sse != ref_sse) { |
| 254 | err_count++; |
| 255 | if (first_failure == -1) { |
| 256 | first_failure = i; |
| 257 | first_failure_x = xoffset; |
| 258 | first_failure_y = yoffset; |
| 259 | } |
| 260 | } |
| 261 | } |
| 262 | } |
| 263 | } |
| 264 | |
| 265 | EXPECT_EQ(0, err_count) |
| 266 | << "Error: Masked Variance Test ExtremeValues," |
| 267 | << "C output doesn't match SSSE3 output. " |
| 268 | << "First failed at test case " << first_failure |
| 269 | << " x_offset = " << first_failure_x |
| 270 | << " y_offset = " << first_failure_y; |
| 271 | } |
| 272 | |
#if CONFIG_VP9_HIGHBITDEPTH
// Test parameter for the high-bitdepth variant: <optimized, C reference,
// bit depth used to clip the random samples>.
typedef std::tr1::tuple<MaskedVarianceFunc,
                        MaskedVarianceFunc,
                        vpx_bit_depth_t> HighbdMaskedVarianceParam;

// Fixture for high-bitdepth whole-pixel masked-variance tests.
class HighbdMaskedVarianceTest :
    public ::testing::TestWithParam<HighbdMaskedVarianceParam> {
 public:
  virtual ~HighbdMaskedVarianceTest() {}
  virtual void SetUp() {
    opt_func_ = GET_PARAM(0);
    ref_func_ = GET_PARAM(1);
    bit_depth_ = GET_PARAM(2);
  }

  virtual void TearDown() { libvpx_test::ClearSystemState(); }

 protected:
  MaskedVarianceFunc opt_func_;   // optimized kernel under test
  MaskedVarianceFunc ref_func_;   // C reference implementation
  vpx_bit_depth_t bit_depth_;     // sample bit depth for input generation
};
| 295 | |
// Cross-checks the optimized high-bitdepth masked-variance kernel against
// the C reference on random samples clipped to bit_depth_.
TEST_P(HighbdMaskedVarianceTest, OperationCheck) {
  unsigned int ref_ret, opt_ret;
  unsigned int ref_sse, opt_sse;
  ACMRandom rnd(ACMRandom::DeterministicSeed());
  DECLARE_ALIGNED(16, uint16_t, src_ptr[MAX_SB_SIZE*MAX_SB_SIZE]);
  DECLARE_ALIGNED(16, uint16_t, ref_ptr[MAX_SB_SIZE*MAX_SB_SIZE]);
  DECLARE_ALIGNED(16, uint8_t, msk_ptr[MAX_SB_SIZE*MAX_SB_SIZE]);
  // High-bitdepth kernels take uint8_t* that aliases the uint16_t sample
  // buffers via CONVERT_TO_BYTEPTR.
  uint8_t* src8_ptr = CONVERT_TO_BYTEPTR(src_ptr);
  uint8_t* ref8_ptr = CONVERT_TO_BYTEPTR(ref_ptr);
  int err_count = 0;
  int first_failure = -1;
  int src_stride = MAX_SB_SIZE;
  int ref_stride = MAX_SB_SIZE;
  int msk_stride = MAX_SB_SIZE;

  for (int i = 0; i < number_of_iterations; ++i) {
    for (int j = 0; j < MAX_SB_SIZE*MAX_SB_SIZE; j++) {
      // Random samples masked down to the configured bit depth.
      src_ptr[j] = rnd.Rand16() & ((1 << bit_depth_) - 1);
      ref_ptr[j] = rnd.Rand16() & ((1 << bit_depth_) - 1);
      msk_ptr[j] = rnd(65);  // mask values are in [0, 64]
    }

    ref_ret = ref_func_(src8_ptr, src_stride,
                        ref8_ptr, ref_stride,
                        msk_ptr, msk_stride,
                        &ref_sse);
    ASM_REGISTER_STATE_CHECK(opt_ret = opt_func_(src8_ptr, src_stride,
                                                 ref8_ptr, ref_stride,
                                                 msk_ptr, msk_stride,
                                                 &opt_sse));

    // Both the variance return value and the SSE out-param must agree.
    if (opt_ret != ref_ret || opt_sse != ref_sse) {
      err_count++;
      if (first_failure == -1)
        first_failure = i;
    }
  }

  EXPECT_EQ(0, err_count)
      << "Error: Masked Variance Test OperationCheck,"
      << "C output doesn't match SSSE3 output. "
      << "First failed at test case " << first_failure;
}
| 339 | |
// Saturates each combination of source / reference / mask at the configured
// bit depth and checks the optimized kernel against the C reference.
TEST_P(HighbdMaskedVarianceTest, ExtremeValues) {
  unsigned int ref_ret, opt_ret;
  unsigned int ref_sse, opt_sse;
  ACMRandom rnd(ACMRandom::DeterministicSeed());
  DECLARE_ALIGNED(16, uint16_t, src_ptr[MAX_SB_SIZE*MAX_SB_SIZE]);
  DECLARE_ALIGNED(16, uint16_t, ref_ptr[MAX_SB_SIZE*MAX_SB_SIZE]);
  DECLARE_ALIGNED(16, uint8_t, msk_ptr[MAX_SB_SIZE*MAX_SB_SIZE]);
  // uint8_t* views over the uint16_t sample buffers (CONVERT_TO_BYTEPTR).
  uint8_t* src8_ptr = CONVERT_TO_BYTEPTR(src_ptr);
  uint8_t* ref8_ptr = CONVERT_TO_BYTEPTR(ref_ptr);
  int err_count = 0;
  int first_failure = -1;
  int src_stride = MAX_SB_SIZE;
  int ref_stride = MAX_SB_SIZE;
  int msk_stride = MAX_SB_SIZE;

  for (int i = 0; i < 8; ++i) {
    // Bit 0 saturates the source, bit 1 the reference, bit 2 the mask.
    vpx_memset16(src_ptr, (i & 0x1) ? ((1 << bit_depth_) - 1) : 0,
                 MAX_SB_SIZE*MAX_SB_SIZE);
    vpx_memset16(ref_ptr, (i & 0x2) ? ((1 << bit_depth_) - 1) : 0,
                 MAX_SB_SIZE*MAX_SB_SIZE);
    memset(msk_ptr, (i & 0x4) ? 64 : 0, MAX_SB_SIZE*MAX_SB_SIZE);

    ref_ret = ref_func_(src8_ptr, src_stride,
                        ref8_ptr, ref_stride,
                        msk_ptr, msk_stride,
                        &ref_sse);
    ASM_REGISTER_STATE_CHECK(opt_ret = opt_func_(src8_ptr, src_stride,
                                                 ref8_ptr, ref_stride,
                                                 msk_ptr, msk_stride,
                                                 &opt_sse));

    if (opt_ret != ref_ret || opt_sse != ref_sse) {
      err_count++;
      if (first_failure == -1)
        first_failure = i;
    }
  }

  EXPECT_EQ(0, err_count)
      << "Error: Masked Variance Test ExtremeValues,"
      << "C output doesn't match SSSE3 output. "
      << "First failed at test case " << first_failure;
}
| 383 | |
// Test parameter for the high-bitdepth sub-pixel variant:
// <optimized, C reference, bit depth>.
typedef std::tr1::tuple<MaskedSubPixelVarianceFunc,
                        MaskedSubPixelVarianceFunc,
                        vpx_bit_depth_t> HighbdMaskedSubPixelVarianceParam;

// Fixture for high-bitdepth sub-pixel masked-variance tests.
class HighbdMaskedSubPixelVarianceTest :
    public ::testing::TestWithParam<HighbdMaskedSubPixelVarianceParam> {
 public:
  virtual ~HighbdMaskedSubPixelVarianceTest() {}
  virtual void SetUp() {
    opt_func_ = GET_PARAM(0);
    ref_func_ = GET_PARAM(1);
    bit_depth_ = GET_PARAM(2);
  }

  virtual void TearDown() { libvpx_test::ClearSystemState(); }

 protected:
  MaskedSubPixelVarianceFunc opt_func_;  // optimized kernel under test
  MaskedSubPixelVarianceFunc ref_func_;  // C reference implementation
  vpx_bit_depth_t bit_depth_;            // sample bit depth for inputs
};
| 405 | |
// Cross-checks the optimized high-bitdepth sub-pixel masked-variance kernel
// against the C reference on random data, at every bilinear phase.
TEST_P(HighbdMaskedSubPixelVarianceTest, OperationCheck) {
  unsigned int ref_ret, opt_ret;
  unsigned int ref_sse, opt_sse;
  ACMRandom rnd(ACMRandom::DeterministicSeed());
  // +1 row/column so the bilinear filter can read one past the block.
  DECLARE_ALIGNED(16, uint16_t, src_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]);
  DECLARE_ALIGNED(16, uint16_t, ref_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]);
  DECLARE_ALIGNED(16, uint8_t, msk_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]);
  // uint8_t* views over the uint16_t sample buffers (CONVERT_TO_BYTEPTR).
  uint8_t* src8_ptr = CONVERT_TO_BYTEPTR(src_ptr);
  uint8_t* ref8_ptr = CONVERT_TO_BYTEPTR(ref_ptr);
  int err_count = 0;
  int first_failure = -1;
  int first_failure_x = -1;
  int first_failure_y = -1;
  int src_stride = (MAX_SB_SIZE+1);
  int ref_stride = (MAX_SB_SIZE+1);
  int msk_stride = (MAX_SB_SIZE+1);
  int xoffset, yoffset;

  for (int i = 0; i < number_of_iterations; ++i) {
    for (xoffset = 0; xoffset < BIL_SUBPEL_SHIFTS; xoffset++) {
      for (yoffset = 0; yoffset < BIL_SUBPEL_SHIFTS; yoffset++) {
        for (int j = 0; j < (MAX_SB_SIZE+1)*(MAX_SB_SIZE+1); j++) {
          // Random samples masked down to the configured bit depth.
          src_ptr[j] = rnd.Rand16() & ((1 << bit_depth_) - 1);
          ref_ptr[j] = rnd.Rand16() & ((1 << bit_depth_) - 1);
          msk_ptr[j] = rnd(65);  // mask values are in [0, 64]
        }

        ref_ret = ref_func_(src8_ptr, src_stride,
                            xoffset, yoffset,
                            ref8_ptr, ref_stride,
                            msk_ptr, msk_stride,
                            &ref_sse);
        ASM_REGISTER_STATE_CHECK(opt_ret = opt_func_(src8_ptr, src_stride,
                                                     xoffset, yoffset,
                                                     ref8_ptr, ref_stride,
                                                     msk_ptr, msk_stride,
                                                     &opt_sse));

        if (opt_ret != ref_ret || opt_sse != ref_sse) {
          err_count++;
          if (first_failure == -1) {
            first_failure = i;
            first_failure_x = xoffset;
            first_failure_y = yoffset;
          }
        }
      }
    }
  }

  EXPECT_EQ(0, err_count)
      << "Error: Masked Sub Pixel Variance Test OperationCheck,"
      << "C output doesn't match SSSE3 output. "
      << "First failed at test case " << first_failure
      << " x_offset = " << first_failure_x
      << " y_offset = " << first_failure_y;
}
| 463 | |
// Saturates each combination of source / reference / mask at the configured
// bit depth, at every bilinear phase, and checks optimized vs C reference.
TEST_P(HighbdMaskedSubPixelVarianceTest, ExtremeValues) {
  unsigned int ref_ret, opt_ret;
  unsigned int ref_sse, opt_sse;
  ACMRandom rnd(ACMRandom::DeterministicSeed());
  // +1 row/column so the bilinear filter can read one past the block.
  DECLARE_ALIGNED(16, uint16_t, src_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]);
  DECLARE_ALIGNED(16, uint16_t, ref_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]);
  DECLARE_ALIGNED(16, uint8_t, msk_ptr[(MAX_SB_SIZE+1)*(MAX_SB_SIZE+1)]);
  // uint8_t* views over the uint16_t sample buffers (CONVERT_TO_BYTEPTR).
  uint8_t* src8_ptr = CONVERT_TO_BYTEPTR(src_ptr);
  uint8_t* ref8_ptr = CONVERT_TO_BYTEPTR(ref_ptr);
  int first_failure_x = -1;
  int first_failure_y = -1;
  int err_count = 0;
  int first_failure = -1;
  int src_stride = (MAX_SB_SIZE+1);
  int ref_stride = (MAX_SB_SIZE+1);
  int msk_stride = (MAX_SB_SIZE+1);

  for (int xoffset = 0 ; xoffset < BIL_SUBPEL_SHIFTS ; xoffset++) {
    for (int yoffset = 0 ; yoffset < BIL_SUBPEL_SHIFTS ; yoffset++) {
      for (int i = 0; i < 8; ++i) {
        // Bit 0 saturates the source, bit 1 the reference, bit 2 the mask.
        vpx_memset16(src_ptr, (i & 0x1) ? ((1 << bit_depth_) - 1) : 0,
                     (MAX_SB_SIZE+1)*(MAX_SB_SIZE+1));
        vpx_memset16(ref_ptr, (i & 0x2) ? ((1 << bit_depth_) - 1) : 0,
                     (MAX_SB_SIZE+1)*(MAX_SB_SIZE+1));
        memset(msk_ptr, (i & 0x4) ? 64 : 0, (MAX_SB_SIZE+1)*(MAX_SB_SIZE+1));

        ref_ret = ref_func_(src8_ptr, src_stride,
                            xoffset, yoffset,
                            ref8_ptr, ref_stride,
                            msk_ptr, msk_stride,
                            &ref_sse);
        ASM_REGISTER_STATE_CHECK(opt_ret = opt_func_(src8_ptr, src_stride,
                                                     xoffset, yoffset,
                                                     ref8_ptr, ref_stride,
                                                     msk_ptr, msk_stride,
                                                     &opt_sse));

        if (opt_ret != ref_ret || opt_sse != ref_sse) {
          err_count++;
          if (first_failure == -1) {
            first_failure = i;
            first_failure_x = xoffset;
            first_failure_y = yoffset;
          }
        }
      }
    }
  }

  EXPECT_EQ(0, err_count)
      << "Error: Masked Variance Test ExtremeValues,"
      << "C output doesn't match SSSE3 output. "
      << "First failed at test case " << first_failure
      << " x_offset = " << first_failure_x
      << " y_offset = " << first_failure_y;
}
| 520 | #endif // CONFIG_VP9_HIGHBITDEPTH |
| 521 | |
| 522 | using std::tr1::make_tuple; |
| 523 | |
#if HAVE_SSSE3
// Registers one MaskedVarianceTest instance per block size, pairing each
// SSSE3 kernel with its C reference.
INSTANTIATE_TEST_CASE_P(
  SSSE3_C_COMPARE, MaskedVarianceTest,
  ::testing::Values(
#if CONFIG_EXT_PARTITION
    // Extended-partition block sizes (up to 128x128).
    make_tuple(&vpx_masked_variance128x128_ssse3,
               &vpx_masked_variance128x128_c),
    make_tuple(&vpx_masked_variance128x64_ssse3,
               &vpx_masked_variance128x64_c),
    make_tuple(&vpx_masked_variance64x128_ssse3,
               &vpx_masked_variance64x128_c),
#endif  // CONFIG_EXT_PARTITION
    make_tuple(&vpx_masked_variance64x64_ssse3,
               &vpx_masked_variance64x64_c),
    make_tuple(&vpx_masked_variance64x32_ssse3,
               &vpx_masked_variance64x32_c),
    make_tuple(&vpx_masked_variance32x64_ssse3,
               &vpx_masked_variance32x64_c),
    make_tuple(&vpx_masked_variance32x32_ssse3,
               &vpx_masked_variance32x32_c),
    make_tuple(&vpx_masked_variance32x16_ssse3,
               &vpx_masked_variance32x16_c),
    make_tuple(&vpx_masked_variance16x32_ssse3,
               &vpx_masked_variance16x32_c),
    make_tuple(&vpx_masked_variance16x16_ssse3,
               &vpx_masked_variance16x16_c),
    make_tuple(&vpx_masked_variance16x8_ssse3,
               &vpx_masked_variance16x8_c),
    make_tuple(&vpx_masked_variance8x16_ssse3,
               &vpx_masked_variance8x16_c),
    make_tuple(&vpx_masked_variance8x8_ssse3,
               &vpx_masked_variance8x8_c),
    make_tuple(&vpx_masked_variance8x4_ssse3,
               &vpx_masked_variance8x4_c),
    make_tuple(&vpx_masked_variance4x8_ssse3,
               &vpx_masked_variance4x8_c),
    make_tuple(&vpx_masked_variance4x4_ssse3,
               &vpx_masked_variance4x4_c)));
| 562 | |
// Registers one MaskedSubPixelVarianceTest instance per block size, pairing
// each SSSE3 sub-pixel kernel with its C reference.
INSTANTIATE_TEST_CASE_P(
  SSSE3_C_COMPARE, MaskedSubPixelVarianceTest,
  ::testing::Values(
#if CONFIG_EXT_PARTITION
    // Extended-partition block sizes (up to 128x128).
    make_tuple(&vpx_masked_sub_pixel_variance128x128_ssse3,
               &vpx_masked_sub_pixel_variance128x128_c),
    make_tuple(&vpx_masked_sub_pixel_variance128x64_ssse3,
               &vpx_masked_sub_pixel_variance128x64_c),
    make_tuple(&vpx_masked_sub_pixel_variance64x128_ssse3,
               &vpx_masked_sub_pixel_variance64x128_c),
#endif  // CONFIG_EXT_PARTITION
    make_tuple(&vpx_masked_sub_pixel_variance64x64_ssse3,
               &vpx_masked_sub_pixel_variance64x64_c),
    make_tuple(&vpx_masked_sub_pixel_variance64x32_ssse3,
               &vpx_masked_sub_pixel_variance64x32_c),
    make_tuple(&vpx_masked_sub_pixel_variance32x64_ssse3,
               &vpx_masked_sub_pixel_variance32x64_c),
    make_tuple(&vpx_masked_sub_pixel_variance32x32_ssse3,
               &vpx_masked_sub_pixel_variance32x32_c),
    make_tuple(&vpx_masked_sub_pixel_variance32x16_ssse3,
               &vpx_masked_sub_pixel_variance32x16_c),
    make_tuple(&vpx_masked_sub_pixel_variance16x32_ssse3,
               &vpx_masked_sub_pixel_variance16x32_c),
    make_tuple(&vpx_masked_sub_pixel_variance16x16_ssse3,
               &vpx_masked_sub_pixel_variance16x16_c),
    make_tuple(&vpx_masked_sub_pixel_variance16x8_ssse3,
               &vpx_masked_sub_pixel_variance16x8_c),
    make_tuple(&vpx_masked_sub_pixel_variance8x16_ssse3,
               &vpx_masked_sub_pixel_variance8x16_c),
    make_tuple(&vpx_masked_sub_pixel_variance8x8_ssse3,
               &vpx_masked_sub_pixel_variance8x8_c),
    make_tuple(&vpx_masked_sub_pixel_variance8x4_ssse3,
               &vpx_masked_sub_pixel_variance8x4_c),
    make_tuple(&vpx_masked_sub_pixel_variance4x8_ssse3,
               &vpx_masked_sub_pixel_variance4x8_c),
    make_tuple(&vpx_masked_sub_pixel_variance4x4_ssse3,
               &vpx_masked_sub_pixel_variance4x4_c)));
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 600 | |
| 601 | #if CONFIG_VP9_HIGHBITDEPTH |
| 602 | INSTANTIATE_TEST_CASE_P( |
| 603 | SSSE3_C_COMPARE, HighbdMaskedVarianceTest, |
| 604 | ::testing::Values( |
Geza Lore | 697bf5b | 2016-03-02 11:12:52 +0000 | [diff] [blame] | 605 | #if CONFIG_EXT_PARTITION |
| 606 | make_tuple(&vpx_highbd_masked_variance128x128_ssse3, |
| 607 | &vpx_highbd_masked_variance128x128_c, VPX_BITS_8), |
| 608 | make_tuple(&vpx_highbd_masked_variance128x64_ssse3, |
| 609 | &vpx_highbd_masked_variance128x64_c, VPX_BITS_8), |
| 610 | make_tuple(&vpx_highbd_masked_variance64x128_ssse3, |
| 611 | &vpx_highbd_masked_variance64x128_c, VPX_BITS_8), |
| 612 | #endif // CONFIG_EXT_PARTITION |
| 613 | make_tuple(&vpx_highbd_masked_variance64x64_ssse3, |
| 614 | &vpx_highbd_masked_variance64x64_c, VPX_BITS_8), |
| 615 | make_tuple(&vpx_highbd_masked_variance64x32_ssse3, |
| 616 | &vpx_highbd_masked_variance64x32_c, VPX_BITS_8), |
| 617 | make_tuple(&vpx_highbd_masked_variance32x64_ssse3, |
| 618 | &vpx_highbd_masked_variance32x64_c, VPX_BITS_8), |
| 619 | make_tuple(&vpx_highbd_masked_variance32x32_ssse3, |
| 620 | &vpx_highbd_masked_variance32x32_c, VPX_BITS_8), |
| 621 | make_tuple(&vpx_highbd_masked_variance32x16_ssse3, |
| 622 | &vpx_highbd_masked_variance32x16_c, VPX_BITS_8), |
| 623 | make_tuple(&vpx_highbd_masked_variance16x32_ssse3, |
| 624 | &vpx_highbd_masked_variance16x32_c, VPX_BITS_8), |
| 625 | make_tuple(&vpx_highbd_masked_variance16x16_ssse3, |
| 626 | &vpx_highbd_masked_variance16x16_c, VPX_BITS_8), |
| 627 | make_tuple(&vpx_highbd_masked_variance16x8_ssse3, |
| 628 | &vpx_highbd_masked_variance16x8_c, VPX_BITS_8), |
| 629 | make_tuple(&vpx_highbd_masked_variance8x16_ssse3, |
| 630 | &vpx_highbd_masked_variance8x16_c, VPX_BITS_8), |
| 631 | make_tuple(&vpx_highbd_masked_variance8x8_ssse3, |
| 632 | &vpx_highbd_masked_variance8x8_c, VPX_BITS_8), |
| 633 | make_tuple(&vpx_highbd_masked_variance8x4_ssse3, |
| 634 | &vpx_highbd_masked_variance8x4_c, VPX_BITS_8), |
| 635 | make_tuple(&vpx_highbd_masked_variance4x8_ssse3, |
| 636 | &vpx_highbd_masked_variance4x8_c, VPX_BITS_8), |
| 637 | make_tuple(&vpx_highbd_masked_variance4x4_ssse3, |
| 638 | &vpx_highbd_masked_variance4x4_c, VPX_BITS_8), |
| 639 | #if CONFIG_EXT_PARTITION |
| 640 | make_tuple(&vpx_highbd_10_masked_variance128x128_ssse3, |
| 641 | &vpx_highbd_10_masked_variance128x128_c, VPX_BITS_10), |
| 642 | make_tuple(&vpx_highbd_10_masked_variance128x64_ssse3, |
| 643 | &vpx_highbd_10_masked_variance128x64_c, VPX_BITS_10), |
| 644 | make_tuple(&vpx_highbd_10_masked_variance64x128_ssse3, |
| 645 | &vpx_highbd_10_masked_variance64x128_c, VPX_BITS_10), |
| 646 | #endif // CONFIG_EXT_PARTITION |
| 647 | make_tuple(&vpx_highbd_10_masked_variance64x64_ssse3, |
| 648 | &vpx_highbd_10_masked_variance64x64_c, VPX_BITS_10), |
| 649 | make_tuple(&vpx_highbd_10_masked_variance64x32_ssse3, |
| 650 | &vpx_highbd_10_masked_variance64x32_c, VPX_BITS_10), |
| 651 | make_tuple(&vpx_highbd_10_masked_variance32x64_ssse3, |
| 652 | &vpx_highbd_10_masked_variance32x64_c, VPX_BITS_10), |
| 653 | make_tuple(&vpx_highbd_10_masked_variance32x32_ssse3, |
| 654 | &vpx_highbd_10_masked_variance32x32_c, VPX_BITS_10), |
| 655 | make_tuple(&vpx_highbd_10_masked_variance32x16_ssse3, |
| 656 | &vpx_highbd_10_masked_variance32x16_c, VPX_BITS_10), |
| 657 | make_tuple(&vpx_highbd_10_masked_variance16x32_ssse3, |
| 658 | &vpx_highbd_10_masked_variance16x32_c, VPX_BITS_10), |
| 659 | make_tuple(&vpx_highbd_10_masked_variance16x16_ssse3, |
| 660 | &vpx_highbd_10_masked_variance16x16_c, VPX_BITS_10), |
| 661 | make_tuple(&vpx_highbd_10_masked_variance16x8_ssse3, |
| 662 | &vpx_highbd_10_masked_variance16x8_c, VPX_BITS_10), |
| 663 | make_tuple(&vpx_highbd_10_masked_variance8x16_ssse3, |
| 664 | &vpx_highbd_10_masked_variance8x16_c, VPX_BITS_10), |
| 665 | make_tuple(&vpx_highbd_10_masked_variance8x8_ssse3, |
| 666 | &vpx_highbd_10_masked_variance8x8_c, VPX_BITS_10), |
| 667 | make_tuple(&vpx_highbd_10_masked_variance8x4_ssse3, |
| 668 | &vpx_highbd_10_masked_variance8x4_c, VPX_BITS_10), |
| 669 | make_tuple(&vpx_highbd_10_masked_variance4x8_ssse3, |
| 670 | &vpx_highbd_10_masked_variance4x8_c, VPX_BITS_10), |
| 671 | make_tuple(&vpx_highbd_10_masked_variance4x4_ssse3, |
| 672 | &vpx_highbd_10_masked_variance4x4_c, VPX_BITS_10), |
| 673 | #if CONFIG_EXT_PARTITION |
| 674 | make_tuple(&vpx_highbd_12_masked_variance128x128_ssse3, |
| 675 | &vpx_highbd_12_masked_variance128x128_c, VPX_BITS_12), |
| 676 | make_tuple(&vpx_highbd_12_masked_variance128x64_ssse3, |
| 677 | &vpx_highbd_12_masked_variance128x64_c, VPX_BITS_12), |
| 678 | make_tuple(&vpx_highbd_12_masked_variance64x128_ssse3, |
| 679 | &vpx_highbd_12_masked_variance64x128_c, VPX_BITS_12), |
| 680 | #endif // CONFIG_EXT_PARTITION |
| 681 | make_tuple(&vpx_highbd_12_masked_variance64x64_ssse3, |
| 682 | &vpx_highbd_12_masked_variance64x64_c, VPX_BITS_12), |
| 683 | make_tuple(&vpx_highbd_12_masked_variance64x32_ssse3, |
| 684 | &vpx_highbd_12_masked_variance64x32_c, VPX_BITS_12), |
| 685 | make_tuple(&vpx_highbd_12_masked_variance32x64_ssse3, |
| 686 | &vpx_highbd_12_masked_variance32x64_c, VPX_BITS_12), |
| 687 | make_tuple(&vpx_highbd_12_masked_variance32x32_ssse3, |
| 688 | &vpx_highbd_12_masked_variance32x32_c, VPX_BITS_12), |
| 689 | make_tuple(&vpx_highbd_12_masked_variance32x16_ssse3, |
| 690 | &vpx_highbd_12_masked_variance32x16_c, VPX_BITS_12), |
| 691 | make_tuple(&vpx_highbd_12_masked_variance16x32_ssse3, |
| 692 | &vpx_highbd_12_masked_variance16x32_c, VPX_BITS_12), |
| 693 | make_tuple(&vpx_highbd_12_masked_variance16x16_ssse3, |
| 694 | &vpx_highbd_12_masked_variance16x16_c, VPX_BITS_12), |
| 695 | make_tuple(&vpx_highbd_12_masked_variance16x8_ssse3, |
| 696 | &vpx_highbd_12_masked_variance16x8_c, VPX_BITS_12), |
| 697 | make_tuple(&vpx_highbd_12_masked_variance8x16_ssse3, |
| 698 | &vpx_highbd_12_masked_variance8x16_c, VPX_BITS_12), |
| 699 | make_tuple(&vpx_highbd_12_masked_variance8x8_ssse3, |
| 700 | &vpx_highbd_12_masked_variance8x8_c, VPX_BITS_12), |
| 701 | make_tuple(&vpx_highbd_12_masked_variance8x4_ssse3, |
| 702 | &vpx_highbd_12_masked_variance8x4_c, VPX_BITS_12), |
| 703 | make_tuple(&vpx_highbd_12_masked_variance4x8_ssse3, |
| 704 | &vpx_highbd_12_masked_variance4x8_c, VPX_BITS_12), |
| 705 | make_tuple(&vpx_highbd_12_masked_variance4x4_ssse3, |
| 706 | &vpx_highbd_12_masked_variance4x4_c, VPX_BITS_12))); |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 707 | |
| 708 | INSTANTIATE_TEST_CASE_P( |
| 709 | SSSE3_C_COMPARE, HighbdMaskedSubPixelVarianceTest, |
| 710 | ::testing::Values( |
Geza Lore | 697bf5b | 2016-03-02 11:12:52 +0000 | [diff] [blame] | 711 | #if CONFIG_EXT_PARTITION |
| 712 | make_tuple(&vpx_highbd_masked_sub_pixel_variance128x128_ssse3, |
| 713 | &vpx_highbd_masked_sub_pixel_variance128x128_c, VPX_BITS_8), |
| 714 | make_tuple(&vpx_highbd_masked_sub_pixel_variance128x64_ssse3, |
| 715 | &vpx_highbd_masked_sub_pixel_variance128x64_c, VPX_BITS_8), |
| 716 | make_tuple(&vpx_highbd_masked_sub_pixel_variance64x128_ssse3, |
| 717 | &vpx_highbd_masked_sub_pixel_variance64x128_c, VPX_BITS_8), |
| 718 | #endif // CONFIG_EXT_PARTITION |
| 719 | make_tuple(&vpx_highbd_masked_sub_pixel_variance64x64_ssse3, |
| 720 | &vpx_highbd_masked_sub_pixel_variance64x64_c, VPX_BITS_8), |
| 721 | make_tuple(&vpx_highbd_masked_sub_pixel_variance64x32_ssse3, |
| 722 | &vpx_highbd_masked_sub_pixel_variance64x32_c, VPX_BITS_8), |
| 723 | make_tuple(&vpx_highbd_masked_sub_pixel_variance32x64_ssse3, |
| 724 | &vpx_highbd_masked_sub_pixel_variance32x64_c, VPX_BITS_8), |
| 725 | make_tuple(&vpx_highbd_masked_sub_pixel_variance32x32_ssse3, |
| 726 | &vpx_highbd_masked_sub_pixel_variance32x32_c, VPX_BITS_8), |
| 727 | make_tuple(&vpx_highbd_masked_sub_pixel_variance32x16_ssse3, |
| 728 | &vpx_highbd_masked_sub_pixel_variance32x16_c, VPX_BITS_8), |
| 729 | make_tuple(&vpx_highbd_masked_sub_pixel_variance16x32_ssse3, |
| 730 | &vpx_highbd_masked_sub_pixel_variance16x32_c, VPX_BITS_8), |
| 731 | make_tuple(&vpx_highbd_masked_sub_pixel_variance16x16_ssse3, |
| 732 | &vpx_highbd_masked_sub_pixel_variance16x16_c, VPX_BITS_8), |
| 733 | make_tuple(&vpx_highbd_masked_sub_pixel_variance16x8_ssse3, |
| 734 | &vpx_highbd_masked_sub_pixel_variance16x8_c, VPX_BITS_8), |
| 735 | make_tuple(&vpx_highbd_masked_sub_pixel_variance8x16_ssse3, |
| 736 | &vpx_highbd_masked_sub_pixel_variance8x16_c, VPX_BITS_8), |
| 737 | make_tuple(&vpx_highbd_masked_sub_pixel_variance8x8_ssse3, |
| 738 | &vpx_highbd_masked_sub_pixel_variance8x8_c, VPX_BITS_8), |
| 739 | make_tuple(&vpx_highbd_masked_sub_pixel_variance8x4_ssse3, |
| 740 | &vpx_highbd_masked_sub_pixel_variance8x4_c, VPX_BITS_8), |
| 741 | make_tuple(&vpx_highbd_masked_sub_pixel_variance4x8_ssse3, |
| 742 | &vpx_highbd_masked_sub_pixel_variance4x8_c, VPX_BITS_8), |
| 743 | make_tuple(&vpx_highbd_masked_sub_pixel_variance4x4_ssse3, |
| 744 | &vpx_highbd_masked_sub_pixel_variance4x4_c, VPX_BITS_8), |
| 745 | #if CONFIG_EXT_PARTITION |
| 746 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance128x128_ssse3, |
| 747 | &vpx_highbd_10_masked_sub_pixel_variance128x128_c, VPX_BITS_10), |
| 748 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance128x64_ssse3, |
| 749 | &vpx_highbd_10_masked_sub_pixel_variance128x64_c, VPX_BITS_10), |
| 750 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance64x128_ssse3, |
| 751 | &vpx_highbd_10_masked_sub_pixel_variance64x128_c, VPX_BITS_10), |
| 752 | #endif // CONFIG_EXT_PARTITION |
| 753 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance64x64_ssse3, |
| 754 | &vpx_highbd_10_masked_sub_pixel_variance64x64_c, VPX_BITS_10), |
| 755 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance64x32_ssse3, |
| 756 | &vpx_highbd_10_masked_sub_pixel_variance64x32_c, VPX_BITS_10), |
| 757 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance32x64_ssse3, |
| 758 | &vpx_highbd_10_masked_sub_pixel_variance32x64_c, VPX_BITS_10), |
| 759 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance32x32_ssse3, |
| 760 | &vpx_highbd_10_masked_sub_pixel_variance32x32_c, VPX_BITS_10), |
| 761 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance32x16_ssse3, |
| 762 | &vpx_highbd_10_masked_sub_pixel_variance32x16_c, VPX_BITS_10), |
| 763 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance16x32_ssse3, |
| 764 | &vpx_highbd_10_masked_sub_pixel_variance16x32_c, VPX_BITS_10), |
| 765 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance16x16_ssse3, |
| 766 | &vpx_highbd_10_masked_sub_pixel_variance16x16_c, VPX_BITS_10), |
| 767 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance16x8_ssse3, |
| 768 | &vpx_highbd_10_masked_sub_pixel_variance16x8_c, VPX_BITS_10), |
| 769 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance8x16_ssse3, |
| 770 | &vpx_highbd_10_masked_sub_pixel_variance8x16_c, VPX_BITS_10), |
| 771 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance8x8_ssse3, |
| 772 | &vpx_highbd_10_masked_sub_pixel_variance8x8_c, VPX_BITS_10), |
| 773 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance8x4_ssse3, |
| 774 | &vpx_highbd_10_masked_sub_pixel_variance8x4_c, VPX_BITS_10), |
| 775 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance4x8_ssse3, |
| 776 | &vpx_highbd_10_masked_sub_pixel_variance4x8_c, VPX_BITS_10), |
| 777 | make_tuple(&vpx_highbd_10_masked_sub_pixel_variance4x4_ssse3, |
| 778 | &vpx_highbd_10_masked_sub_pixel_variance4x4_c, VPX_BITS_10), |
| 779 | #if CONFIG_EXT_PARTITION |
| 780 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance128x128_ssse3, |
| 781 | &vpx_highbd_12_masked_sub_pixel_variance128x128_c, VPX_BITS_12), |
| 782 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance128x64_ssse3, |
| 783 | &vpx_highbd_12_masked_sub_pixel_variance128x64_c, VPX_BITS_12), |
| 784 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance64x128_ssse3, |
| 785 | &vpx_highbd_12_masked_sub_pixel_variance64x128_c, VPX_BITS_12), |
| 786 | #endif // CONFIG_EXT_PARTITION |
| 787 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance64x64_ssse3, |
| 788 | &vpx_highbd_12_masked_sub_pixel_variance64x64_c, VPX_BITS_12), |
| 789 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance64x32_ssse3, |
| 790 | &vpx_highbd_12_masked_sub_pixel_variance64x32_c, VPX_BITS_12), |
| 791 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance32x64_ssse3, |
| 792 | &vpx_highbd_12_masked_sub_pixel_variance32x64_c, VPX_BITS_12), |
| 793 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance32x32_ssse3, |
| 794 | &vpx_highbd_12_masked_sub_pixel_variance32x32_c, VPX_BITS_12), |
| 795 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance32x16_ssse3, |
| 796 | &vpx_highbd_12_masked_sub_pixel_variance32x16_c, VPX_BITS_12), |
| 797 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance16x32_ssse3, |
| 798 | &vpx_highbd_12_masked_sub_pixel_variance16x32_c, VPX_BITS_12), |
| 799 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance16x16_ssse3, |
| 800 | &vpx_highbd_12_masked_sub_pixel_variance16x16_c, VPX_BITS_12), |
| 801 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance16x8_ssse3, |
| 802 | &vpx_highbd_12_masked_sub_pixel_variance16x8_c, VPX_BITS_12), |
| 803 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance8x16_ssse3, |
| 804 | &vpx_highbd_12_masked_sub_pixel_variance8x16_c, VPX_BITS_12), |
| 805 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance8x8_ssse3, |
| 806 | &vpx_highbd_12_masked_sub_pixel_variance8x8_c, VPX_BITS_12), |
| 807 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance8x4_ssse3, |
| 808 | &vpx_highbd_12_masked_sub_pixel_variance8x4_c, VPX_BITS_12) , |
| 809 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance4x8_ssse3, |
| 810 | &vpx_highbd_12_masked_sub_pixel_variance4x8_c, VPX_BITS_12), |
| 811 | make_tuple(&vpx_highbd_12_masked_sub_pixel_variance4x4_ssse3, |
| 812 | &vpx_highbd_12_masked_sub_pixel_variance4x4_c, VPX_BITS_12))); |
Debargha Mukherjee | 1d69cee | 2016-02-29 16:08:07 -0800 | [diff] [blame] | 813 | #endif // CONFIG_VP9_HIGHBITDEPTH |
| 814 | |
| 815 | #endif // HAVE_SSSE3 |
| 816 | } // namespace |