/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */


#include <limits.h>
#include "vpx_ports/config.h"
#include "onyx_int.h"
#include "modecosts.h"
#include "encodeintra.h"
#include "entropymode.h"
#include "pickinter.h"
#include "findnearmv.h"
#include "encodemb.h"
#include "reconinter.h"
#include "reconintra.h"
#include "reconintra4x4.h"
#include "g_common.h"
#include "variance.h"
#include "mcomp.h"
#include "rdopt.h"
#include "vpx_mem/vpx_mem.h"

#if CONFIG_RUNTIME_CPU_DETECT
#define IF_RTCD(x) (x)
#else
#define IF_RTCD(x)  NULL
#endif

extern int VP8_UVSSE(MACROBLOCK *x, const vp8_variance_rtcd_vtable_t *rtcd);

#ifdef SPEEDSTATS
extern unsigned int cnt_pm;
#endif

extern const MV_REFERENCE_FRAME vp8_ref_frame_order[MAX_MODES];
extern const MB_PREDICTION_MODE vp8_mode_order[MAX_MODES];


extern unsigned int (*vp8_get16x16pred_error)(unsigned char *src_ptr, int src_stride, unsigned char *ref_ptr, int ref_stride);
extern unsigned int (*vp8_get4x4sse_cs)(unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr, int recon_stride);
extern int vp8_rd_pick_best_mbsegmentation(VP8_COMP *cpi, MACROBLOCK *x, MV *best_ref_mv, int best_rd, int *, int *, int *, int, int *mvcost[2], int, int fullpixel);
extern int vp8_cost_mv_ref(MB_PREDICTION_MODE m, const int near_mv_ref_ct[4]);
extern void vp8_set_mbmode_and_mvs(MACROBLOCK *x, MB_PREDICTION_MODE mb, MV *mv);

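/* Stub used in place of the fractional (sub-pel) motion search when the
 * speed settings skip sub-pel refinement: it simply promotes the full-pel
 * vector to the 1/8th-pel units used internally (3 fractional bits) and
 * returns 0.
 */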
int vp8_skip_fractional_mv_step(MACROBLOCK *mb, BLOCK *b, BLOCKD *d, MV *bestmv, MV *ref_mv, int error_per_bit, const vp8_variance_fn_ptr_t *vfp, int *mvcost[2])
{
    (void) b;
    (void) d;
    (void) ref_mv;
    (void) error_per_bit;
    (void) vfp;
    (void) mvcost;
    bestmv->row <<= 3;
    bestmv->col <<= 3;
    return 0;
}

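/* Prediction error for the 16x16 luma block of the current macroblock
 * against the selected reference, using the motion vector stored in the
 * first block descriptor.  A sub-pixel variance function is used when the
 * vector has a fractional offset, otherwise the plain variance function.
 */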
static int get_inter_mbpred_error(MACROBLOCK *mb, const vp8_variance_fn_ptr_t *vfp, unsigned int *sse)
{

    BLOCK *b = &mb->block[0];
    BLOCKD *d = &mb->e_mbd.block[0];
    unsigned char *what = (*(b->base_src) + b->src);
    int what_stride = b->src_stride;
    unsigned char *in_what = *(d->base_pre) + d->pre;
    int in_what_stride = d->pre_stride;
    int xoffset = d->bmi.mv.as_mv.col & 7;
    int yoffset = d->bmi.mv.as_mv.row & 7;

    in_what += (d->bmi.mv.as_mv.row >> 3) * d->pre_stride + (d->bmi.mv.as_mv.col >> 3);

    if (xoffset | yoffset)
    {
        return vfp->svf(in_what, in_what_stride, xoffset, yoffset, what, what_stride, sse);
    }
    else
    {
        return vfp->vf(what, what_stride, in_what, in_what_stride, sse);
    }

}

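/* Reference C version of the 16x16 prediction-error metric: the sum of
 * squared differences with the squared mean difference removed
 * (sse - sum*sum/256), i.e. 256 times the variance of the residual.
 */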
unsigned int vp8_get16x16pred_error_c
(
    const unsigned char *src_ptr,
    int src_stride,
    const unsigned char *ref_ptr,
    int ref_stride,
    int max_sad
)
{
    unsigned pred_error = 0;
    int i, j;
    int sum = 0;

    for (i = 0; i < 16; i++)
    {
        int diff;

        for (j = 0; j < 16; j++)
        {
            diff = src_ptr[j] - ref_ptr[j];
            sum += diff;
            pred_error += diff * diff;
        }

        src_ptr += src_stride;
        ref_ptr += ref_stride;
    }

    pred_error -= sum * sum / 256;
    return pred_error;
}

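/* Reference C version of the 4x4 sum of squared error between a source
 * block and a prediction block; no mean removal.  Used below to score
 * individual 4x4 intra prediction modes.
 */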
unsigned int vp8_get4x4sse_cs_c
(
    const unsigned char *src_ptr,
    int source_stride,
    const unsigned char *ref_ptr,
    int recon_stride,
    int max_sad
)
{
    int distortion = 0;
    int r, c;

    for (r = 0; r < 4; r++)
    {
        for (c = 0; c < 4; c++)
        {
            int diff = src_ptr[c] - ref_ptr[c];
            distortion += diff * diff;
        }

        src_ptr += source_stride;
        ref_ptr += recon_stride;
    }

    return distortion;
}

static int get_prediction_error(BLOCK *be, BLOCKD *b, const vp8_variance_rtcd_vtable_t *rtcd)
{
    unsigned char *sptr;
    unsigned char *dptr;
    sptr = (*(be->base_src) + be->src);
    dptr = b->predictor;

    return VARIANCE_INVOKE(rtcd, get4x4sse_cs)(sptr, be->src_stride, dptr, 16, 0x7fffffff);

}

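/* Choose the cheapest 4x4 intra prediction mode for a single block in
 * rate-distortion terms.  Only the modes up to B_HE_PRED are searched here
 * (the inline comment in the loop suggests the full range extends to
 * B_HU_PRED); the chosen mode is then encoded into the block immediately.
 */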
static int pick_intra4x4block(
    const VP8_ENCODER_RTCD *rtcd,
    MACROBLOCK *x,
    BLOCK *be,
    BLOCKD *b,
    B_PREDICTION_MODE *best_mode,
    B_PREDICTION_MODE above,
    B_PREDICTION_MODE left,
    int *bestrate,
    int *bestdistortion)
{
    B_PREDICTION_MODE mode;
    int best_rd = INT_MAX;       // 1<<30
    int rate;
    int distortion;
    unsigned int *mode_costs;

    if (x->e_mbd.frame_type == KEY_FRAME)
    {
        mode_costs = x->bmode_costs[above][left];
    }
    else
    {
        mode_costs = x->inter_bmode_costs;
    }

    for (mode = B_DC_PRED; mode <= B_HE_PRED /*B_HU_PRED*/; mode++)
    {
        int this_rd;

        rate = mode_costs[mode];
        vp8_predict_intra4x4(b, mode, b->predictor);
        distortion = get_prediction_error(be, b, &rtcd->variance);
        this_rd = RD_ESTIMATE(x->rdmult, x->rddiv, rate, distortion);

        if (this_rd < best_rd)
        {
            *bestrate = rate;
            *bestdistortion = distortion;
            best_rd = this_rd;
            *best_mode = mode;
        }
    }

    b->bmi.mode = (B_PREDICTION_MODE)(*best_mode);
    vp8_encode_intra4x4block(rtcd, x, be, b, b->bmi.mode);

    return best_rd;
}

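/* Pick 4x4 intra modes for all 16 luma blocks of the macroblock.  The
 * distortion passed in through *best_dist acts as a breakout value: the
 * loop stops early once the accumulated distortion exceeds it, in which
 * case both the returned RD value and *best_dist are set to INT_MAX.
 */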
int vp8_pick_intra4x4mby_modes(const VP8_ENCODER_RTCD *rtcd, MACROBLOCK *mb, int *Rate, int *best_dist)
{
    MACROBLOCKD *const xd = &mb->e_mbd;
    int i;
    int cost = mb->mbmode_cost [xd->frame_type] [B_PRED];
    int error;
    int distortion = 0;

    vp8_intra_prediction_down_copy(xd);

    for (i = 0; i < 16; i++)
    {
        MODE_INFO *const mic = xd->mode_info_context;
        const int mis = xd->mode_info_stride;
        const B_PREDICTION_MODE A = vp8_above_bmi(mic, i, mis)->mode;
        const B_PREDICTION_MODE L = vp8_left_bmi(mic, i)->mode;
        B_PREDICTION_MODE UNINITIALIZED_IS_SAFE(best_mode);
        int UNINITIALIZED_IS_SAFE(r), UNINITIALIZED_IS_SAFE(d);

        pick_intra4x4block(rtcd, mb, mb->block + i, xd->block + i,
                           &best_mode, A, L, &r, &d);

        cost += r;
        distortion += d;

        mic->bmi[i].mode = xd->block[i].bmi.mode = best_mode;

        // Break out once we have already exceeded the best-so-far value that was passed in
        if (distortion > *best_dist)
            break;
    }

    for (i = 0; i < 16; i++)
        xd->block[i].bmi.mv.as_int = 0;

    *Rate = cost;

    if (i == 16)
    {
        *best_dist = distortion;
        error = RD_ESTIMATE(mb->rdmult, mb->rddiv, cost, distortion);
    }
    else
    {
        *best_dist = INT_MAX;
        error = INT_MAX;
    }

    return error;
}

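/* Fast chroma intra mode selection.  Rather than encoding with each mode,
 * the DC, V, H and TM predictions are formed on the fly from the
 * reconstructed border pixels and compared against the 8x8 U and V source
 * blocks; the mode with the smallest accumulated squared error wins.
 */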
int vp8_pick_intra_mbuv_mode(MACROBLOCK *mb)
{

    MACROBLOCKD *x = &mb->e_mbd;
    unsigned char *uabove_row = x->dst.u_buffer - x->dst.uv_stride;
    unsigned char *vabove_row = x->dst.v_buffer - x->dst.uv_stride;
    unsigned char *usrc_ptr = (mb->block[16].src + *mb->block[16].base_src);
    unsigned char *vsrc_ptr = (mb->block[20].src + *mb->block[20].base_src);
    int uvsrc_stride = mb->block[16].src_stride;
    unsigned char uleft_col[8];
    unsigned char vleft_col[8];
    unsigned char utop_left = uabove_row[-1];
    unsigned char vtop_left = vabove_row[-1];
    int i, j;
    int expected_udc;
    int expected_vdc;
    int shift;
    int Uaverage = 0;
    int Vaverage = 0;
    int diff;
    int pred_error[4] = {0, 0, 0, 0}, best_error = INT_MAX;
    MB_PREDICTION_MODE UNINITIALIZED_IS_SAFE(best_mode);


    for (i = 0; i < 8; i++)
    {
        uleft_col[i] = x->dst.u_buffer [i * x->dst.uv_stride - 1];
        vleft_col[i] = x->dst.v_buffer [i * x->dst.uv_stride - 1];
    }

    if (!x->up_available && !x->left_available)
    {
        expected_udc = 128;
        expected_vdc = 128;
    }
    else
    {
        shift = 2;

        if (x->up_available)
        {

            for (i = 0; i < 8; i++)
            {
                Uaverage += uabove_row[i];
                Vaverage += vabove_row[i];
            }

            shift ++;

        }

        if (x->left_available)
        {
            for (i = 0; i < 8; i++)
            {
                Uaverage += uleft_col[i];
                Vaverage += vleft_col[i];
            }

            shift ++;

        }

        expected_udc = (Uaverage + (1 << (shift - 1))) >> shift;
        expected_vdc = (Vaverage + (1 << (shift - 1))) >> shift;
    }

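    // Accumulate the squared prediction error of all four chroma modes
    // (DC, V, H and TM) in a single pass over the 8x8 U and V source blocks.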
    for (i = 0; i < 8; i++)
    {
        for (j = 0; j < 8; j++)
        {

            int predu = uleft_col[i] + uabove_row[j] - utop_left;
            int predv = vleft_col[i] + vabove_row[j] - vtop_left;
            int u_p, v_p;

            u_p = usrc_ptr[j];
            v_p = vsrc_ptr[j];

            if (predu < 0)
                predu = 0;

            if (predu > 255)
                predu = 255;

            if (predv < 0)
                predv = 0;

            if (predv > 255)
                predv = 255;


            diff = u_p - expected_udc;
            pred_error[DC_PRED] += diff * diff;
            diff = v_p - expected_vdc;
            pred_error[DC_PRED] += diff * diff;


            diff = u_p - uabove_row[j];
            pred_error[V_PRED] += diff * diff;
            diff = v_p - vabove_row[j];
            pred_error[V_PRED] += diff * diff;


            diff = u_p - uleft_col[i];
            pred_error[H_PRED] += diff * diff;
            diff = v_p - vleft_col[i];
            pred_error[H_PRED] += diff * diff;


            diff = u_p - predu;
            pred_error[TM_PRED] += diff * diff;
            diff = v_p - predv;
            pred_error[TM_PRED] += diff * diff;


        }

        usrc_ptr += uvsrc_stride;
        vsrc_ptr += uvsrc_stride;

        if (i == 3)
        {
            usrc_ptr = (mb->block[18].src + *mb->block[18].base_src);
            vsrc_ptr = (mb->block[22].src + *mb->block[22].base_src);
        }

    }


    for (i = DC_PRED; i <= TM_PRED; i++)
    {
        if (best_error > pred_error[i])
        {
            best_error = pred_error[i];
            best_mode = (MB_PREDICTION_MODE)i;
        }
    }


    mb->e_mbd.mode_info_context->mbmi.uv_mode = best_mode;
    return best_error;

}

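/* Fast mode decision for a macroblock on an inter frame (the path used when
 * the full rate-distortion search is disabled).  Candidate modes are visited
 * in vp8_mode_order[], gated by per-mode adaptive RD thresholds and per-mode
 * test frequencies; the best candidate found is written back into the
 * macroblock's mode info and its RD estimate is returned.
 */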
int vp8_pick_inter_mode(VP8_COMP *cpi, MACROBLOCK *x, int recon_yoffset, int recon_uvoffset, int *returnrate, int *returndistortion, int *returnintra)
{
    BLOCK *b = &x->block[0];
    BLOCKD *d = &x->e_mbd.block[0];
    MACROBLOCKD *xd = &x->e_mbd;
    B_MODE_INFO best_bmodes[16];
    MB_MODE_INFO best_mbmode;
    PARTITION_INFO best_partition;
    MV best_ref_mv;
    MV mode_mv[MB_MODE_COUNT];
    MB_PREDICTION_MODE this_mode;
    int num00;
    int i;
    int mdcounts[4];
    int best_rd = INT_MAX; // 1 << 30;
    int best_intra_rd = INT_MAX;
    int mode_index;
    int ref_frame_cost[MAX_REF_FRAMES];
    int rate;
    int rate2;
    int distortion2;
    int bestsme;
    //int all_rds[MAX_MODES]; // Experimental debug code.
    int best_mode_index = 0;
    int sse = INT_MAX;

    MV mvp;
    int near_sadidx[8] = {0, 1, 2, 3, 4, 5, 6, 7};
    int saddone = 0;
    int sr = 0;    // Search range returned by mv_pred(), expressed in step_param levels (0-7).

    MV nearest_mv[4];
    MV near_mv[4];
    MV frame_best_ref_mv[4];
    int MDCounts[4][4];
    unsigned char *y_buffer[4];
    unsigned char *u_buffer[4];
    unsigned char *v_buffer[4];

    int skip_mode[4] = {0, 0, 0, 0};

    vpx_memset(mode_mv, 0, sizeof(mode_mv));
    vpx_memset(nearest_mv, 0, sizeof(nearest_mv));
    vpx_memset(near_mv, 0, sizeof(near_mv));
    vpx_memset(&best_mbmode, 0, sizeof(best_mbmode));

    // set up all the refframe dependent pointers.
    if (cpi->ref_frame_flags & VP8_LAST_FLAG)
    {
        YV12_BUFFER_CONFIG *lst_yv12 = &cpi->common.yv12_fb[cpi->common.lst_fb_idx];

        vp8_find_near_mvs(&x->e_mbd, x->e_mbd.mode_info_context, &nearest_mv[LAST_FRAME], &near_mv[LAST_FRAME],
                          &frame_best_ref_mv[LAST_FRAME], MDCounts[LAST_FRAME], LAST_FRAME, cpi->common.ref_frame_sign_bias);

        y_buffer[LAST_FRAME] = lst_yv12->y_buffer + recon_yoffset;
        u_buffer[LAST_FRAME] = lst_yv12->u_buffer + recon_uvoffset;
        v_buffer[LAST_FRAME] = lst_yv12->v_buffer + recon_uvoffset;
    }
    else
        skip_mode[LAST_FRAME] = 1;

    if (cpi->ref_frame_flags & VP8_GOLD_FLAG)
    {
        YV12_BUFFER_CONFIG *gld_yv12 = &cpi->common.yv12_fb[cpi->common.gld_fb_idx];

        vp8_find_near_mvs(&x->e_mbd, x->e_mbd.mode_info_context, &nearest_mv[GOLDEN_FRAME], &near_mv[GOLDEN_FRAME],
                          &frame_best_ref_mv[GOLDEN_FRAME], MDCounts[GOLDEN_FRAME], GOLDEN_FRAME, cpi->common.ref_frame_sign_bias);

        y_buffer[GOLDEN_FRAME] = gld_yv12->y_buffer + recon_yoffset;
        u_buffer[GOLDEN_FRAME] = gld_yv12->u_buffer + recon_uvoffset;
        v_buffer[GOLDEN_FRAME] = gld_yv12->v_buffer + recon_uvoffset;
    }
    else
        skip_mode[GOLDEN_FRAME] = 1;

    if (cpi->ref_frame_flags & VP8_ALT_FLAG && cpi->source_alt_ref_active)
    {
        YV12_BUFFER_CONFIG *alt_yv12 = &cpi->common.yv12_fb[cpi->common.alt_fb_idx];

        vp8_find_near_mvs(&x->e_mbd, x->e_mbd.mode_info_context, &nearest_mv[ALTREF_FRAME], &near_mv[ALTREF_FRAME],
                          &frame_best_ref_mv[ALTREF_FRAME], MDCounts[ALTREF_FRAME], ALTREF_FRAME, cpi->common.ref_frame_sign_bias);

        y_buffer[ALTREF_FRAME] = alt_yv12->y_buffer + recon_yoffset;
        u_buffer[ALTREF_FRAME] = alt_yv12->u_buffer + recon_uvoffset;
        v_buffer[ALTREF_FRAME] = alt_yv12->v_buffer + recon_uvoffset;
    }
    else
        skip_mode[ALTREF_FRAME] = 1;

    cpi->mbs_tested_so_far++;          // Count of the number of MBs tested so far this frame

    *returnintra = best_intra_rd;
    x->skip = 0;

    ref_frame_cost[INTRA_FRAME] = vp8_cost_zero(cpi->prob_intra_coded);

    // Special case treatment when GF and ARF are not sensible options for reference
    if (cpi->ref_frame_flags == VP8_LAST_FLAG)
    {
        ref_frame_cost[LAST_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
                                     + vp8_cost_zero(255);
        ref_frame_cost[GOLDEN_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
                                       + vp8_cost_one(255)
                                       + vp8_cost_zero(128);
        ref_frame_cost[ALTREF_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
                                       + vp8_cost_one(255)
                                       + vp8_cost_one(128);
    }
    else
    {
        ref_frame_cost[LAST_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
                                     + vp8_cost_zero(cpi->prob_last_coded);
        ref_frame_cost[GOLDEN_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
                                       + vp8_cost_one(cpi->prob_last_coded)
                                       + vp8_cost_zero(cpi->prob_gf_coded);
        ref_frame_cost[ALTREF_FRAME] = vp8_cost_one(cpi->prob_intra_coded)
                                       + vp8_cost_one(cpi->prob_last_coded)
                                       + vp8_cost_one(cpi->prob_gf_coded);
    }

    x->e_mbd.mode_info_context->mbmi.ref_frame = INTRA_FRAME;

    // If we encode a new motion vector this is important:
    // find the best new motion vector.
    for (mode_index = 0; mode_index < MAX_MODES; mode_index++)
    {
        int frame_cost;
        int this_rd = INT_MAX;

        if (best_rd <= cpi->rd_threshes[mode_index])
            continue;

        x->e_mbd.mode_info_context->mbmi.ref_frame = vp8_ref_frame_order[mode_index];

        if (skip_mode[x->e_mbd.mode_info_context->mbmi.ref_frame])
            continue;

        // Check to see if the testing frequency for this mode is at its max.
        // If so then prevent it from being tested and increase the threshold for its testing.
        if (cpi->mode_test_hit_counts[mode_index] && (cpi->mode_check_freq[mode_index] > 1))
        {
            //if ( (cpi->mbs_tested_so_far / cpi->mode_test_hit_counts[mode_index]) <= cpi->mode_check_freq[mode_index] )
            if (cpi->mbs_tested_so_far <= (cpi->mode_check_freq[mode_index] * cpi->mode_test_hit_counts[mode_index]))
            {
                // Increase the threshold for coding this mode to make it less likely to be chosen
                cpi->rd_thresh_mult[mode_index] += 4;

                if (cpi->rd_thresh_mult[mode_index] > MAX_THRESHMULT)
                    cpi->rd_thresh_mult[mode_index] = MAX_THRESHMULT;

                cpi->rd_threshes[mode_index] = (cpi->rd_baseline_thresh[mode_index] >> 7) * cpi->rd_thresh_mult[mode_index];

                continue;
            }
        }

        // We have now reached the point where we are going to test the current mode,
        // so increment the counter for the number of times it has been tested.
        cpi->mode_test_hit_counts[mode_index] ++;

        rate2 = 0;
        distortion2 = 0;

        this_mode = vp8_mode_order[mode_index];

        // Experimental debug code.
        //all_rds[mode_index] = -1;

        x->e_mbd.mode_info_context->mbmi.mode = this_mode;
        x->e_mbd.mode_info_context->mbmi.uv_mode = DC_PRED;

        // Work out the cost associated with selecting the reference frame
        frame_cost = ref_frame_cost[x->e_mbd.mode_info_context->mbmi.ref_frame];
        rate2 += frame_cost;

        // Everything but intra
        if (x->e_mbd.mode_info_context->mbmi.ref_frame)
        {
            x->e_mbd.pre.y_buffer = y_buffer[x->e_mbd.mode_info_context->mbmi.ref_frame];
            x->e_mbd.pre.u_buffer = u_buffer[x->e_mbd.mode_info_context->mbmi.ref_frame];
            x->e_mbd.pre.v_buffer = v_buffer[x->e_mbd.mode_info_context->mbmi.ref_frame];
            mode_mv[NEARESTMV] = nearest_mv[x->e_mbd.mode_info_context->mbmi.ref_frame];
            mode_mv[NEARMV] = near_mv[x->e_mbd.mode_info_context->mbmi.ref_frame];
            best_ref_mv = frame_best_ref_mv[x->e_mbd.mode_info_context->mbmi.ref_frame];
            memcpy(mdcounts, MDCounts[x->e_mbd.mode_info_context->mbmi.ref_frame], sizeof(mdcounts));
        }

        // Only consider ZEROMV/ALTREF_FRAME for alt ref frame,
        // unless ARNR filtering is enabled in which case we want
        // an unfiltered alternative
        if (cpi->is_src_frame_alt_ref && (cpi->oxcf.arnr_max_frames == 0))
        {
            if (this_mode != ZEROMV || x->e_mbd.mode_info_context->mbmi.ref_frame != ALTREF_FRAME)
                continue;
        }

        if (x->e_mbd.mode_info_context->mbmi.mode == NEWMV)
        {
            if (!saddone)
            {
                vp8_cal_sad(cpi, xd, x, recon_yoffset, &near_sadidx[0]);
                saddone = 1;
            }

            vp8_mv_pred(cpi, &x->e_mbd, x->e_mbd.mode_info_context, &mvp,
                        x->e_mbd.mode_info_context->mbmi.ref_frame, cpi->common.ref_frame_sign_bias, &sr, &near_sadidx[0]);

            /* Adjust mvp to make sure it is within MV range */
            if (mvp.row > best_ref_mv.row + MAX_FULL_PEL_VAL)
                mvp.row = best_ref_mv.row + MAX_FULL_PEL_VAL;
            else if (mvp.row < best_ref_mv.row - MAX_FULL_PEL_VAL)
                mvp.row = best_ref_mv.row - MAX_FULL_PEL_VAL;
            if (mvp.col > best_ref_mv.col + MAX_FULL_PEL_VAL)
                mvp.col = best_ref_mv.col + MAX_FULL_PEL_VAL;
            else if (mvp.col < best_ref_mv.col - MAX_FULL_PEL_VAL)
                mvp.col = best_ref_mv.col - MAX_FULL_PEL_VAL;
        }

        switch (this_mode)
        {
        case B_PRED:
            distortion2 = *returndistortion; // Best so far passed in as breakout value to vp8_pick_intra4x4mby_modes
            vp8_pick_intra4x4mby_modes(IF_RTCD(&cpi->rtcd), x, &rate, &distortion2);
            rate2 += rate;
            distortion2 = VARIANCE_INVOKE(&cpi->rtcd.variance, get16x16prederror)(x->src.y_buffer, x->src.y_stride, x->e_mbd.predictor, 16, 0x7fffffff);

            if (distortion2 == INT_MAX)
            {
                this_rd = INT_MAX;
            }
            else
            {
                this_rd = RD_ESTIMATE(x->rdmult, x->rddiv, rate2, distortion2);

                if (this_rd < best_intra_rd)
                {
                    best_intra_rd = this_rd;
                    *returnintra = best_intra_rd;
                }
            }

            break;

        case SPLITMV:

            // Split MV modes are currently not supported when RD is not enabled.
            break;

        case DC_PRED:
        case V_PRED:
        case H_PRED:
        case TM_PRED:
            vp8_build_intra_predictors_mby_ptr(&x->e_mbd);
            distortion2 = VARIANCE_INVOKE(&cpi->rtcd.variance, get16x16prederror)(x->src.y_buffer, x->src.y_stride, x->e_mbd.predictor, 16, 0x7fffffff);
            rate2 += x->mbmode_cost[x->e_mbd.frame_type][x->e_mbd.mode_info_context->mbmi.mode];
            this_rd = RD_ESTIMATE(x->rdmult, x->rddiv, rate2, distortion2);

            if (this_rd < best_intra_rd)
            {
                best_intra_rd = this_rd;
                *returnintra = best_intra_rd;
            }

            break;

        case NEWMV:
        {
            int thissme;
            int step_param;
            int further_steps;
            int n = 0;
            int sadpb = x->sadperbit16;

            int col_min = (best_ref_mv.col - MAX_FULL_PEL_VAL) >> 3;
            int col_max = (best_ref_mv.col + MAX_FULL_PEL_VAL) >> 3;
            int row_min = (best_ref_mv.row - MAX_FULL_PEL_VAL) >> 3;
            int row_max = (best_ref_mv.row + MAX_FULL_PEL_VAL) >> 3;

            int tmp_col_min = x->mv_col_min;
            int tmp_col_max = x->mv_col_max;
            int tmp_row_min = x->mv_row_min;
            int tmp_row_max = x->mv_row_max;

            // Get intersection of UMV window and valid MV window to reduce # of checks in diamond search.
            if (x->mv_col_min < col_min)
                x->mv_col_min = col_min;
            if (x->mv_col_max > col_max)
                x->mv_col_max = col_max;
            if (x->mv_row_min < row_min)
                x->mv_row_min = row_min;
            if (x->mv_row_max > row_max)
                x->mv_row_max = row_max;

            // Further step/diamond searches as necessary
            {
                int speed_adjust = (cpi->Speed > 5) ? ((cpi->Speed >= 8) ? 3 : 2) : 1;
                step_param = cpi->sf.first_step + speed_adjust;
                sr += speed_adjust;
                // Adjust search range according to sr from mv prediction
                if (sr > step_param)
                    step_param = sr;
                further_steps = (cpi->Speed >= 8) ? 0 : (cpi->sf.max_step_search_steps - 1 - step_param);
            }

            if (cpi->sf.search_method == HEX)
            {
                bestsme = vp8_hex_search(x, b, d, &best_ref_mv, &d->bmi.mv.as_mv, step_param, sadpb/*x->errorperbit*/, &num00, &cpi->fn_ptr[BLOCK_16X16], x->mvsadcost, x->mvcost);
                mode_mv[NEWMV].row = d->bmi.mv.as_mv.row;
                mode_mv[NEWMV].col = d->bmi.mv.as_mv.col;
            }
            else
            {
                bestsme = cpi->diamond_search_sad(x, b, d, &mvp, &d->bmi.mv.as_mv, step_param, sadpb / 2/*x->errorperbit*/, &num00, &cpi->fn_ptr[BLOCK_16X16], x->mvsadcost, x->mvcost, &best_ref_mv); //sadpb < 9
                mode_mv[NEWMV].row = d->bmi.mv.as_mv.row;
                mode_mv[NEWMV].col = d->bmi.mv.as_mv.col;

                // Further step/diamond searches as necessary
                n = 0;
                //further_steps = (cpi->sf.max_step_search_steps - 1) - step_param;

                n = num00;
                num00 = 0;

                while (n < further_steps)
                {
                    n++;

                    if (num00)
                        num00--;
                    else
                    {
                        thissme = cpi->diamond_search_sad(x, b, d, &mvp, &d->bmi.mv.as_mv, step_param + n, sadpb / 4/*x->errorperbit*/, &num00, &cpi->fn_ptr[BLOCK_16X16], x->mvsadcost, x->mvcost, &best_ref_mv); //sadpb = 9

                        if (thissme < bestsme)
                        {
                            bestsme = thissme;
                            mode_mv[NEWMV].row = d->bmi.mv.as_mv.row;
                            mode_mv[NEWMV].col = d->bmi.mv.as_mv.col;
                        }
                        else
                        {
                            d->bmi.mv.as_mv.row = mode_mv[NEWMV].row;
                            d->bmi.mv.as_mv.col = mode_mv[NEWMV].col;
                        }
                    }
                }
            }

            x->mv_col_min = tmp_col_min;
            x->mv_col_max = tmp_col_max;
            x->mv_row_min = tmp_row_min;
            x->mv_row_max = tmp_row_max;

            if (bestsme < INT_MAX)
                cpi->find_fractional_mv_step(x, b, d, &d->bmi.mv.as_mv, &best_ref_mv, x->errorperbit, &cpi->fn_ptr[BLOCK_16X16], cpi->mb.mvcost);

            mode_mv[NEWMV].row = d->bmi.mv.as_mv.row;
            mode_mv[NEWMV].col = d->bmi.mv.as_mv.col;

            // mv cost
            rate2 += vp8_mv_bit_cost(&mode_mv[NEWMV], &best_ref_mv, cpi->mb.mvcost, 128);
        }

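        // Note: NEWMV deliberately has no break and falls through so that the
        // new vector is rate-distortion scored by the shared code below.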
        case NEARESTMV:
        case NEARMV:

            if (mode_mv[this_mode].row == 0 && mode_mv[this_mode].col == 0)
                continue;

        case ZEROMV:

            // Trap vectors that reach beyond the UMV borders.
            // Note that ALL New MV, Nearest MV, Near MV and Zero MV code drops through to this point
            // because of the lack of break statements in the previous two cases.
            if (((mode_mv[this_mode].row >> 3) < x->mv_row_min) || ((mode_mv[this_mode].row >> 3) > x->mv_row_max) ||
                ((mode_mv[this_mode].col >> 3) < x->mv_col_min) || ((mode_mv[this_mode].col >> 3) > x->mv_col_max))
                continue;

            rate2 += vp8_cost_mv_ref(this_mode, mdcounts);
            x->e_mbd.mode_info_context->mbmi.mode = this_mode;
            x->e_mbd.mode_info_context->mbmi.mv.as_mv = mode_mv[this_mode];
            x->e_mbd.block[0].bmi.mode = this_mode;
            x->e_mbd.block[0].bmi.mv.as_int = x->e_mbd.mode_info_context->mbmi.mv.as_int;

            distortion2 = get_inter_mbpred_error(x, &cpi->fn_ptr[BLOCK_16X16], (unsigned int *)(&sse));

            this_rd = RD_ESTIMATE(x->rdmult, x->rddiv, rate2, distortion2);

            if (cpi->active_map_enabled && x->active_ptr[0] == 0)
            {
                x->skip = 1;
            }
            else if (sse < x->encode_breakout)
            {
                // Check u and v to make sure skip is ok
                int sse2 = 0;

                sse2 = VP8_UVSSE(x, IF_RTCD(&cpi->rtcd.variance));

                if (sse2 * 2 < x->encode_breakout)
                    x->skip = 1;
                else
                    x->skip = 0;
            }

            break;
        default:
            break;
        }

        // Experimental debug code.
        //all_rds[mode_index] = this_rd;

        if (this_rd < best_rd || x->skip)
        {
            // Note index of best mode
            best_mode_index = mode_index;

            *returnrate = rate2;
            *returndistortion = distortion2;
            best_rd = this_rd;
            vpx_memcpy(&best_mbmode, &x->e_mbd.mode_info_context->mbmi, sizeof(MB_MODE_INFO));
            vpx_memcpy(&best_partition, x->partition_info, sizeof(PARTITION_INFO));

            if (this_mode == B_PRED || this_mode == SPLITMV)
                for (i = 0; i < 16; i++)
                {
                    vpx_memcpy(&best_bmodes[i], &x->e_mbd.block[i].bmi, sizeof(B_MODE_INFO));
                }
            else
            {
                best_bmodes[0].mv = x->e_mbd.block[0].bmi.mv;
            }

            // Testing this mode gave rise to an improvement in best error score. Lower the threshold a bit for next time.
            cpi->rd_thresh_mult[mode_index] = (cpi->rd_thresh_mult[mode_index] >= (MIN_THRESHMULT + 2)) ? cpi->rd_thresh_mult[mode_index] - 2 : MIN_THRESHMULT;
            cpi->rd_threshes[mode_index] = (cpi->rd_baseline_thresh[mode_index] >> 7) * cpi->rd_thresh_mult[mode_index];
        }

        // If the mode did not help improve the best error case then raise the threshold for testing that mode next time around.
        else
        {
            cpi->rd_thresh_mult[mode_index] += 4;

            if (cpi->rd_thresh_mult[mode_index] > MAX_THRESHMULT)
                cpi->rd_thresh_mult[mode_index] = MAX_THRESHMULT;

            cpi->rd_threshes[mode_index] = (cpi->rd_baseline_thresh[mode_index] >> 7) * cpi->rd_thresh_mult[mode_index];
        }

        if (x->skip)
            break;
    }

    // Reduce the activation RD thresholds for the best choice mode
    if ((cpi->rd_baseline_thresh[best_mode_index] > 0) && (cpi->rd_baseline_thresh[best_mode_index] < (INT_MAX >> 2)))
    {
        int best_adjustment = (cpi->rd_thresh_mult[best_mode_index] >> 3);

        cpi->rd_thresh_mult[best_mode_index] = (cpi->rd_thresh_mult[best_mode_index] >= (MIN_THRESHMULT + best_adjustment)) ? cpi->rd_thresh_mult[best_mode_index] - best_adjustment : MIN_THRESHMULT;
        cpi->rd_threshes[best_mode_index] = (cpi->rd_baseline_thresh[best_mode_index] >> 7) * cpi->rd_thresh_mult[best_mode_index];
    }

    // Keep a record of best mode index for use in next loop
    cpi->last_best_mode_index = best_mode_index;

    if (best_mbmode.mode <= B_PRED)
    {
        x->e_mbd.mode_info_context->mbmi.ref_frame = INTRA_FRAME;
        vp8_pick_intra_mbuv_mode(x);
        best_mbmode.uv_mode = x->e_mbd.mode_info_context->mbmi.uv_mode;
    }


    {
        int this_rdbin = (*returndistortion >> 7);

        if (this_rdbin >= 1024)
        {
            this_rdbin = 1023;
        }

        cpi->error_bins[this_rdbin] ++;
    }


    if (cpi->is_src_frame_alt_ref && (best_mbmode.mode != ZEROMV || best_mbmode.ref_frame != ALTREF_FRAME))
    {
        best_mbmode.mode = ZEROMV;
        best_mbmode.ref_frame = ALTREF_FRAME;
        best_mbmode.mv.as_int = 0;
        best_mbmode.uv_mode = 0;
        best_mbmode.mb_skip_coeff = (cpi->common.mb_no_coeff_skip) ? 1 : 0;
        best_mbmode.partitioning = 0;
        best_mbmode.dc_diff = 0;

        vpx_memcpy(&x->e_mbd.mode_info_context->mbmi, &best_mbmode, sizeof(MB_MODE_INFO));
        vpx_memcpy(x->partition_info, &best_partition, sizeof(PARTITION_INFO));

        for (i = 0; i < 16; i++)
        {
            vpx_memset(&x->e_mbd.block[i].bmi, 0, sizeof(B_MODE_INFO));
        }

        x->e_mbd.mode_info_context->mbmi.mv.as_int = 0;

        return best_rd;
    }


    // macroblock modes
    vpx_memcpy(&x->e_mbd.mode_info_context->mbmi, &best_mbmode, sizeof(MB_MODE_INFO));
    vpx_memcpy(x->partition_info, &best_partition, sizeof(PARTITION_INFO));

    if (x->e_mbd.mode_info_context->mbmi.mode == B_PRED || x->e_mbd.mode_info_context->mbmi.mode == SPLITMV)
        for (i = 0; i < 16; i++)
        {
            vpx_memcpy(&x->e_mbd.block[i].bmi, &best_bmodes[i], sizeof(B_MODE_INFO));
        }
    else
    {
        vp8_set_mbmode_and_mvs(x, x->e_mbd.mode_info_context->mbmi.mode, &best_bmodes[0].mv.as_mv);
    }

    x->e_mbd.mode_info_context->mbmi.mv.as_mv = x->e_mbd.block[15].bmi.mv.as_mv;

    return best_rd;
}