Replace left shifts with multiplications

Left-shifting a negative value is undefined behavior in C; multiplying by 8
produces the same result without that risk.
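
For illustration only (a minimal standalone sketch, not part of this patch,
with a made-up value standing in for mv->row - ref->row): the multiplication
is well defined for a negative operand, whereas the shift is not.

    #include <stdio.h>

    int main(void) {
      int mv_diff = -5;          /* e.g. mv->row - ref->row */
      int scaled = mv_diff * 8;  /* well defined: -40 */
      /* int risky = mv_diff << 3;   undefined behavior for a negative operand */
      printf("%d\n", scaled);
      return 0;
    }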

Change-Id: I7aecb499ee6ce7342b172adc4741de5c6c107a24
diff --git a/vp10/encoder/mcomp.c b/vp10/encoder/mcomp.c
index 9423ed2..054161d 100644
--- a/vp10/encoder/mcomp.c
+++ b/vp10/encoder/mcomp.c
@@ -99,8 +99,8 @@
 static int mvsad_err_cost(const MACROBLOCK *x, const MV *mv, const MV *ref,
                           int sad_per_bit) {
 #if CONFIG_REF_MV
-  const MV diff = { (mv->row - ref->row) << 3,
-                    (mv->col - ref->col) << 3 };
+  const MV diff = { (mv->row - ref->row) * 8,
+                    (mv->col - ref->col) * 8 };
   return ROUND_POWER_OF_TWO(
       (unsigned)mv_cost(&diff, x->nmvjointsadcost, x->mvsadcost) *
           sad_per_bit,
@@ -790,7 +790,7 @@
   if (use_upsampled_ref)
     besterr = upsampled_setup_center_error(xd, bestmv, ref_mv, error_per_bit,
                                            vfp, z, src_stride, y, y_stride,
-                                           second_pred, w, h, (offset << 3),
+                                           second_pred, w, h, (offset * 8),
                                            mvjcost, mvcost, sse1, distortion);
   else
     besterr = setup_center_error(xd, bestmv, ref_mv, error_per_bit, vfp,