Remove LoadBaseDispWide and StoreBaseDispWide.

Just pass k64 or kDouble to the non-wide versions instead.
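
The rewritten call sites share a small size-selection helper, LoadStoreOpSize(),
defined elsewhere in this change (not in the gen_common.cc excerpt below). A
minimal sketch of what it is expected to do, assuming the OpSize values k64 and
k32 used by these helpers plus a dedicated value for references (written as
kReference here; the exact name is an assumption):

    // Sketch only: collapse the old three-way branch (wide / ref / 32-bit)
    // into a single OpSize so the plain LoadBaseDisp()/StoreBaseDisp() pair
    // can replace the *Wide variants.
    OpSize LoadStoreOpSize(bool wide, bool ref) {
      return wide ? k64 : (ref ? kReference : k32);  // kReference name assumed.
    }

Each caller then computes the size once and passes it straight through, as the
gen_common.cc hunks below show.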

Change-Id: I000619c3b78d3a71db42edc747c8a0ba1ee229be
diff --git a/compiler/dex/quick/gen_common.cc b/compiler/dex/quick/gen_common.cc
index 2cd17cc..395cff7 100644
--- a/compiler/dex/quick/gen_common.cc
+++ b/compiler/dex/quick/gen_common.cc
@@ -564,13 +564,8 @@
       // There might have been a store before this volatile one so insert StoreStore barrier.
       GenMemBarrier(kStoreStore);
     }
-    if (is_long_or_double) {
-      StoreBaseDispWide(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg);
-    } else if (rl_src.ref) {
-      StoreRefDisp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg);
-    } else {
-      Store32Disp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg);
-    }
+    OpSize size = LoadStoreOpSize(is_long_or_double, rl_src.ref);
+    StoreBaseDisp(r_base, field_info.FieldOffset().Int32Value(), rl_src.reg, size);
     if (field_info.IsVolatile()) {
       // A load might follow the volatile store so insert a StoreLoad barrier.
       GenMemBarrier(kStoreLoad);
@@ -646,13 +641,8 @@
     }
     RegLocation rl_result = EvalLoc(rl_dest, result_reg_kind, true);
 
-    if (is_long_or_double) {
-      LoadBaseDispWide(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg, INVALID_SREG);
-    } else if (rl_result.ref) {
-      LoadRefDisp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg);
-    } else {
-      Load32Disp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg);
-    }
+    OpSize size = LoadStoreOpSize(is_long_or_double, rl_result.ref);
+    LoadBaseDisp(r_base, field_info.FieldOffset().Int32Value(), rl_result.reg, size, INVALID_SREG);
     FreeTemp(r_base);
 
     if (field_info.IsVolatile()) {
@@ -714,8 +704,8 @@
           result_reg_kind = kFPReg;
         }
         rl_result = EvalLoc(rl_dest, result_reg_kind, true);
-        LoadBaseDispWide(rl_obj.reg, field_info.FieldOffset().Int32Value(), rl_result.reg,
-                         rl_obj.s_reg_low);
+        LoadBaseDisp(rl_obj.reg, field_info.FieldOffset().Int32Value(), rl_result.reg,
+                     size, rl_obj.s_reg_low);
         MarkPossibleNullPointerException(opt_flags);
         if (field_info.IsVolatile()) {
           // Without context sensitive analysis, we must issue the most conservative barriers.
@@ -727,7 +717,7 @@
         RegStorage reg_ptr = AllocTemp();
         OpRegRegImm(kOpAdd, reg_ptr, rl_obj.reg, field_info.FieldOffset().Int32Value());
         rl_result = EvalLoc(rl_dest, reg_class, true);
-        LoadBaseDispWide(reg_ptr, 0, rl_result.reg, INVALID_SREG);
+        LoadBaseDisp(reg_ptr, 0, rl_result.reg, size, INVALID_SREG);
         MarkPossibleNullPointerException(opt_flags);
         if (field_info.IsVolatile()) {
           // Without context sensitive analysis, we must issue the most conservative barriers.
@@ -791,7 +781,7 @@
         // There might have been a store before this volatile one so insert StoreStore barrier.
         GenMemBarrier(kStoreStore);
       }
-      StoreBaseDispWide(reg_ptr, 0, rl_src.reg);
+      StoreBaseDisp(reg_ptr, 0, rl_src.reg, size);
       MarkPossibleNullPointerException(opt_flags);
       if (field_info.IsVolatile()) {
         // A load might follow the volatile store so insert a StoreLoad barrier.