64-bit prep

Preparation for the 64-bit rollout.
  o Eliminated storing pointers in 32-bit int slots in LIR (see the
    pointer-wrapping sketch after this list).
  o General size reductions of common structures to reduce the impact
    of doubled pointer sizes (see the packing sketch below):
    - BasicBlock struct was 72 bytes, now is 48.
    - MIR struct was 72 bytes, now is 64.
    - RegLocation was 12 bytes, now is 8.
  o Generally replaced uses of BasicBlock* pointers with 16-bit ids
    (see the id-lookup sketch below).
  o Replaced several doubly-linked lists with singly-linked ones to
    save one stored pointer per node (sketched below).
  o Replaced a number of uintptr_t uses, a holdover from the JIT
    (which used pointers to mapped dex & actual code cache addresses
    rather than trace-relative offsets), with uint32_t (see the
    offset sketch below).
  o Cleaned up handling of embedded data for switch tables and array data.
  o Miscellaneous cleanup.
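
For illustration, the pointer-wrapping scheme (used throughout the diff
below) boils down to stashing the pointer in a side table and storing a
32-bit index in the LIR operand slot.  A minimal sketch, assuming a
std::vector-backed table; the actual WrapPointer/UnwrapPointer helpers
in the tree may differ in naming and signature:

  #include <cstdint>
  #include <vector>

  class PointerTable {
   public:
    // Stash the pointer and return a small index that fits in a
    // 32-bit LIR operand, regardless of host pointer width.
    uint32_t WrapPointer(void* pointer) {
      table_.push_back(pointer);
      return static_cast<uint32_t>(table_.size() - 1);
    }

    // Recover the original pointer from the wrapped index.
    void* UnwrapPointer(uint32_t index) const {
      return table_[index];
    }

   private:
    std::vector<void*> table_;
  };

This keeps LIR operands a fixed 32 bits on both 32- and 64-bit hosts,
at the cost of one indirection when the pointer is actually needed.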
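
The struct shrinks listed above come largely from packing flags into
bitfields and narrowing over-wide fields.  A minimal sketch of the
idea; the field names are illustrative, not the actual RegLocation
layout:

  #include <cstdint>

  // Before: int-sized fields and unpacked bools; ~16 bytes on
  // typical ABIs.
  struct LooseLoc {
    int32_t location;
    bool wide;
    bool defined;
    bool is_const;
    int32_t low_reg;
    int32_t s_reg;
  };

  // After: flags packed into bitfields, register field narrowed;
  // 8 bytes on typical ABIs.
  struct PackedLoc {
    unsigned location : 3;
    unsigned wide : 1;
    unsigned defined : 1;
    unsigned is_const : 1;
    int16_t low_reg;
    int32_t s_reg;
  };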
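
Holding blocks by id shrinks every stored block reference from 8 bytes
to 2 on 64-bit hosts, at the cost of a table lookup through the owning
graph.  A rough sketch; BasicBlockId and GetBasicBlock() are assumed
names:

  #include <cstdint>
  #include <vector>

  typedef uint16_t BasicBlockId;

  struct BasicBlock {
    BasicBlockId id;
    BasicBlockId taken;          // successors held by id, not pointer
    BasicBlockId fall_through;
  };

  class MIRGraph {
   public:
    // Resolve an id back to a block through the owning graph.
    BasicBlock* GetBasicBlock(BasicBlockId id) {
      return block_list_[id];
    }

   private:
    std::vector<BasicBlock*> block_list_;
  };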
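
Dropping the prev links is a straight one-pointer-per-node saving:

  // Doubly-linked: two stored pointers per node.
  struct DNode {
    DNode* prev;
    DNode* next;
  };

  // Singly-linked: one stored pointer per node; on 64-bit this saves
  // 8 bytes per node, at the cost of forward-only traversal.
  struct SNode {
    SNode* next;
  };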
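
The uintptr_t cleanup relies on the Quick compiler working in
method-relative offsets, which fit in 32 bits; absolute addresses are
only formed at the point of use.  A sketch of the pattern (names are
illustrative):

  #include <cstdint>

  // A code reference carried as a 32-bit, host-width-independent
  // offset rather than a pointer-width value.
  struct CodeRef {
    uint32_t offset;  // relative to the method's code start
  };

  // Materialize an absolute address only when actually needed.
  inline uint8_t* ToAddress(uint8_t* code_base, CodeRef ref) {
    return code_base + ref.offset;
  }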

I anticipate one or two additional CLs to further reduce the size of
the MIR and LIR structs.

Change-Id: I58e426d3f8e5efe64c1146b2823453da99451230
diff --git a/compiler/dex/quick/gen_invoke.cc b/compiler/dex/quick/gen_invoke.cc
index 0a0cc17..3def7f5 100644
--- a/compiler/dex/quick/gen_invoke.cc
+++ b/compiler/dex/quick/gen_invoke.cc
@@ -908,7 +908,7 @@
     LoadWordDisp(rl_obj.low_reg, value_offset, reg_ptr);
     if (range_check) {
      // Set up a launch pad to allow retry in case of bounds violation
-      launch_pad = RawLIR(0, kPseudoIntrinsicRetry, reinterpret_cast<uintptr_t>(info));
+      launch_pad = RawLIR(0, kPseudoIntrinsicRetry, WrapPointer(info));
       intrinsic_launchpads_.Insert(launch_pad);
       OpRegReg(kOpCmp, rl_idx.low_reg, reg_max);
       FreeTemp(reg_max);
@@ -919,7 +919,7 @@
       reg_max = AllocTemp();
       LoadWordDisp(rl_obj.low_reg, count_offset, reg_max);
      // Set up a launch pad to allow retry in case of bounds violation
-      launch_pad = RawLIR(0, kPseudoIntrinsicRetry, reinterpret_cast<uintptr_t>(info));
+      launch_pad = RawLIR(0, kPseudoIntrinsicRetry, WrapPointer(info));
       intrinsic_launchpads_.Insert(launch_pad);
       OpRegReg(kOpCmp, rl_idx.low_reg, reg_max);
       FreeTemp(reg_max);
@@ -1085,7 +1085,7 @@
   }
   int r_tgt = (cu_->instruction_set != kX86) ? LoadHelper(QUICK_ENTRYPOINT_OFFSET(pIndexOf)) : 0;
   GenNullCheck(rl_obj.s_reg_low, reg_ptr, info->opt_flags);
-  LIR* launch_pad = RawLIR(0, kPseudoIntrinsicRetry, reinterpret_cast<uintptr_t>(info));
+  LIR* launch_pad = RawLIR(0, kPseudoIntrinsicRetry, WrapPointer(info));
   intrinsic_launchpads_.Insert(launch_pad);
   OpCmpImmBranch(kCondGt, reg_char, 0xFFFF, launch_pad);
   // NOTE: not a safepoint
@@ -1095,7 +1095,7 @@
     OpThreadMem(kOpBlx, QUICK_ENTRYPOINT_OFFSET(pIndexOf));
   }
   LIR* resume_tgt = NewLIR0(kPseudoTargetLabel);
-  launch_pad->operands[2] = reinterpret_cast<uintptr_t>(resume_tgt);
+  launch_pad->operands[2] = WrapPointer(resume_tgt);
   // Record that we've already inlined & null checked
   info->opt_flags |= (MIR_INLINED | MIR_IGNORE_NULL_CHECK);
   RegLocation rl_return = GetReturn(false);
@@ -1123,7 +1123,7 @@
       LoadHelper(QUICK_ENTRYPOINT_OFFSET(pStringCompareTo)) : 0;
   GenNullCheck(rl_this.s_reg_low, reg_this, info->opt_flags);
   // TUNING: check if rl_cmp.s_reg_low is already null checked
-  LIR* launch_pad = RawLIR(0, kPseudoIntrinsicRetry, reinterpret_cast<uintptr_t>(info));
+  LIR* launch_pad = RawLIR(0, kPseudoIntrinsicRetry, WrapPointer(info));
   intrinsic_launchpads_.Insert(launch_pad);
   OpCmpImmBranch(kCondEq, reg_cmp, 0, launch_pad);
   // NOTE: not a safepoint