Revert "Revert "ART: Register allocation and runtime support for try/catch""

The original CL triggered b/24084144, which has been fixed
by Ib72e12a018437c404e82f7ad414554c66a4c6f8c.

This reverts commit 659562aaf133c41b8d90ec9216c07646f0f14362.

Change-Id: Id8980436172457d0fcb276349c4405f7c4110a55
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index 5dbea52..65f41cc 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -225,26 +225,41 @@
 
 uintptr_t ArtMethod::ToNativeQuickPc(const uint32_t dex_pc, bool abort_on_failure) {
   const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
-  MappingTable table(entry_point != nullptr ?
-      GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
-  if (table.TotalSize() == 0) {
-    DCHECK_EQ(dex_pc, 0U);
-    return 0;   // Special no mapping/pc == 0 case
-  }
-  // Assume the caller wants a dex-to-pc mapping so check here first.
-  typedef MappingTable::DexToPcIterator It;
-  for (It cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
-    if (cur.DexPc() == dex_pc) {
-      return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
+  if (IsOptimized(sizeof(void*))) {
+    // Optimized code does not have a mapping table. Search for the dex-to-pc
+    // mapping in stack maps.
+    CodeInfo code_info = GetOptimizedCodeInfo();
+    StackMapEncoding encoding = code_info.ExtractEncoding();
+
+    // Assume the caller needs the mapping for a catch handler. If there are
+    // multiple stack maps for this dex_pc, it will hit the catch stack map first.
+    StackMap stack_map = code_info.GetCatchStackMapForDexPc(dex_pc, encoding);
+    if (stack_map.IsValid()) {
+      return reinterpret_cast<uintptr_t>(entry_point) + stack_map.GetNativePcOffset(encoding);
+    }
+  } else {
+    MappingTable table(entry_point != nullptr ?
+        GetMappingTable(EntryPointToCodePointer(entry_point), sizeof(void*)) : nullptr);
+    if (table.TotalSize() == 0) {
+      DCHECK_EQ(dex_pc, 0U);
+      return 0;   // Special no mapping/pc == 0 case
+    }
+    // Assume the caller wants a dex-to-pc mapping so check here first.
+    typedef MappingTable::DexToPcIterator It;
+    for (It cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
+      if (cur.DexPc() == dex_pc) {
+        return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
+      }
+    }
+    // Now check pc-to-dex mappings.
+    typedef MappingTable::PcToDexIterator It2;
+    for (It2 cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
+      if (cur.DexPc() == dex_pc) {
+        return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
+      }
     }
   }
-  // Now check pc-to-dex mappings.
-  typedef MappingTable::PcToDexIterator It2;
-  for (It2 cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
-    if (cur.DexPc() == dex_pc) {
-      return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
-    }
-  }
+
   if (abort_on_failure) {
     LOG(FATAL) << "Failed to find native offset for dex pc 0x" << std::hex << dex_pc
                << " in " << PrettyMethod(this);
diff --git a/runtime/quick_exception_handler.cc b/runtime/quick_exception_handler.cc
index 60defba..b9d76b4 100644
--- a/runtime/quick_exception_handler.cc
+++ b/runtime/quick_exception_handler.cc
@@ -146,6 +146,107 @@
     // Put exception back in root set with clear throw location.
     self_->SetException(exception_ref.Get());
   }
+  // If the handler is in optimized code, we need to set the catch environment.
+  if (*handler_quick_frame_ != nullptr &&
+      handler_method_ != nullptr &&
+      handler_method_->IsOptimized(sizeof(void*))) {
+    SetCatchEnvironmentForOptimizedHandler(&visitor);
+  }
+}
+
+static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
+  // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
+  // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
+  // distinguish between core/FPU registers and low/high bits on 64-bit.
+  switch (kind) {
+    case DexRegisterLocation::Kind::kConstant:
+    case DexRegisterLocation::Kind::kInStack:
+      // VRegKind is ignored.
+      return VRegKind::kUndefined;
+
+    case DexRegisterLocation::Kind::kInRegister:
+      // Selects core register. For 64-bit registers, selects low 32 bits.
+      return VRegKind::kLongLoVReg;
+
+    case DexRegisterLocation::Kind::kInRegisterHigh:
+      // Selects core register. For 64-bit registers, selects high 32 bits.
+      return VRegKind::kLongHiVReg;
+
+    case DexRegisterLocation::Kind::kInFpuRegister:
+      // Selects FPU register. For 64-bit registers, selects low 32 bits.
+      return VRegKind::kDoubleLoVReg;
+
+    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
+      // Selects FPU register. For 64-bit registers, selects high 32 bits.
+      return VRegKind::kDoubleHiVReg;
+
+    default:
+      LOG(FATAL) << "Unexpected vreg location "
+                 << DexRegisterLocation::PrettyDescriptor(kind);
+      UNREACHABLE();
+  }
+}
+
+void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
+  DCHECK(!is_deoptimization_);
+  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
+  DCHECK(handler_method_ != nullptr && handler_method_->IsOptimized(sizeof(void*)));
+
+  if (kDebugExceptionDelivery) {
+    self_->DumpStack(LOG(INFO) << "Setting catch phis: ");
+  }
+
+  const size_t number_of_vregs = handler_method_->GetCodeItem()->registers_size_;
+  CodeInfo code_info = handler_method_->GetOptimizedCodeInfo();
+  StackMapEncoding encoding = code_info.ExtractEncoding();
+
+  // Find stack map of the throwing instruction.
+  StackMap throw_stack_map =
+      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset(), encoding);
+  DCHECK(throw_stack_map.IsValid());
+  DexRegisterMap throw_vreg_map =
+      code_info.GetDexRegisterMapOf(throw_stack_map, encoding, number_of_vregs);
+
+  // Find stack map of the catch block.
+  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc(), encoding);
+  DCHECK(catch_stack_map.IsValid());
+  DexRegisterMap catch_vreg_map =
+      code_info.GetDexRegisterMapOf(catch_stack_map, encoding, number_of_vregs);
+
+  // Copy values between them.
+  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
+    DexRegisterLocation::Kind catch_location =
+        catch_vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
+    if (catch_location == DexRegisterLocation::Kind::kNone) {
+      continue;
+    }
+    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);
+
+    // Get vreg value from its current location.
+    uint32_t vreg_value;
+    VRegKind vreg_kind = ToVRegKind(throw_vreg_map.GetLocationKind(vreg,
+                                                                   number_of_vregs,
+                                                                   code_info,
+                                                                   encoding));
+    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
+                                                   vreg,
+                                                   vreg_kind,
+                                                   &vreg_value);
+    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
+                            << "method=" << PrettyMethod(stack_visitor->GetMethod()) << ", "
+                            << "dex_pc=" << stack_visitor->GetDexPc() << ", "
+                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";
+
+    // Copy value to the catch phi's stack slot.
+    int32_t slot_offset = catch_vreg_map.GetStackOffsetInBytes(vreg,
+                                                               number_of_vregs,
+                                                               code_info,
+                                                               encoding);
+    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
+    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
+    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
+    *slot_ptr = vreg_value;
+  }
 }
 
 // Prepares deoptimization.
diff --git a/runtime/quick_exception_handler.h b/runtime/quick_exception_handler.h
index 4db95a8..2e05c7e 100644
--- a/runtime/quick_exception_handler.h
+++ b/runtime/quick_exception_handler.h
@@ -49,11 +49,14 @@
   // Deoptimize the stack to the upcall. For every compiled frame, we create a "copy"
   // shadow frame that will be executed with the interpreter.
   void DeoptimizeStack() SHARED_REQUIRES(Locks::mutator_lock_);
-
   // Update the instrumentation stack by removing all methods that will be unwound
   // by the exception being thrown.
   void UpdateInstrumentationStack() SHARED_REQUIRES(Locks::mutator_lock_);
 
+  // Set up environment before delivering an exception to optimized code.
+  void SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor)
+      SHARED_REQUIRES(Locks::mutator_lock_);
+
   // Long jump either to a catch handler or to the upcall.
   NO_RETURN void DoLongJump() SHARED_REQUIRES(Locks::mutator_lock_);
 
diff --git a/runtime/stack.cc b/runtime/stack.cc
index a765a3f..d956f0e 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -325,6 +325,10 @@
 
 bool StackVisitor::GetRegisterIfAccessible(uint32_t reg, VRegKind kind, uint32_t* val) const {
   const bool is_float = (kind == kFloatVReg) || (kind == kDoubleLoVReg) || (kind == kDoubleHiVReg);
+
+  // X86 float registers are 64-bit and the logic below does not apply.
+  DCHECK(!is_float || kRuntimeISA != InstructionSet::kX86);
+
   if (!IsAccessibleRegister(reg, is_float)) {
     return false;
   }
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 07b79b5..a15a081 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -1115,7 +1115,7 @@
     region_.StoreUnaligned<NumberOfStackMapsType>(kNumberOfStackMapsOffset, number_of_stack_maps);
   }
 
-  // Get the size all the stack maps of this CodeInfo object, in bytes.
+  // Get the size of all the stack maps of this CodeInfo object, in bytes.
   size_t GetStackMapsSize(const StackMapEncoding& encoding) const {
     return encoding.ComputeStackMapSize() * GetNumberOfStackMaps();
   }
@@ -1174,9 +1174,23 @@
     return StackMap();
   }
 
+  // Searches the stack map list backwards because catch stack maps are stored
+  // at the end.
+  StackMap GetCatchStackMapForDexPc(uint32_t dex_pc, const StackMapEncoding& encoding) const {
+    for (size_t i = GetNumberOfStackMaps(); i > 0; --i) {
+      StackMap stack_map = GetStackMapAt(i - 1, encoding);
+      if (stack_map.GetDexPc(encoding) == dex_pc) {
+        return stack_map;
+      }
+    }
+    return StackMap();
+  }
+
   StackMap GetStackMapForNativePcOffset(uint32_t native_pc_offset,
                                         const StackMapEncoding& encoding) const {
-    // TODO: stack maps are sorted by native pc, we can do a binary search.
+    // TODO: Safepoint stack maps are sorted by native_pc_offset but catch stack
+    //       maps are not. If we knew that the method does not have try/catch,
+    //       we could do binary search.
     for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
       StackMap stack_map = GetStackMapAt(i, encoding);
       if (stack_map.GetNativePcOffset(encoding) == native_pc_offset) {