ART: Enable basic optimizations for try/catch

Generating code for methods with try/catch requires that at least the
instruction simplifier has run, so that redundant suspend checks are
removed. This patch enables the first group of optimizations when
try/catch is present.

Enabled optimizations:
1) IntrinsicsRecognizer
Does not modify the graph, only sets HInvoke::intrinsic_.

2) ConstantFolding
Does not deal with throwing instructions.

3) InstructionSimplifier
May remove a throwing instruction (e.g. the LoadClass in VisitCheckCast)
or turn a throwing instruction into a non-throwing one (ArraySet). The
catch phi inputs corresponding to such instructions are not removed,
but correctness is preserved.

4) ReferenceTypePropagation
Does not modify the graph, only sets type properties. Typing of
LoadException from the catch handler's type information was added (see
the example after this list).

5) DeadCodeElimination
Removing individual instructions is fine (same as 3). Removal of dead
blocks was disabled for try/catch.
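
As a concrete illustration, the passes above now run on methods such
as the following (a hypothetical example, not part of this patch):

  static int castAndMeasure(Object o) {
    try {
      // The CheckCast below may throw into the catch handler.
      return ((String) o).length();
    } catch (ClassCastException e) {
      // LoadException is now typed as ClassCastException (see 4) and
      // known to be non-null, so the NullCheck before the call below
      // can be simplified away.
      return e.toString().length();
    }
  }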

Change-Id: I2722c3229eb8aaf326391e07f522dbf5186774b8
diff --git a/compiler/optimizing/dead_code_elimination.cc b/compiler/optimizing/dead_code_elimination.cc
index 5de629d..78470db 100644
--- a/compiler/optimizing/dead_code_elimination.cc
+++ b/compiler/optimizing/dead_code_elimination.cc
@@ -142,7 +142,10 @@
 }
 
 void HDeadCodeElimination::Run() {
-  RemoveDeadBlocks();
+  if (!graph_->HasTryCatch()) {
+    // TODO: Update dead block elimination and enable for try/catch.
+    RemoveDeadBlocks();
+  }
   SsaRedundantPhiElimination(graph_).Run();
   RemoveDeadInstructions();
 }
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 64c680c..4332d7e 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -346,6 +346,16 @@
   }
 }
 
+bool HGraph::HasTryCatch() const {
+  for (size_t i = 0, e = blocks_.Size(); i < e; ++i) {
+    HBasicBlock* block = blocks_.Get(i);
+    if (block != nullptr && (block->IsTryBlock() || block->IsCatchBlock())) {
+      return true;
+    }
+  }
+  return false;
+}
+
 void HGraph::SimplifyCFG() {
   // Simplify the CFG for future analysis, and code generation:
   // (1): Split critical edges.
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index c4f64b4..c8b22a8 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -350,6 +350,9 @@
     return instruction_set_;
   }
 
+  // TODO: Remove once the full compilation pipeline is enabled for try/catch.
+  bool HasTryCatch() const;
+
  private:
   void VisitBlockForDominatorTree(HBasicBlock* block,
                                   HBasicBlock* predecessor,
@@ -4471,6 +4474,8 @@
  public:
   HLoadException() : HExpression(Primitive::kPrimNot, SideEffects::None()) {}
 
+  bool CanBeNull() const OVERRIDE { return false; }
+
   DECLARE_INSTRUCTION(LoadException);
 
  private:
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 866e717..bd40c9f 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -359,11 +359,6 @@
       || instruction_set == kX86_64;
 }
 
-static bool CanOptimize(const DexFile::CodeItem& code_item) {
-  // TODO: We currently cannot optimize methods with try/catch.
-  return code_item.tries_size_ == 0;
-}
-
 static void RunOptimizations(HOptimization* optimizations[],
                              size_t length,
                              PassObserver* pass_observer) {
@@ -470,6 +465,13 @@
 
   RunOptimizations(optimizations1, arraysize(optimizations1), pass_observer);
 
+  if (graph->HasTryCatch()) {
+    // TODO: Update the optimizations below to work correctly under try/catch
+    //       semantics. The optimizations above suffice for running codegen
+    //       in the meanwhile.
+    return;
+  }
+
   MaybeRunInliner(graph, driver, stats, dex_compilation_unit, pass_observer, handles);
 
   HOptimization* optimizations2[] = {
@@ -529,6 +531,10 @@
   RunOptimizations(graph, compiler_driver, compilation_stats_.get(),
                    dex_compilation_unit, pass_observer, &handles);
 
+  if (graph->HasTryCatch()) {
+    return nullptr;
+  }
+
   AllocateRegisters(graph, codegen, pass_observer);
 
   CodeVectorAllocator allocator;
@@ -717,7 +723,6 @@
     }
   }
 
-  bool can_optimize = CanOptimize(*code_item);
   bool can_allocate_registers = RegisterAllocator::CanAllocateRegistersFor(*graph, instruction_set);
 
   // `run_optimizations_` is set explicitly (either through a compiler filter
@@ -738,16 +743,12 @@
       }
     }
 
-    if (can_optimize) {
-      return CompileOptimized(graph,
-                              codegen.get(),
-                              compiler_driver,
-                              dex_compilation_unit,
-                              &pass_observer);
-    }
-  }
-
-  if (shouldOptimize && can_allocate_registers) {
+    return CompileOptimized(graph,
+                            codegen.get(),
+                            compiler_driver,
+                            dex_compilation_unit,
+                            &pass_observer);
+  } else if (shouldOptimize && can_allocate_registers) {
     LOG(FATAL) << "Could not allocate registers in optimizing compiler";
     UNREACHABLE();
   } else if (can_use_baseline) {
@@ -755,8 +756,6 @@
 
     if (!run_optimizations_) {
       MaybeRecordStat(MethodCompilationStat::kNotOptimizedDisabled);
-    } else if (!can_optimize) {
-      MaybeRecordStat(MethodCompilationStat::kNotOptimizedTryCatch);
     } else if (!can_allocate_registers) {
       MaybeRecordStat(MethodCompilationStat::kNotOptimizedRegisterAllocator);
     }
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index 1349df9..5d02948 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -30,12 +30,14 @@
              GrowableArray<HInstruction*>* worklist,
              ReferenceTypeInfo::TypeHandle object_class_handle,
              ReferenceTypeInfo::TypeHandle class_class_handle,
-             ReferenceTypeInfo::TypeHandle string_class_handle)
+             ReferenceTypeInfo::TypeHandle string_class_handle,
+             ReferenceTypeInfo::TypeHandle throwable_class_handle)
     : HGraphDelegateVisitor(graph),
       handles_(handles),
       object_class_handle_(object_class_handle),
       class_class_handle_(class_class_handle),
       string_class_handle_(string_class_handle),
+      throwable_class_handle_(throwable_class_handle),
       worklist_(worklist) {}
 
   void VisitNullConstant(HNullConstant* null_constant) OVERRIDE;
@@ -43,6 +45,7 @@
   void VisitLoadClass(HLoadClass* load_class) OVERRIDE;
   void VisitClinitCheck(HClinitCheck* clinit_check) OVERRIDE;
   void VisitLoadString(HLoadString* instr) OVERRIDE;
+  void VisitLoadException(HLoadException* instr) OVERRIDE;
   void VisitNewArray(HNewArray* instr) OVERRIDE;
   void VisitParameterValue(HParameterValue* instr) OVERRIDE;
   void UpdateFieldAccessTypeInfo(HInstruction* instr, const FieldInfo& info);
@@ -64,6 +67,7 @@
   ReferenceTypeInfo::TypeHandle object_class_handle_;
   ReferenceTypeInfo::TypeHandle class_class_handle_;
   ReferenceTypeInfo::TypeHandle string_class_handle_;
+  ReferenceTypeInfo::TypeHandle throwable_class_handle_;
   GrowableArray<HInstruction*>* worklist_;
 
   static constexpr size_t kDefaultWorklistSize = 8;
@@ -79,12 +83,15 @@
   object_class_handle_ = handles_->NewHandle(linker->GetClassRoot(ClassLinker::kJavaLangObject));
   string_class_handle_ = handles_->NewHandle(linker->GetClassRoot(ClassLinker::kJavaLangString));
   class_class_handle_ = handles_->NewHandle(linker->GetClassRoot(ClassLinker::kJavaLangClass));
+  throwable_class_handle_ =
+      handles_->NewHandle(linker->GetClassRoot(ClassLinker::kJavaLangThrowable));
 
   if (kIsDebugBuild) {
     ScopedObjectAccess soa(Thread::Current());
     DCHECK(ReferenceTypeInfo::IsValidHandle(object_class_handle_));
     DCHECK(ReferenceTypeInfo::IsValidHandle(class_class_handle_));
     DCHECK(ReferenceTypeInfo::IsValidHandle(string_class_handle_));
+    DCHECK(ReferenceTypeInfo::IsValidHandle(throwable_class_handle_));
   }
 }
 
@@ -129,7 +136,8 @@
                      &worklist_,
                      object_class_handle_,
                      class_class_handle_,
-                     string_class_handle_);
+                     string_class_handle_,
+                     throwable_class_handle_);
   // Handle Phis first as there might be instructions in the same block who depend on them.
   for (HInstructionIterator it(block->GetPhis()); !it.Done(); it.Advance()) {
     VisitPhi(it.Current()->AsPhi());
@@ -459,6 +467,21 @@
   instr->SetReferenceTypeInfo(ReferenceTypeInfo::Create(string_class_handle_, /* is_exact */ true));
 }
 
+void RTPVisitor::VisitLoadException(HLoadException* instr) {
+  DCHECK(instr->GetBlock()->IsCatchBlock());
+  TryCatchInformation* catch_info = instr->GetBlock()->GetTryCatchInformation();
+
+  if (catch_info->IsCatchAllTypeIndex()) {
+    instr->SetReferenceTypeInfo(ReferenceTypeInfo::Create(throwable_class_handle_,
+                                /* is_exact */ false));
+  } else {
+    UpdateReferenceTypeInfo(instr,
+                            catch_info->GetCatchTypeIndex(),
+                            catch_info->GetCatchDexFile(),
+                            /* is_exact */ false);
+  }
+}
+
 void RTPVisitor::VisitNullCheck(HNullCheck* instr) {
   ScopedObjectAccess soa(Thread::Current());
   ReferenceTypeInfo parent_rti = instr->InputAt(0)->GetReferenceTypeInfo();
diff --git a/compiler/optimizing/reference_type_propagation.h b/compiler/optimizing/reference_type_propagation.h
index 14d4a82..62f6ab8 100644
--- a/compiler/optimizing/reference_type_propagation.h
+++ b/compiler/optimizing/reference_type_propagation.h
@@ -62,6 +62,7 @@
   ReferenceTypeInfo::TypeHandle object_class_handle_;
   ReferenceTypeInfo::TypeHandle class_class_handle_;
   ReferenceTypeInfo::TypeHandle string_class_handle_;
+  ReferenceTypeInfo::TypeHandle throwable_class_handle_;
 
   static constexpr size_t kDefaultWorklistSize = 8;