Move most of art::Thread to ObjPtr
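
Replace raw mirror pointers (mirror::Throwable*, mirror::Object*, and
friends) with ObjPtr<> in most of art::Thread, the quick exception
handler and the quick trampoline entrypoints. Callers that still need a
raw pointer unwrap with Ptr().

Sketch of the pattern, drawn from the hunks below rather than any new
API:

  // was: mirror::Throwable* exception = GetException();
  ObjPtr<mirror::Throwable> exception = GetException();
  // unwrap where a raw pointer is still required:
  instrumentation->ExceptionCaughtEvent(this, exception.Ptr());
  // down_cast<mirror::PointerArray*>(...) becomes:
  ObjPtr<mirror::PointerArray>::DownCast(MakeObjPtr(trace_->Get(0)));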

Bug: 31113334

Test: test-art-host

Change-Id: I73d0de3c875d3ec3d732034fdc961dae79ef4070
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index bf1d4ea..a3e5b55 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -705,7 +705,7 @@
       QuickExceptionHandler::DumpFramesWithType(self, true);
     }
 
-    mirror::Throwable* pending_exception = nullptr;
+    ObjPtr<mirror::Throwable> pending_exception;
     bool from_code = false;
     self->PopDeoptimizationContext(&result, &pending_exception, /* out */ &from_code);
 
diff --git a/runtime/quick_exception_handler.cc b/runtime/quick_exception_handler.cc
index a81458f..b809c3e 100644
--- a/runtime/quick_exception_handler.cc
+++ b/runtime/quick_exception_handler.cc
@@ -140,7 +140,7 @@
   DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
 };
 
-void QuickExceptionHandler::FindCatch(mirror::Throwable* exception) {
+void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception) {
   DCHECK(!is_deoptimization_);
   if (kDebugExceptionDelivery) {
     mirror::String* msg = exception->GetDetailMessage();
diff --git a/runtime/quick_exception_handler.h b/runtime/quick_exception_handler.h
index 5592126..3ead7db 100644
--- a/runtime/quick_exception_handler.h
+++ b/runtime/quick_exception_handler.h
@@ -46,7 +46,7 @@
   }
 
   // Find the catch handler for the given exception.
-  void FindCatch(mirror::Throwable* exception) REQUIRES_SHARED(Locks::mutator_lock_);
+  void FindCatch(ObjPtr<mirror::Throwable> exception) REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Deoptimize the stack to the upcall/some code that's not deoptimizeable. For
   // every compiled frame, we create a "copy" shadow frame that will be executed
diff --git a/runtime/thread.cc b/runtime/thread.cc
index aff12ff..a4f0631 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -154,18 +154,18 @@
   DeoptimizationContextRecord(const JValue& ret_val,
                               bool is_reference,
                               bool from_code,
-                              mirror::Throwable* pending_exception,
+                              ObjPtr<mirror::Throwable> pending_exception,
                               DeoptimizationContextRecord* link)
       : ret_val_(ret_val),
         is_reference_(is_reference),
         from_code_(from_code),
-        pending_exception_(pending_exception),
+        pending_exception_(pending_exception.Ptr()),
         link_(link) {}
 
   JValue GetReturnValue() const { return ret_val_; }
   bool IsReference() const { return is_reference_; }
   bool GetFromCode() const { return from_code_; }
-  mirror::Throwable* GetPendingException() const { return pending_exception_; }
+  ObjPtr<mirror::Throwable> GetPendingException() const { return pending_exception_; }
   DeoptimizationContextRecord* GetLink() const { return link_; }
   mirror::Object** GetReturnValueAsGCRoot() {
     DCHECK(is_reference_);
@@ -219,7 +219,7 @@
 void Thread::PushDeoptimizationContext(const JValue& return_value,
                                        bool is_reference,
                                        bool from_code,
-                                       mirror::Throwable* exception) {
+                                       ObjPtr<mirror::Throwable> exception) {
   DeoptimizationContextRecord* record = new DeoptimizationContextRecord(
       return_value,
       is_reference,
@@ -230,7 +230,7 @@
 }
 
 void Thread::PopDeoptimizationContext(JValue* result,
-                                      mirror::Throwable** exception,
+                                      ObjPtr<mirror::Throwable>* exception,
                                       bool* from_code) {
   AssertHasDeoptimizationContext();
   DeoptimizationContextRecord* record = tlsPtr_.deoptimization_context_stack;
@@ -434,7 +434,7 @@
     Dbg::PostThreadStart(self);
 
     // Invoke the 'run' method of our java.lang.Thread.
-    mirror::Object* receiver = self->tlsPtr_.opeer;
+    ObjPtr<mirror::Object> receiver = self->tlsPtr_.opeer;
     jmethodID mid = WellKnownClasses::java_lang_Thread_run;
     ScopedLocalRef<jobject> ref(soa.Env(), soa.AddLocalReference<jobject>(receiver));
     InvokeVirtualOrInterfaceWithJValues(soa, ref.get(), mid, nullptr);
@@ -446,7 +446,7 @@
 }
 
 Thread* Thread::FromManagedThread(const ScopedObjectAccessAlreadyRunnable& soa,
-                                  mirror::Object* thread_peer) {
+                                  ObjPtr<mirror::Object> thread_peer) {
   ArtField* f = jni::DecodeArtField(WellKnownClasses::java_lang_Thread_nativePeer);
   Thread* result = reinterpret_cast<Thread*>(static_cast<uintptr_t>(f->GetLong(thread_peer)));
   // Sanity check that if we have a result it is either suspended or we hold the thread_list_lock_
@@ -1573,8 +1573,8 @@
     }
     m = m->GetInterfaceMethodIfProxy(kRuntimePointerSize);
     const int kMaxRepetition = 3;
-    mirror::Class* c = m->GetDeclaringClass();
-    mirror::DexCache* dex_cache = c->GetDexCache();
+    ObjPtr<mirror::Class> c = m->GetDeclaringClass();
+    ObjPtr<mirror::DexCache> dex_cache = c->GetDexCache();
     int line_number = -1;
     if (dex_cache != nullptr) {  // be tolerant of bad input
       const DexFile* dex_file = dex_cache->GetDexFile();
@@ -1860,17 +1860,15 @@
 void Thread::AssertNoPendingException() const {
   if (UNLIKELY(IsExceptionPending())) {
     ScopedObjectAccess soa(Thread::Current());
-    mirror::Throwable* exception = GetException();
-    LOG(FATAL) << "No pending exception expected: " << exception->Dump();
+    LOG(FATAL) << "No pending exception expected: " << GetException()->Dump();
   }
 }
 
 void Thread::AssertNoPendingExceptionForNewException(const char* msg) const {
   if (UNLIKELY(IsExceptionPending())) {
     ScopedObjectAccess soa(Thread::Current());
-    mirror::Throwable* exception = GetException();
     LOG(FATAL) << "Throwing new exception '" << msg << "' with unexpected pending exception: "
-        << exception->Dump();
+        << GetException()->Dump();
   }
 }
 
@@ -2213,7 +2211,7 @@
     // class of the ArtMethod pointers.
     ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
     StackHandleScope<1> hs(self_);
-    mirror::Class* array_class = class_linker->GetClassRoot(ClassLinker::kObjectArrayClass);
+    ObjPtr<mirror::Class> array_class = class_linker->GetClassRoot(ClassLinker::kObjectArrayClass);
     // The first element is the methods and dex pc array, the other elements are declaring classes
     // for the methods to ensure classes in the stack trace don't get unloaded.
     Handle<mirror::ObjectArray<mirror::Object>> trace(
@@ -2225,7 +2223,8 @@
       self_->AssertPendingOOMException();
       return false;
     }
-    mirror::PointerArray* methods_and_pcs = class_linker->AllocPointerArray(self_, depth * 2);
+    ObjPtr<mirror::PointerArray> methods_and_pcs =
+        class_linker->AllocPointerArray(self_, depth * 2);
     const char* last_no_suspend_cause =
         self_->StartAssertNoThreadSuspension("Building internal stack trace");
     if (methods_and_pcs == nullptr) {
@@ -2255,7 +2254,7 @@
     if (m->IsRuntimeMethod()) {
       return true;  // Ignore runtime frames (in particular callee save).
     }
-    mirror::PointerArray* trace_methods_and_pcs = GetTraceMethodsAndPCs();
+    ObjPtr<mirror::PointerArray> trace_methods_and_pcs = GetTraceMethodsAndPCs();
     trace_methods_and_pcs->SetElementPtrSize<kTransactionActive>(count_, m, pointer_size_);
     trace_methods_and_pcs->SetElementPtrSize<kTransactionActive>(
         trace_methods_and_pcs->GetLength() / 2 + count_,
@@ -2268,8 +2267,8 @@
     return true;
   }
 
-  mirror::PointerArray* GetTraceMethodsAndPCs() const REQUIRES_SHARED(Locks::mutator_lock_) {
-    return down_cast<mirror::PointerArray*>(trace_->Get(0));
+  ObjPtr<mirror::PointerArray> GetTraceMethodsAndPCs() const REQUIRES_SHARED(Locks::mutator_lock_) {
+    return ObjPtr<mirror::PointerArray>::DownCast(MakeObjPtr(trace_->Get(0)));
   }
 
   mirror::ObjectArray<mirror::Object>* GetInternalStackTrace() const {
@@ -2311,7 +2310,7 @@
   build_trace_visitor.WalkStack();
   mirror::ObjectArray<mirror::Object>* trace = build_trace_visitor.GetInternalStackTrace();
   if (kIsDebugBuild) {
-    mirror::PointerArray* trace_methods = build_trace_visitor.GetTraceMethodsAndPCs();
+    ObjPtr<mirror::PointerArray> trace_methods = build_trace_visitor.GetTraceMethodsAndPCs();
     // Second half of trace_methods is dex PCs.
     for (uint32_t i = 0; i < static_cast<uint32_t>(trace_methods->GetLength() / 2); ++i) {
       auto* method = trace_methods->GetElementPtrSize<ArtMethod*>(
@@ -2326,7 +2325,7 @@
 template jobject Thread::CreateInternalStackTrace<true>(
     const ScopedObjectAccessAlreadyRunnable& soa) const;
 
-bool Thread::IsExceptionThrownByCurrentMethod(mirror::Throwable* exception) const {
+bool Thread::IsExceptionThrownByCurrentMethod(ObjPtr<mirror::Throwable> exception) const {
   CountStackDepthVisitor count_visitor(const_cast<Thread*>(this));
   count_visitor.WalkStack();
   return count_visitor.GetDepth() == exception->GetStackDepth();
@@ -2368,12 +2367,12 @@
   }
 
   for (int32_t i = 0; i < depth; ++i) {
-    mirror::ObjectArray<mirror::Object>* decoded_traces =
+    ObjPtr<mirror::ObjectArray<mirror::Object>> decoded_traces =
         soa.Decode<mirror::Object>(internal)->AsObjectArray<mirror::Object>();
     // Methods and dex PC trace is element 0.
     DCHECK(decoded_traces->Get(0)->IsIntArray() || decoded_traces->Get(0)->IsLongArray());
-    mirror::PointerArray* const method_trace =
-        down_cast<mirror::PointerArray*>(decoded_traces->Get(0));
+    ObjPtr<mirror::PointerArray> const method_trace =
+        ObjPtr<mirror::PointerArray>::DownCast(MakeObjPtr(decoded_traces->Get(0)));
     // Prepare parameters for StackTraceElement(String cls, String method, String file, int line)
     ArtMethod* method = method_trace->GetElementPtrSize<ArtMethod*>(i, kRuntimePointerSize);
     uint32_t dex_pc = method_trace->GetElementPtrSize<uint32_t>(
@@ -2415,8 +2414,11 @@
     if (method_name_object.Get() == nullptr) {
       return nullptr;
     }
-    mirror::StackTraceElement* obj = mirror::StackTraceElement::Alloc(
-        soa.Self(), class_name_object, method_name_object, source_name_object, line_number);
+    ObjPtr<mirror::StackTraceElement> obj = mirror::StackTraceElement::Alloc(soa.Self(),
+                                                                            class_name_object,
+                                                                            method_name_object,
+                                                                            source_name_object,
+                                                                            line_number);
     if (obj == nullptr) {
       return nullptr;
     }
@@ -2447,7 +2449,7 @@
   ThrowNewWrappedException(exception_class_descriptor, msg);
 }
 
-static mirror::ClassLoader* GetCurrentClassLoader(Thread* self)
+static ObjPtr<mirror::ClassLoader> GetCurrentClassLoader(Thread* self)
     REQUIRES_SHARED(Locks::mutator_lock_) {
   ArtMethod* method = self->GetCurrentMethod(nullptr);
   return method != nullptr
@@ -2794,7 +2796,7 @@
 
 void Thread::QuickDeliverException() {
   // Get exception from thread.
-  mirror::Throwable* exception = GetException();
+  ObjPtr<mirror::Throwable> exception = GetException();
   CHECK(exception != nullptr);
   if (exception == GetDeoptimizationException()) {
     artDeoptimize(this);
@@ -2807,8 +2809,8 @@
       IsExceptionThrownByCurrentMethod(exception)) {
     // Instrumentation may cause GC so keep the exception object safe.
     StackHandleScope<1> hs(this);
-    HandleWrapper<mirror::Throwable> h_exception(hs.NewHandleWrapper(&exception));
-    instrumentation->ExceptionCaughtEvent(this, exception);
+    HandleWrapperObjPtr<mirror::Throwable> h_exception(hs.NewHandleWrapper(&exception));
+    instrumentation->ExceptionCaughtEvent(this, exception.Ptr());
   }
   // Does instrumentation need to deoptimize the stack?
   // Note: we do this *after* reporting the exception to instrumentation in case it
@@ -2870,7 +2872,7 @@
     dex_pc_ = GetDexPc(abort_on_error_);
     return false;
   }
-  mirror::Object* this_object_;
+  ObjPtr<mirror::Object> this_object_;
   ArtMethod* method_;
   uint32_t dex_pc_;
   const bool abort_on_error_;
@@ -2885,11 +2887,8 @@
   return visitor.method_;
 }
 
-bool Thread::HoldsLock(mirror::Object* object) const {
-  if (object == nullptr) {
-    return false;
-  }
-  return object->GetLockOwnerThreadId() == GetThreadId();
+bool Thread::HoldsLock(ObjPtr<mirror::Object> object) const {
+  return object != nullptr && object->GetLockOwnerThreadId() == GetThreadId();
 }
 
 // RootVisitor parameters are: (const Object* obj, size_t vreg, const StackVisitor* visitor).
@@ -2945,7 +2944,7 @@
   void VisitDeclaringClass(ArtMethod* method)
       REQUIRES_SHARED(Locks::mutator_lock_)
       NO_THREAD_SAFETY_ANALYSIS {
-    mirror::Class* klass = method->GetDeclaringClassUnchecked<kWithoutReadBarrier>();
+    ObjPtr<mirror::Class> klass = method->GetDeclaringClassUnchecked<kWithoutReadBarrier>();
     // klass can be null for runtime methods.
     if (klass != nullptr) {
       if (kVerifyImageObjectsMarked) {
@@ -2954,10 +2953,10 @@
                                                                                 /*fail_ok*/true);
         if (space != nullptr && space->IsImageSpace()) {
           bool failed = false;
-          if (!space->GetLiveBitmap()->Test(klass)) {
+          if (!space->GetLiveBitmap()->Test(klass.Ptr())) {
             failed = true;
             LOG(FATAL_WITHOUT_ABORT) << "Unmarked object in image " << *space;
-          } else if (!heap->GetLiveBitmap()->Test(klass)) {
+          } else if (!heap->GetLiveBitmap()->Test(klass.Ptr())) {
             failed = true;
             LOG(FATAL_WITHOUT_ABORT) << "Unmarked object in image through live bitmap " << *space;
           }
@@ -2965,17 +2964,17 @@
             GetThread()->Dump(LOG_STREAM(FATAL_WITHOUT_ABORT));
             space->AsImageSpace()->DumpSections(LOG_STREAM(FATAL_WITHOUT_ABORT));
             LOG(FATAL_WITHOUT_ABORT) << "Method@" << method->GetDexMethodIndex() << ":" << method
-                                     << " klass@" << klass;
+                                     << " klass@" << klass.Ptr();
             // Pretty info last in case it crashes.
             LOG(FATAL) << "Method " << method->PrettyMethod() << " klass "
                        << klass->PrettyClass();
           }
         }
       }
-      mirror::Object* new_ref = klass;
+      mirror::Object* new_ref = klass.Ptr();
       visitor_(&new_ref, -1, this);
       if (new_ref != klass) {
-        method->CASDeclaringClass(klass, new_ref->AsClass());
+        method->CASDeclaringClass(klass.Ptr(), new_ref->AsClass());
       }
     }
   }
@@ -3367,7 +3366,7 @@
   ClearException();
   ShadowFrame* shadow_frame =
       PopStackedShadowFrame(StackedShadowFrameType::kDeoptimizationShadowFrame);
-  mirror::Throwable* pending_exception = nullptr;
+  ObjPtr<mirror::Throwable> pending_exception;
   bool from_code = false;
   PopDeoptimizationContext(result, &pending_exception, &from_code);
   SetTopOfStack(nullptr);
diff --git a/runtime/thread.h b/runtime/thread.h
index 411d85f..c7acfc4 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -177,7 +177,7 @@
   void CheckEmptyCheckpoint() REQUIRES_SHARED(Locks::mutator_lock_);
 
   static Thread* FromManagedThread(const ScopedObjectAccessAlreadyRunnable& ts,
-                                   mirror::Object* thread_peer)
+                                   ObjPtr<mirror::Object> thread_peer)
       REQUIRES(Locks::thread_list_lock_, !Locks::thread_suspend_count_lock_)
       REQUIRES_SHARED(Locks::mutator_lock_);
   static Thread* FromManagedThread(const ScopedObjectAccessAlreadyRunnable& ts, jobject thread)
@@ -312,7 +312,7 @@
 
   size_t NumberOfHeldMutexes() const;
 
-  bool HoldsLock(mirror::Object*) const REQUIRES_SHARED(Locks::mutator_lock_);
+  bool HoldsLock(ObjPtr<mirror::Object> object) const REQUIRES_SHARED(Locks::mutator_lock_);
 
   /*
    * Changes the priority of this thread to match that of the java.lang.Thread object.
@@ -413,7 +413,7 @@
 
   // Returns whether the given exception was thrown by the current Java method being executed
   // (Note that this includes native Java methods).
-  bool IsExceptionThrownByCurrentMethod(mirror::Throwable* exception) const
+  bool IsExceptionThrownByCurrentMethod(ObjPtr<mirror::Throwable> exception) const
       REQUIRES_SHARED(Locks::mutator_lock_);
 
   void SetTopOfStack(ArtMethod** top_method) {
@@ -925,9 +925,11 @@
   void PushDeoptimizationContext(const JValue& return_value,
                                  bool is_reference,
                                  bool from_code,
-                                 mirror::Throwable* exception)
+                                 ObjPtr<mirror::Throwable> exception)
       REQUIRES_SHARED(Locks::mutator_lock_);
-  void PopDeoptimizationContext(JValue* result, mirror::Throwable** exception, bool* from_code)
+  void PopDeoptimizationContext(JValue* result,
+                                ObjPtr<mirror::Throwable>* exception,
+                                bool* from_code)
       REQUIRES_SHARED(Locks::mutator_lock_);
   void AssertHasDeoptimizationContext()
       REQUIRES_SHARED(Locks::mutator_lock_);