ART: Use reinterpret_cast{32,64}<> when appropriate.
Also fix UnstartedRuntime's null-argument checks in all
functions where reinterpret_cast32<> is now used.
This is a follow-up to
https://android-review.googlesource.com/783607 .
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: Pixel 2 XL boots.
Test: m test-art-target-gtest
Test: testrunner.py --target --optimizing
Bug: 117427174
Change-Id: I58f8ad59f70e3fbb1d06aef419cd26555706fa65
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 0092b97..b51fc30 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -45,6 +45,7 @@
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/bit_utils.h"
+#include "base/casts.h"
#include "base/file_utils.h"
#include "base/memory_tool.h"
#include "base/mutex.h"
@@ -500,7 +501,7 @@
Thread* Thread::FromManagedThread(const ScopedObjectAccessAlreadyRunnable& soa,
ObjPtr<mirror::Object> thread_peer) {
ArtField* f = jni::DecodeArtField(WellKnownClasses::java_lang_Thread_nativePeer);
- Thread* result = reinterpret_cast<Thread*>(static_cast<uintptr_t>(f->GetLong(thread_peer)));
+ Thread* result = reinterpret_cast64<Thread*>(f->GetLong(thread_peer));
// Sanity check that if we have a result it is either suspended or we hold the thread_list_lock_
// to stop it from going away.
if (kIsDebugBuild) {
@@ -907,7 +908,7 @@
}
self->GetJniEnv()->SetLongField(thread_peer,
WellKnownClasses::java_lang_Thread_nativePeer,
- reinterpret_cast<jlong>(self));
+ reinterpret_cast64<jlong>(self));
return true;
};
return Attach(thread_name, as_daemon, set_peer_action);
@@ -949,8 +950,9 @@
Thread* self = this;
DCHECK_EQ(self, Thread::Current());
- env->SetLongField(peer.get(), WellKnownClasses::java_lang_Thread_nativePeer,
- reinterpret_cast<jlong>(self));
+ env->SetLongField(peer.get(),
+ WellKnownClasses::java_lang_Thread_nativePeer,
+ reinterpret_cast64<jlong>(self));
ScopedObjectAccess soa(self);
StackHandleScope<1> hs(self);
@@ -3567,8 +3569,8 @@
if (!m->IsNative() && !m->IsRuntimeMethod() && (!m->IsProxyMethod() || m->IsConstructor())) {
const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
DCHECK(method_header->IsOptimized());
- StackReference<mirror::Object>* vreg_base = reinterpret_cast<StackReference<mirror::Object>*>(
- reinterpret_cast<uintptr_t>(cur_quick_frame));
+ StackReference<mirror::Object>* vreg_base =
+ reinterpret_cast<StackReference<mirror::Object>*>(cur_quick_frame);
uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
CodeInfo code_info(method_header, kPrecise
? CodeInfo::DecodeFlags::AllTables // We will need dex register maps.