/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter.h"

#include <limits>
#include <string_view>

#include "common_dex_operations.h"
#include "common_throws.h"
#include "dex/dex_file_types.h"
#include "interpreter_common.h"
#include "interpreter_mterp_impl.h"
#include "interpreter_switch_impl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jvalue-inl.h"
#include "mirror/string-inl.h"
#include "mterp/mterp.h"
#include "nativehelper/scoped_local_ref.h"
#include "scoped_thread_state_change-inl.h"
#include "shadow_frame-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "unstarted_runtime.h"

namespace art {
namespace interpreter {

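// Reinterprets a raw 32-bit argument word from the interpreter's argument array as a managed
// object pointer.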
ALWAYS_INLINE static ObjPtr<mirror::Object> ObjArg(uint32_t arg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return ObjPtr<mirror::Object>(reinterpret_cast<mirror::Object*>(arg));
}

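// Hand-rolled dispatch into a native method from the interpreter, covering only the small fixed
// set of shortys handled below (anything else aborts with LOG(FATAL)). Per the callers' comments,
// this path is only expected during testing and image writing.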
static void InterpreterJni(Thread* self,
                           ArtMethod* method,
                           std::string_view shorty,
                           ObjPtr<mirror::Object> receiver,
                           uint32_t* args,
                           JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // TODO: The following enters JNI code using a typedef-ed function rather than the JNI compiler;
  //       it should be removed and JNI compiled stubs used instead.
  ScopedObjectAccessUnchecked soa(self);
  if (method->IsStatic()) {
    if (shorty == "L") {
      using fntype = jobject(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "V") {
      using fntype = void(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get());
    } else if (shorty == "Z") {
      using fntype = jboolean(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get()));
    } else if (shorty == "BI") {
      using fntype = jbyte(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetB(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "II") {
      using fntype = jint(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "LL") {
      using fntype = jobject(JNIEnv*, jclass, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get(), arg0.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "IIZ") {
      using fntype = jint(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "ILI") {
      using fntype = jint(JNIEnv*, jclass, jobject, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(
          method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
    } else if (shorty == "SIZ") {
      using fntype = jshort(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn =
          reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "VIZ") {
      using fntype = void(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], args[1]);
    } else if (shorty == "ZLL") {
      using fntype = jboolean(JNIEnv*, jclass, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
    } else if (shorty == "ZILL") {
      using fntype = jboolean(JNIEnv*, jclass, jint, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
    } else if (shorty == "VILII") {
      using fntype = void(JNIEnv*, jclass, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
    } else if (shorty == "VLILII") {
      using fntype = void(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
    } else {
      LOG(FATAL) << "Do something with static native method: " << method->PrettyMethod()
                 << " shorty: " << shorty;
    }
  } else {
    if (shorty == "L") {
      using fntype = jobject(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "V") {
      using fntype = void(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), rcvr.get());
    } else if (shorty == "LL") {
      using fntype = jobject(JNIEnv*, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get(), arg0.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
      ScopedThreadStateChange tsc(self, kNative);
    } else if (shorty == "III") {
      using fntype = jint(JNIEnv*, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
    } else {
      LOG(FATAL) << "Do something with native method: " << method->PrettyMethod()
                 << " shorty: " << shorty;
    }
  }
}

enum InterpreterImplKind {
  kSwitchImplKind,            // Switch-based interpreter implementation.
  kMterpImplKind              // Assembly interpreter
};

#if ART_USE_CXX_INTERPRETER
static constexpr InterpreterImplKind kInterpreterImplKind = kSwitchImplKind;
#else
static constexpr InterpreterImplKind kInterpreterImplKind = kMterpImplKind;
#endif

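// Core interpreter entry. Reports instrumentation events on method entry, optionally hands
// execution off to JIT-compiled code, and then dispatches to the Mterp (assembly) or switch-based
// interpreter depending on kInterpreterImplKind, whether access checks are needed, and whether a
// transaction is active.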
static inline JValue Execute(
    Thread* self,
    const CodeItemDataAccessor& accessor,
    ShadowFrame& shadow_frame,
    JValue result_register,
    bool stay_in_interpreter = false,
    bool from_deoptimize = false) REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
  DCHECK(!shadow_frame.GetMethod()->IsNative());

  // Check that we are using the right interpreter.
  if (kIsDebugBuild && self->UseMterp() != CanUseMterp()) {
    // The flag might be currently being updated on all threads. Retry with lock.
    MutexLock tll_mu(self, *Locks::thread_list_lock_);
    DCHECK_EQ(self->UseMterp(), CanUseMterp());
  }

  if (LIKELY(!from_deoptimize)) {  // Entering the method, but not via deoptimization.
    if (kIsDebugBuild) {
      CHECK_EQ(shadow_frame.GetDexPC(), 0u);
      self->AssertNoPendingException();
    }
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    ArtMethod* method = shadow_frame.GetMethod();

    if (UNLIKELY(instrumentation->HasMethodEntryListeners())) {
      instrumentation->MethodEnterEvent(self,
                                        shadow_frame.GetThisObject(accessor.InsSize()),
                                        method,
                                        0);
      if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
        // The caller will retry this invoke. Just return immediately without any value.
        DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
        DCHECK(PrevFrameWillRetry(self, shadow_frame));
        return JValue();
      }
      if (UNLIKELY(self->IsExceptionPending())) {
        instrumentation->MethodUnwindEvent(self,
                                           shadow_frame.GetThisObject(accessor.InsSize()),
                                           method,
                                           0);
        return JValue();
      }
    }

    if (!stay_in_interpreter) {
      jit::Jit* jit = Runtime::Current()->GetJit();
      if (jit != nullptr) {
        jit->MethodEntered(self, shadow_frame.GetMethod());
        if (jit->CanInvokeCompiledCode(method)) {
          JValue result;

          // Pop the shadow frame before calling into compiled code.
          self->PopShadowFrame();
          // Calculate the offset of the first input reg. The input registers are in the high regs.
          // It's ok to access the code item here since JIT code will have been touched by the
          // interpreter and compiler already.
          uint16_t arg_offset = accessor.RegistersSize() - accessor.InsSize();
          ArtInterpreterToCompiledCodeBridge(self, nullptr, &shadow_frame, arg_offset, &result);
          // Push the shadow frame back as the caller will expect it.
          self->PushShadowFrame(&shadow_frame);

          return result;
        }
      }
    }
  }

  ArtMethod* method = shadow_frame.GetMethod();

  DCheckStaticState(self, method);

  // Lock counting is a special version of accessibility checks, and for simplicity and
  // reduction of template parameters, we gate it behind access-checks mode.
  DCHECK(!method->SkipAccessChecks() || !method->MustCountLocks());

  bool transaction_active = Runtime::Current()->IsActiveTransaction();
  if (LIKELY(method->SkipAccessChecks())) {
    // Enter the "without access check" interpreter.
    if (kInterpreterImplKind == kMterpImplKind) {
      if (transaction_active) {
        // No Mterp variant - just use the switch interpreter.
        return ExecuteSwitchImpl<false, true>(self, accessor, shadow_frame, result_register,
                                              false);
      } else if (UNLIKELY(!Runtime::Current()->IsStarted())) {
        return ExecuteSwitchImpl<false, false>(self, accessor, shadow_frame, result_register,
                                               false);
      } else {
        while (true) {
          // Mterp does not support all instrumentation/debugging.
          if (!self->UseMterp()) {
            return ExecuteSwitchImpl<false, false>(self, accessor, shadow_frame, result_register,
                                                   false);
          }
          bool returned = ExecuteMterpImpl(self,
                                           accessor.Insns(),
                                           &shadow_frame,
                                           &result_register);
          if (returned) {
            return result_register;
          } else {
            // Mterp didn't like that instruction. Single-step it with the reference interpreter.
            result_register = ExecuteSwitchImpl<false, false>(self, accessor, shadow_frame,
                                                              result_register, true);
            if (shadow_frame.GetDexPC() == dex::kDexNoIndex) {
              // Single-stepped a return or an exception not handled locally. Return to caller.
              return result_register;
            }
          }
        }
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kSwitchImplKind);
      if (transaction_active) {
        return ExecuteSwitchImpl<false, true>(self, accessor, shadow_frame, result_register,
                                              false);
      } else {
        return ExecuteSwitchImpl<false, false>(self, accessor, shadow_frame, result_register,
                                               false);
      }
    }
  } else {
    // Enter the "with access check" interpreter.

    // The boot classpath should really not have to run access checks.
    DCHECK(method->GetDeclaringClass()->GetClassLoader() != nullptr
           || Runtime::Current()->IsVerificationSoftFail()
           || Runtime::Current()->IsAotCompiler())
        << method->PrettyMethod();

    if (kInterpreterImplKind == kMterpImplKind) {
      // No access check variants for Mterp. Just use the switch version.
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, accessor, shadow_frame, result_register,
                                             false);
      } else {
        return ExecuteSwitchImpl<true, false>(self, accessor, shadow_frame, result_register,
                                              false);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kSwitchImplKind);
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, accessor, shadow_frame, result_register,
                                             false);
      } else {
        return ExecuteSwitchImpl<true, false>(self, accessor, shadow_frame, result_register,
                                              false);
      }
    }
  }
}

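// Invokes 'method' in the interpreter from a runtime caller: builds a shadow frame, copies the
// receiver and raw argument words into vregs according to the method's shorty, ensures a static
// method's declaring class is initialized, and then interprets the code item (or takes the JNI
// path for native methods).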
void EnterInterpreterFromInvoke(Thread* self,
                                ArtMethod* method,
                                ObjPtr<mirror::Object> receiver,
                                uint32_t* args,
                                JValue* result,
                                bool stay_in_interpreter) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  // This can happen if we are in forced interpreter mode and an obsolete method is called using
  // reflection.
  if (UNLIKELY(method->IsObsolete())) {
    ThrowInternalError("Attempting to invoke obsolete version of '%s'.",
                       method->PrettyMethod().c_str());
    return;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  CodeItemDataAccessor accessor(method->DexInstructionData());
  uint16_t num_regs;
  uint16_t num_ins;
  if (accessor.HasCodeItem()) {
    num_regs = accessor.RegistersSize();
    num_ins = accessor.InsSize();
  } else if (!method->IsInvokable()) {
    self->EndAssertNoThreadSuspension(old_cause);
    method->ThrowInvocationTimeError();
    return;
  } else {
    DCHECK(method->IsNative());
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty());
    if (!method->IsStatic()) {
      num_regs++;
      num_ins++;
    }
  }
  // Set up shadow frame with matching number of reference slots to vregs.
  ShadowFrame* last_shadow_frame = self->GetManagedStack()->GetTopShadowFrame();
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, last_shadow_frame, method, /* dex pc */ 0);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
  self->PushShadowFrame(shadow_frame);

  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != nullptr);
    shadow_frame->SetVRegReference(cur_reg, receiver);
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        ObjPtr<mirror::Object> o =
            reinterpret_cast<StackReference<mirror::Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o);
        break;
      }
      case 'J': case 'D': {
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  // Do this after populating the shadow frame in case EnsureInitialized causes a GC.
  if (method->IsStatic() && UNLIKELY(!method->GetDeclaringClass()->IsInitialized())) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
    if (UNLIKELY(!class_linker->EnsureInitialized(self, h_class, true, true))) {
      CHECK(self->IsExceptionPending());
      self->PopShadowFrame();
      return;
    }
  }
  if (LIKELY(!method->IsNative())) {
    JValue r = Execute(self, accessor, *shadow_frame, JValue(), stay_in_interpreter);
    if (result != nullptr) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    // Update args to be the args in the shadow frame since the input ones could hold stale
    // reference pointers due to moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntime::Jni(self, method, receiver.Ptr(), args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}

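// Returns the register holding "this" for an invoke-direct or invoke-direct/range to a String
// constructor, i.e. vC of the invoke instruction.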
static int16_t GetReceiverRegisterForStringInit(const Instruction* instr) {
  DCHECK(instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE ||
         instr->Opcode() == Instruction::INVOKE_DIRECT);
  return (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
      instr->VRegC_3rc() : instr->VRegC_35c();
}

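// Resumes interpretation of a chain of deoptimized shadow frames: each frame is re-entered at a
// dex pc fixed up according to why we deoptimized (pending exception, keep-dex-pc/force-retry,
// monitor instructions, invokes, etc.), and the result of each frame is threaded into the next
// frame in the chain via 'value'.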
void EnterInterpreterFromDeoptimize(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    JValue* ret_val,
                                    bool from_code,
                                    DeoptimizationMethodType deopt_method_type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue value;
  // Set value to last known result in case the shadow frame chain is empty.
  value.SetJ(ret_val->GetJ());
  // How many frames we have executed.
  size_t frame_cnt = 0;
  while (shadow_frame != nullptr) {
    // We do not want to recover lock state for lock counting when deoptimizing. Currently,
    // the compiler should not have compiled a method that failed structured-locking checks.
    DCHECK(!shadow_frame->GetMethod()->MustCountLocks());

    self->SetTopOfShadowStack(shadow_frame);
    CodeItemDataAccessor accessor(shadow_frame->GetMethod()->DexInstructionData());
    const uint32_t dex_pc = shadow_frame->GetDexPC();
    uint32_t new_dex_pc = dex_pc;
    if (UNLIKELY(self->IsExceptionPending())) {
      // If we deoptimize from the QuickExceptionHandler, we already reported the exception to
      // the instrumentation. To avoid reporting it a second time, we simply pass a
      // null Instrumentation*.
      const instrumentation::Instrumentation* const instrumentation =
          frame_cnt == 0 ? nullptr : Runtime::Current()->GetInstrumentation();
      new_dex_pc = MoveToExceptionHandler(
          self, *shadow_frame, instrumentation) ? shadow_frame->GetDexPC() : dex::kDexNoIndex;
    } else if (!from_code) {
      // Deoptimization is not called from code directly.
      const Instruction* instr = &accessor.InstructionAt(dex_pc);
      if (deopt_method_type == DeoptimizationMethodType::kKeepDexPc ||
          shadow_frame->GetForceRetryInstruction()) {
        DCHECK(frame_cnt == 0 || (frame_cnt == 1 && shadow_frame->GetForceRetryInstruction()))
            << "frame_cnt: " << frame_cnt
            << " force-retry: " << shadow_frame->GetForceRetryInstruction();
        // Need to re-execute the dex instruction.
        // (1) An invocation might be split into class initialization and invoke.
        //     In this case, the invoke should not be skipped.
        // (2) A suspend check should also execute the dex instruction at the
        //     corresponding dex pc.
        // If the ForceRetryInstruction bit is set this must be the second frame (the first being
        // the one that is being popped).
        DCHECK_EQ(new_dex_pc, dex_pc);
        shadow_frame->SetForceRetryInstruction(false);
      } else if (instr->Opcode() == Instruction::MONITOR_ENTER ||
                 instr->Opcode() == Instruction::MONITOR_EXIT) {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        DCHECK_EQ(frame_cnt, 0u);
        // Non-idempotent dex instructions should not be re-executed.
        // On the other hand, if a MONITOR_ENTER is at the dex_pc of a suspend
        // check, that MONITOR_ENTER should be executed. That case is handled
        // above.
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->IsInvoke()) {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        if (IsStringInit(instr, shadow_frame->GetMethod())) {
          uint16_t this_obj_vreg = GetReceiverRegisterForStringInit(instr);
          // Move the StringFactory.newStringFromChars() result into the register representing
          // "this object" when invoking the string constructor in the original dex instruction.
          // Also move the result into all aliases.
          DCHECK(value.GetL()->IsString());
          SetStringInitValueToAllAliases(shadow_frame, this_obj_vreg, value);
          // Calling the string constructor in the original dex code doesn't generate a result
          // value.
          value.SetJ(0);
        }
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->Opcode() == Instruction::NEW_INSTANCE) {
        // A NEW_INSTANCE is simply re-executed, including
        // "new-instance String" which is compiled into a call into
        // StringFactory.newEmptyString().
        DCHECK_EQ(new_dex_pc, dex_pc);
      } else {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        DCHECK_EQ(frame_cnt, 0u);
        // By default, we re-execute the dex instruction: since it is not an invoke, we don't
        // have to decode it to move a result into the right vreg. All slow paths have been
        // audited to be idempotent except monitor-enter/exit and invocation stubs.
        // TODO: move result and advance dex pc. That also requires that we
        // can tell the return type of a runtime method, possibly by decoding
        // the dex instruction at the caller.
        DCHECK_EQ(new_dex_pc, dex_pc);
      }
    } else {
      // Nothing to do, the dex_pc is the one at which the code requested
      // the deoptimization.
      DCHECK_EQ(frame_cnt, 0u);
      DCHECK_EQ(new_dex_pc, dex_pc);
    }
    if (new_dex_pc != dex::kDexNoIndex) {
      shadow_frame->SetDexPC(new_dex_pc);
      value = Execute(self,
                      accessor,
                      *shadow_frame,
                      value,
                      /* stay_in_interpreter= */ true,
                      /* from_deoptimize= */ true);
    }
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    ShadowFrame::DeleteDeoptimizedFrame(old_frame);
    // Following deoptimizations of shadow frames must be at invocation point
    // and should advance dex pc past the invoke instruction.
    from_code = false;
    deopt_method_type = DeoptimizationMethodType::kDefault;
    frame_cnt++;
  }
  ret_val->SetJ(value.GetJ());
}

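// Interpreter entry used when compiled code falls back to the interpreter: notifies the JIT of
// the transition and executes the already-constructed shadow frame.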
JValue EnterInterpreterFromEntryPoint(Thread* self, const CodeItemDataAccessor& accessor,
                                      ShadowFrame* shadow_frame) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return JValue();
  }

  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    jit->NotifyCompiledCodeToInterpreterTransition(self, shadow_frame->GetMethod());
  }
  return Execute(self, accessor, *shadow_frame, JValue());
}

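// Interpreter-to-interpreter call bridge: pushes the callee's shadow frame, ensures a static
// callee's declaring class is initialized, executes the callee, and stores its return value in
// 'result'.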
void ArtInterpreterToInterpreterBridge(Thread* self,
                                       const CodeItemDataAccessor& accessor,
                                       ShadowFrame* shadow_frame,
                                       JValue* result) {
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure the declaring class of a static method is initialized.
  const bool is_static = method->IsStatic();
  if (is_static) {
    ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsInitialized())) {
      StackHandleScope<1> hs(self);
      HandleWrapperObjPtr<mirror::Class> h_declaring_class(hs.NewHandleWrapper(&declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
          self, h_declaring_class, true, true))) {
        DCHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      CHECK(h_declaring_class->IsInitializing());
    }
  }

  if (LIKELY(!shadow_frame->GetMethod()->IsNative())) {
    result->SetJ(Execute(self, accessor, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    ObjPtr<mirror::Object> receiver = is_static ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(is_static ? 0 : 1);
    UnstartedRuntime::Jni(self, shadow_frame->GetMethod(), receiver.Ptr(), args, result);
  }

  self->PopShadowFrame();
}

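// Thin wrappers around the Mterp helpers so that code outside the interpreter does not need to
// depend on Mterp directly.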
void CheckInterpreterAsmConstants() {
  CheckMterpAsmConstants();
}

void InitInterpreterTls(Thread* self) {
  InitMterpTls(self);
}

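// Returns whether the frame that invoked 'frame' has its force-retry-instruction flag set,
// following the shadow frame link or, failing that, walking the stack (including any debugger
// shadow frame for the caller).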
bool PrevFrameWillRetry(Thread* self, const ShadowFrame& frame) {
  ShadowFrame* prev_frame = frame.GetLink();
  if (prev_frame == nullptr) {
    NthCallerVisitor vis(self, 1, false);
    vis.WalkStack();
    prev_frame = vis.GetCurrentShadowFrame();
    if (prev_frame == nullptr) {
      prev_frame = self->FindDebuggerShadowFrame(vis.GetFrameId());
    }
  }
  return prev_frame != nullptr && prev_frame->GetForceRetryInstruction();
}

}  // namespace interpreter
}  // namespace art