/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "interpreter.h"

#include <limits>
#include <string_view>

#include "common_dex_operations.h"
#include "common_throws.h"
#include "dex/dex_file_types.h"
#include "interpreter_common.h"
#include "interpreter_mterp_impl.h"
#include "interpreter_switch_impl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jvalue-inl.h"
#include "mirror/string-inl.h"
#include "mterp/mterp.h"
#include "nativehelper/scoped_local_ref.h"
#include "scoped_thread_state_change-inl.h"
#include "shadow_frame-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "unstarted_runtime.h"

namespace art {
namespace interpreter {

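// Reinterprets a raw 32-bit argument word from the invocation argument array as a managed
// object pointer.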
ALWAYS_INLINE static ObjPtr<mirror::Object> ObjArg(uint32_t arg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return reinterpret_cast<mirror::Object*>(arg);
}

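// Invokes a native method from the interpreter by matching the method's shorty against a small
// set of known signatures and calling the JNI entrypoint through a plain function pointer
// (rather than a JNI-compiled stub; see the TODO below). Unsupported shorties abort.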
static void InterpreterJni(Thread* self,
                           ArtMethod* method,
                           std::string_view shorty,
                           ObjPtr<mirror::Object> receiver,
                           uint32_t* args,
                           JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // TODO: The following enters JNI code using a typedef-ed function rather than the JNI compiler;
  // it should be removed and JNI compiled stubs used instead.
  ScopedObjectAccessUnchecked soa(self);
  if (method->IsStatic()) {
    if (shorty == "L") {
      using fntype = jobject(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "V") {
      using fntype = void(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get());
    } else if (shorty == "Z") {
      using fntype = jboolean(JNIEnv*, jclass);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get()));
    } else if (shorty == "BI") {
      using fntype = jbyte(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetB(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "II") {
      using fntype = jint(JNIEnv*, jclass, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0]));
    } else if (shorty == "LL") {
      using fntype = jobject(JNIEnv*, jclass, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), klass.get(), arg0.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "IIZ") {
      using fntype = jint(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "ILI") {
      using fntype = jint(JNIEnv*, jclass, jobject, jint);
      fntype* const fn = reinterpret_cast<fntype*>(const_cast<void*>(
          method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), klass.get(), arg0.get(), args[1]));
    } else if (shorty == "SIZ") {
      using fntype = jshort(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn =
          reinterpret_cast<fntype*>(const_cast<void*>(method->GetEntryPointFromJni()));
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetS(fn(soa.Env(), klass.get(), args[0], args[1]));
    } else if (shorty == "VIZ") {
      using fntype = void(JNIEnv*, jclass, jint, jboolean);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], args[1]);
    } else if (shorty == "ZLL") {
      using fntype = jboolean(JNIEnv*, jclass, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), arg0.get(), arg1.get()));
    } else if (shorty == "ZILL") {
      using fntype = jboolean(JNIEnv*, jclass, jint, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetZ(fn(soa.Env(), klass.get(), args[0], arg1.get(), arg2.get()));
    } else if (shorty == "VILII") {
      using fntype = void(JNIEnv*, jclass, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg1(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[1])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), args[0], arg1.get(), args[2], args[3]);
    } else if (shorty == "VLILII") {
      using fntype = void(JNIEnv*, jclass, jobject, jint, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jclass> klass(soa.Env(),
                                   soa.AddLocalReference<jclass>(method->GetDeclaringClass()));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      ScopedLocalRef<jobject> arg2(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[2])));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), klass.get(), arg0.get(), args[1], arg2.get(), args[3], args[4]);
    } else {
      LOG(FATAL) << "Do something with static native method: " << method->PrettyMethod()
                 << " shorty: " << shorty;
    }
  } else {
    if (shorty == "L") {
      using fntype = jobject(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
    } else if (shorty == "V") {
      using fntype = void(JNIEnv*, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      fn(soa.Env(), rcvr.get());
    } else if (shorty == "LL") {
      using fntype = jobject(JNIEnv*, jobject, jobject);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedLocalRef<jobject> arg0(soa.Env(),
                                   soa.AddLocalReference<jobject>(ObjArg(args[0])));
      jobject jresult;
      {
        ScopedThreadStateChange tsc(self, kNative);
        jresult = fn(soa.Env(), rcvr.get(), arg0.get());
      }
      result->SetL(soa.Decode<mirror::Object>(jresult));
      ScopedThreadStateChange tsc(self, kNative);
    } else if (shorty == "III") {
      using fntype = jint(JNIEnv*, jobject, jint, jint);
      fntype* const fn = reinterpret_cast<fntype*>(method->GetEntryPointFromJni());
      ScopedLocalRef<jobject> rcvr(soa.Env(),
                                   soa.AddLocalReference<jobject>(receiver));
      ScopedThreadStateChange tsc(self, kNative);
      result->SetI(fn(soa.Env(), rcvr.get(), args[0], args[1]));
    } else {
      LOG(FATAL) << "Do something with native method: " << method->PrettyMethod()
                 << " shorty: " << shorty;
    }
  }
}

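// Which interpreter implementation to use: the portable C++ switch-based interpreter or the
// hand-written assembly interpreter (mterp). Mterp is the default unless the build opts into the
// C++ interpreter via ART_USE_CXX_INTERPRETER.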
enum InterpreterImplKind {
  kSwitchImplKind,  // Switch-based interpreter implementation.
  kMterpImplKind    // Assembly interpreter
};

#if ART_USE_CXX_INTERPRETER
static constexpr InterpreterImplKind kInterpreterImplKind = kSwitchImplKind;
#else
static constexpr InterpreterImplKind kInterpreterImplKind = kMterpImplKind;
#endif

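// Core interpreter entry: delivers method-entry instrumentation events, hands execution over to
// JIT-compiled code when that is allowed, and otherwise runs the bytecode in either mterp or the
// switch interpreter depending on kInterpreterImplKind, transactions, and instrumentation state.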
static inline JValue Execute(
    Thread* self,
    const CodeItemDataAccessor& accessor,
    ShadowFrame& shadow_frame,
    JValue result_register,
    bool stay_in_interpreter = false,
    bool from_deoptimize = false) REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!shadow_frame.GetMethod()->IsAbstract());
  DCHECK(!shadow_frame.GetMethod()->IsNative());

  // Check that we are using the right interpreter.
  if (kIsDebugBuild && self->UseMterp() != CanUseMterp()) {
    // The flag might be currently being updated on all threads. Retry with lock.
    MutexLock tll_mu(self, *Locks::thread_list_lock_);
    DCHECK_EQ(self->UseMterp(), CanUseMterp());
  }

  if (LIKELY(!from_deoptimize)) {  // Entering the method, but not via deoptimization.
    if (kIsDebugBuild) {
      CHECK_EQ(shadow_frame.GetDexPC(), 0u);
      self->AssertNoPendingException();
    }
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    ArtMethod* method = shadow_frame.GetMethod();

    if (UNLIKELY(instrumentation->HasMethodEntryListeners())) {
      instrumentation->MethodEnterEvent(self,
                                        shadow_frame.GetThisObject(accessor.InsSize()),
                                        method,
                                        0);
      if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
        // The caller will retry this invoke or ignore the result. Just return immediately without
        // any value.
        DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
        JValue ret = JValue();
        bool res = PerformNonStandardReturn<MonitorState::kNoMonitorsLocked>(
            self,
            shadow_frame,
            ret,
            instrumentation,
            accessor.InsSize(),
            0);
        DCHECK(res) << "Expected to perform non-standard return!";
        return ret;
      }
      if (UNLIKELY(self->IsExceptionPending())) {
        instrumentation->MethodUnwindEvent(self,
                                           shadow_frame.GetThisObject(accessor.InsSize()),
                                           method,
                                           0);
        JValue ret = JValue();
        if (UNLIKELY(shadow_frame.GetForcePopFrame())) {
          DCHECK(Runtime::Current()->AreNonStandardExitsEnabled());
          bool res = PerformNonStandardReturn<MonitorState::kNoMonitorsLocked>(
              self,
              shadow_frame,
              ret,
              instrumentation,
              accessor.InsSize(),
              0);
          DCHECK(res) << "Expected to perform non-standard return!";
        }
        return ret;
      }
    }

    if (!stay_in_interpreter && !self->IsForceInterpreter()) {
      jit::Jit* jit = Runtime::Current()->GetJit();
      if (jit != nullptr) {
        jit->MethodEntered(self, shadow_frame.GetMethod());
        if (jit->CanInvokeCompiledCode(method)) {
          JValue result;

          // Pop the shadow frame before calling into compiled code.
          self->PopShadowFrame();
          // Calculate the offset of the first input reg. The input registers are in the high regs.
          // It's ok to access the code item here since JIT code will have been touched by the
          // interpreter and compiler already.
          uint16_t arg_offset = accessor.RegistersSize() - accessor.InsSize();
          ArtInterpreterToCompiledCodeBridge(self, nullptr, &shadow_frame, arg_offset, &result);
          // Push the shadow frame back as the caller will expect it.
          self->PushShadowFrame(&shadow_frame);

          return result;
        }
      }
    }
  }

  ArtMethod* method = shadow_frame.GetMethod();

  DCheckStaticState(self, method);

  // Lock counting is a special version of accessibility checks, and for simplicity and
  // reduction of template parameters, we gate it behind access-checks mode.
  DCHECK(!method->SkipAccessChecks() || !method->MustCountLocks());

  bool transaction_active = Runtime::Current()->IsActiveTransaction();
  VLOG(interpreter) << "Interpreting " << method->PrettyMethod();
  if (LIKELY(method->SkipAccessChecks())) {
    // Enter the "without access check" interpreter.
    if (kInterpreterImplKind == kMterpImplKind) {
      if (transaction_active) {
        // No Mterp variant - just use the switch interpreter.
        return ExecuteSwitchImpl<false, true>(self, accessor, shadow_frame, result_register,
                                              false);
      } else if (UNLIKELY(!Runtime::Current()->IsStarted())) {
        return ExecuteSwitchImpl<false, false>(self, accessor, shadow_frame, result_register,
                                               false);
      } else {
        while (true) {
          // Mterp does not support all instrumentation/debugging.
          if (!self->UseMterp()) {
            return ExecuteSwitchImpl<false, false>(self, accessor, shadow_frame, result_register,
                                                   false);
          }
          bool returned = ExecuteMterpImpl(self,
                                           accessor.Insns(),
                                           &shadow_frame,
                                           &result_register);
          if (returned) {
            return result_register;
          } else {
            // Mterp didn't like that instruction. Single-step it with the reference interpreter.
            result_register = ExecuteSwitchImpl<false, false>(self, accessor, shadow_frame,
                                                              result_register, true);
            if (shadow_frame.GetDexPC() == dex::kDexNoIndex) {
              // Single-stepped a return or an exception not handled locally. Return to caller.
              return result_register;
            }
          }
        }
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kSwitchImplKind);
      if (transaction_active) {
        return ExecuteSwitchImpl<false, true>(self, accessor, shadow_frame, result_register,
                                              false);
      } else {
        return ExecuteSwitchImpl<false, false>(self, accessor, shadow_frame, result_register,
                                               false);
      }
    }
  } else {
    // Enter the "with access check" interpreter.

    // The boot classpath should really not have to run access checks.
    DCHECK(method->GetDeclaringClass()->GetClassLoader() != nullptr
           || Runtime::Current()->IsVerificationSoftFail()
           || Runtime::Current()->IsAotCompiler())
        << method->PrettyMethod();

    if (kInterpreterImplKind == kMterpImplKind) {
      // No access check variants for Mterp. Just use the switch version.
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, accessor, shadow_frame, result_register,
                                             false);
      } else {
        return ExecuteSwitchImpl<true, false>(self, accessor, shadow_frame, result_register,
                                              false);
      }
    } else {
      DCHECK_EQ(kInterpreterImplKind, kSwitchImplKind);
      if (transaction_active) {
        return ExecuteSwitchImpl<true, true>(self, accessor, shadow_frame, result_register,
                                             false);
      } else {
        return ExecuteSwitchImpl<true, false>(self, accessor, shadow_frame, result_register,
                                              false);
      }
    }
  }
}

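// Entry point used by reflection and other runtime invocation paths: builds a shadow frame from
// the raw argument array (using the method's shorty), ensures the declaring class of a static
// method is initialized, and then interprets the code item, or hands native methods to
// InterpreterJni/UnstartedRuntime.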
void EnterInterpreterFromInvoke(Thread* self,
                                ArtMethod* method,
                                ObjPtr<mirror::Object> receiver,
                                uint32_t* args,
                                JValue* result,
                                bool stay_in_interpreter) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  // This can happen if we are in forced interpreter mode and an obsolete method is called using
  // reflection.
  if (UNLIKELY(method->IsObsolete())) {
    ThrowInternalError("Attempting to invoke obsolete version of '%s'.",
                       method->PrettyMethod().c_str());
    return;
  }

  const char* old_cause = self->StartAssertNoThreadSuspension("EnterInterpreterFromInvoke");
  CodeItemDataAccessor accessor(method->DexInstructionData());
  uint16_t num_regs;
  uint16_t num_ins;
  if (accessor.HasCodeItem()) {
    num_regs = accessor.RegistersSize();
    num_ins = accessor.InsSize();
  } else if (!method->IsInvokable()) {
    self->EndAssertNoThreadSuspension(old_cause);
    method->ThrowInvocationTimeError();
    return;
  } else {
    DCHECK(method->IsNative());
    num_regs = num_ins = ArtMethod::NumArgRegisters(method->GetShorty());
    if (!method->IsStatic()) {
      num_regs++;
      num_ins++;
    }
  }
  // Set up shadow frame with matching number of reference slots to vregs.
  ShadowFrame* last_shadow_frame = self->GetManagedStack()->GetTopShadowFrame();
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, last_shadow_frame, method, /* dex pc */ 0);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
  self->PushShadowFrame(shadow_frame);

  size_t cur_reg = num_regs - num_ins;
  if (!method->IsStatic()) {
    CHECK(receiver != nullptr);
    shadow_frame->SetVRegReference(cur_reg, receiver);
    ++cur_reg;
  }
  uint32_t shorty_len = 0;
  const char* shorty = method->GetShorty(&shorty_len);
  for (size_t shorty_pos = 0, arg_pos = 0; cur_reg < num_regs; ++shorty_pos, ++arg_pos, cur_reg++) {
    DCHECK_LT(shorty_pos + 1, shorty_len);
    switch (shorty[shorty_pos + 1]) {
      case 'L': {
        ObjPtr<mirror::Object> o =
            reinterpret_cast<StackReference<mirror::Object>*>(&args[arg_pos])->AsMirrorPtr();
        shadow_frame->SetVRegReference(cur_reg, o);
        break;
      }
      case 'J': case 'D': {
        uint64_t wide_value = (static_cast<uint64_t>(args[arg_pos + 1]) << 32) | args[arg_pos];
        shadow_frame->SetVRegLong(cur_reg, wide_value);
        cur_reg++;
        arg_pos++;
        break;
      }
      default:
        shadow_frame->SetVReg(cur_reg, args[arg_pos]);
        break;
    }
  }
  self->EndAssertNoThreadSuspension(old_cause);
  // Do this after populating the shadow frame in case EnsureInitialized causes a GC.
  if (method->IsStatic()) {
    ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsVisiblyInitialized())) {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
              self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true))) {
        CHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      DCHECK(h_class->IsInitializing());
    }
  }
  if (LIKELY(!method->IsNative())) {
    JValue r = Execute(self, accessor, *shadow_frame, JValue(), stay_in_interpreter);
    if (result != nullptr) {
      *result = r;
    }
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    // Update args to be the args in the shadow frame since the input ones could hold stale
    // reference pointers due to moving GC.
    args = shadow_frame->GetVRegArgs(method->IsStatic() ? 0 : 1);
    if (!Runtime::Current()->IsStarted()) {
      UnstartedRuntime::Jni(self, method, receiver.Ptr(), args, result);
    } else {
      InterpreterJni(self, method, shorty, receiver, args, result);
    }
  }
  self->PopShadowFrame();
}

static int16_t GetReceiverRegisterForStringInit(const Instruction* instr) {
  DCHECK(instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE ||
         instr->Opcode() == Instruction::INVOKE_DIRECT);
  return (instr->Opcode() == Instruction::INVOKE_DIRECT_RANGE) ?
      instr->VRegC_3rc() : instr->VRegC_35c();
}

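// Called by the deoptimization machinery: replays the chain of deoptimized shadow frames in the
// interpreter, deciding per frame whether to re-execute the instruction at the recorded dex pc
// or to advance past it (e.g. after an invoke whose result is already available).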
void EnterInterpreterFromDeoptimize(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    JValue* ret_val,
                                    bool from_code,
                                    DeoptimizationMethodType deopt_method_type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue value;
  // Set value to last known result in case the shadow frame chain is empty.
  value.SetJ(ret_val->GetJ());
  // How many frames we have executed.
  size_t frame_cnt = 0;
  while (shadow_frame != nullptr) {
    // We do not want to recover lock state for lock counting when deoptimizing. Currently,
    // the compiler should not have compiled a method that failed structured-locking checks.
    DCHECK(!shadow_frame->GetMethod()->MustCountLocks());

    self->SetTopOfShadowStack(shadow_frame);
    CodeItemDataAccessor accessor(shadow_frame->GetMethod()->DexInstructionData());
    const uint32_t dex_pc = shadow_frame->GetDexPC();
    uint32_t new_dex_pc = dex_pc;
    if (UNLIKELY(self->IsExceptionPending())) {
      // If we deoptimize from the QuickExceptionHandler, we already reported the exception to
      // the instrumentation. To prevent from reporting it a second time, we simply pass a
      // null Instrumentation*.
      const instrumentation::Instrumentation* const instrumentation =
          frame_cnt == 0 ? nullptr : Runtime::Current()->GetInstrumentation();
      new_dex_pc = MoveToExceptionHandler(
          self, *shadow_frame, instrumentation) ? shadow_frame->GetDexPC() : dex::kDexNoIndex;
    } else if (!from_code) {
      // Deoptimization is not called from code directly.
      const Instruction* instr = &accessor.InstructionAt(dex_pc);
      if (deopt_method_type == DeoptimizationMethodType::kKeepDexPc ||
          shadow_frame->GetForceRetryInstruction()) {
        DCHECK(frame_cnt == 0 || (frame_cnt == 1 && shadow_frame->GetForceRetryInstruction()))
            << "frame_cnt: " << frame_cnt
            << " force-retry: " << shadow_frame->GetForceRetryInstruction();
        // Need to re-execute the dex instruction.
        // (1) An invocation might be split into class initialization and invoke.
        //     In this case, the invoke should not be skipped.
        // (2) A suspend check should also execute the dex instruction at the
        //     corresponding dex pc.
        // If the ForceRetryInstruction bit is set this must be the second frame (the first being
        // the one that is being popped).
        DCHECK_EQ(new_dex_pc, dex_pc);
        shadow_frame->SetForceRetryInstruction(false);
      } else if (instr->Opcode() == Instruction::MONITOR_ENTER ||
                 instr->Opcode() == Instruction::MONITOR_EXIT) {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        DCHECK_EQ(frame_cnt, 0u);
        // Non-idempotent dex instruction should not be re-executed.
        // On the other hand, if a MONITOR_ENTER is at the dex_pc of a suspend
        // check, that MONITOR_ENTER should be executed. That case is handled
        // above.
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->IsInvoke()) {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        if (IsStringInit(instr, shadow_frame->GetMethod())) {
          uint16_t this_obj_vreg = GetReceiverRegisterForStringInit(instr);
          // Move the StringFactory.newStringFromChars() result into the register representing
          // "this object" when invoking the string constructor in the original dex instruction.
          // Also move the result into all aliases.
          DCHECK(value.GetL()->IsString());
          SetStringInitValueToAllAliases(shadow_frame, this_obj_vreg, value);
          // Calling string constructor in the original dex code doesn't generate a result value.
          value.SetJ(0);
        }
        new_dex_pc = dex_pc + instr->SizeInCodeUnits();
      } else if (instr->Opcode() == Instruction::NEW_INSTANCE) {
        // A NEW_INSTANCE is simply re-executed, including
        // "new-instance String" which is compiled into a call into
        // StringFactory.newEmptyString().
        DCHECK_EQ(new_dex_pc, dex_pc);
      } else {
        DCHECK(deopt_method_type == DeoptimizationMethodType::kDefault);
        DCHECK_EQ(frame_cnt, 0u);
        // By default, we re-execute the dex instruction: since it is not an invoke, we don't
        // have to decode it to move a result into the right vreg. All slow paths have been
        // audited to be idempotent except monitor-enter/exit and invocation stubs.
        // TODO: move result and advance dex pc. That also requires that we
        // can tell the return type of a runtime method, possibly by decoding
        // the dex instruction at the caller.
        DCHECK_EQ(new_dex_pc, dex_pc);
      }
    } else {
      // Nothing to do, the dex_pc is the one at which the code requested
      // the deoptimization.
      DCHECK_EQ(frame_cnt, 0u);
      DCHECK_EQ(new_dex_pc, dex_pc);
    }
    if (new_dex_pc != dex::kDexNoIndex) {
      shadow_frame->SetDexPC(new_dex_pc);
      value = Execute(self,
                      accessor,
                      *shadow_frame,
                      value,
                      /* stay_in_interpreter= */ true,
                      /* from_deoptimize= */ true);
    }
    ShadowFrame* old_frame = shadow_frame;
    shadow_frame = shadow_frame->GetLink();
    ShadowFrame::DeleteDeoptimizedFrame(old_frame);
    // Following deoptimizations of shadow frames must be at invocation point
    // and should advance dex pc past the invoke instruction.
    from_code = false;
    deopt_method_type = DeoptimizationMethodType::kDefault;
    frame_cnt++;
  }
  ret_val->SetJ(value.GetJ());
}

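// Used when compiled or stub code transfers control to the interpreter: notifies the JIT about
// the compiled-code-to-interpreter transition and then executes the given shadow frame.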
JValue EnterInterpreterFromEntryPoint(Thread* self, const CodeItemDataAccessor& accessor,
                                      ShadowFrame* shadow_frame) {
  DCHECK_EQ(self, Thread::Current());
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return JValue();
  }

  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    jit->NotifyCompiledCodeToInterpreterTransition(self, shadow_frame->GetMethod());
  }
  return Execute(self, accessor, *shadow_frame, JValue());
}

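// Invoked when interpreted code calls a method that must also be interpreted: runs the callee in
// the already-prepared shadow frame, initializing its declaring class first if necessary.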
void ArtInterpreterToInterpreterBridge(Thread* self,
                                       const CodeItemDataAccessor& accessor,
                                       ShadowFrame* shadow_frame,
                                       JValue* result) {
  bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
  if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
    ThrowStackOverflowError(self);
    return;
  }

  self->PushShadowFrame(shadow_frame);
  ArtMethod* method = shadow_frame->GetMethod();
  // Ensure static methods are initialized.
  const bool is_static = method->IsStatic();
  if (is_static) {
    ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
    if (UNLIKELY(!declaring_class->IsVisiblyInitialized())) {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(declaring_class));
      if (UNLIKELY(!Runtime::Current()->GetClassLinker()->EnsureInitialized(
              self, h_class, /*can_init_fields=*/ true, /*can_init_parents=*/ true))) {
        DCHECK(self->IsExceptionPending());
        self->PopShadowFrame();
        return;
      }
      DCHECK(h_class->IsInitializing());
    }
  }

  if (LIKELY(!shadow_frame->GetMethod()->IsNative())) {
    result->SetJ(Execute(self, accessor, *shadow_frame, JValue()).GetJ());
  } else {
    // We don't expect to be asked to interpret native code (which is entered via a JNI compiler
    // generated stub) except during testing and image writing.
    CHECK(!Runtime::Current()->IsStarted());
    ObjPtr<mirror::Object> receiver = is_static ? nullptr : shadow_frame->GetVRegReference(0);
    uint32_t* args = shadow_frame->GetVRegArgs(is_static ? 0 : 1);
    UnstartedRuntime::Jni(self, shadow_frame->GetMethod(), receiver.Ptr(), args, result);
  }

  self->PopShadowFrame();
}

void CheckInterpreterAsmConstants() {
  CheckMterpAsmConstants();
}

void InitInterpreterTls(Thread* self) {
  InitMterpTls(self);
}

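// Returns whether the caller of |frame| has its force-retry-instruction bit set, i.e. whether it
// will re-execute its current instruction once this frame is popped. Falls back to a stack walk
// (and the debugger's shadow-frame map) when the frame's link pointer is not set.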
bool PrevFrameWillRetry(Thread* self, const ShadowFrame& frame) {
  ShadowFrame* prev_frame = frame.GetLink();
  if (prev_frame == nullptr) {
    NthCallerVisitor vis(self, 1, false);
    vis.WalkStack();
    prev_frame = vis.GetCurrentShadowFrame();
    if (prev_frame == nullptr) {
      prev_frame = self->FindDebuggerShadowFrame(vis.GetFrameId());
    }
  }
  return prev_frame != nullptr && prev_frame->GetForceRetryInstruction();
}

}  // namespace interpreter
}  // namespace art