/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/utils.h"
#include "class_linker.h"
#include "class_root.h"
#include "dex/invoke_type.h"
#include "driver/compiler_options.h"
#include "gc/space/image_space.h"
#include "image-inl.h"
#include "intrinsic_objects.h"
#include "nodes.h"
#include "obj_ptr-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-current-inl.h"

namespace art {

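// Most of the helpers in this file are generated with the X-macro pattern:
// "intrinsics_list.h" provides INTRINSICS_LIST(V), which expands
// V(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) once per
// intrinsic, so each table below becomes one switch case (or assertion) per intrinsic.
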
// Check that intrinsic enum values fit within space set aside in ArtMethod modifier flags.
// (The maximum storable ordinal is the bit-field mask shifted down to bit 0,
// i.e. kAccIntrinsicBits >> CTZ(kAccIntrinsicBits).)
#define CHECK_INTRINSICS_ENUM_VALUES(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
  static_assert( \
      static_cast<uint32_t>(Intrinsics::k ## Name) <= (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits)), \
      "Intrinsics enumeration space overflow.");
#include "intrinsics_list.h"
  INTRINSICS_LIST(CHECK_INTRINSICS_ENUM_VALUES)
#undef INTRINSICS_LIST
#undef CHECK_INTRINSICS_ENUM_VALUES

// Function that returns whether an intrinsic is static/direct or virtual.
static inline InvokeType GetIntrinsicInvokeType(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kInterface;  // Nonsensical for intrinsic.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return IsStatic;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kInterface;
}

// Function that returns whether an intrinsic needs an environment or not.
static inline IntrinsicNeedsEnvironmentOrCache NeedsEnvironmentOrCache(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kNeedsEnvironmentOrCache;  // Nonsensical for intrinsic.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return NeedsEnvironmentOrCache;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kNeedsEnvironmentOrCache;
}

// Function that returns whether an intrinsic has side effects.
static inline IntrinsicSideEffects GetSideEffects(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kAllSideEffects;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return SideEffects;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kAllSideEffects;
}

// Function that returns whether an intrinsic can throw exceptions.
static inline IntrinsicExceptions GetExceptions(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kCanThrow;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return Exceptions;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kCanThrow;
}

static bool CheckInvokeType(Intrinsics intrinsic, HInvoke* invoke)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Whenever the intrinsic is marked as static, report an error if we find an InvokeVirtual.
  //
  // Whenever the intrinsic is marked as direct and we find an InvokeVirtual, a devirtualization
  // failure occurred. We might be in a situation where we have inlined a method that calls an
  // intrinsic, but that method is in a different dex file on which we do not have a
  // verified_method that would have helped the compiler driver sharpen the call. In that case,
  // make sure that the intrinsic is actually for some final method (or in a final class), as
  // otherwise the intrinsics setup is broken.
  //
  // In the other direction, we have intrinsics for virtual functions that will perform a check
  // inline. If the precise type is known, however, the instruction will be sharpened to an
  // InvokeStaticOrDirect.
  InvokeType intrinsic_type = GetIntrinsicInvokeType(intrinsic);
  InvokeType invoke_type = invoke->GetInvokeType();

  switch (intrinsic_type) {
    case kStatic:
      return (invoke_type == kStatic);

    case kDirect:
      if (invoke_type == kDirect) {
        return true;
      }
      if (invoke_type == kVirtual) {
        ArtMethod* art_method = invoke->GetResolvedMethod();
        return (art_method->IsFinal() || art_method->GetDeclaringClass()->IsFinal());
      }
      return false;

    case kVirtual:
      // Call might be devirtualized.
      return (invoke_type == kVirtual || invoke_type == kDirect || invoke_type == kInterface);

    case kSuper:
    case kInterface:
    case kPolymorphic:
    case kCustom:
      return false;
  }
  LOG(FATAL) << "Unknown intrinsic invoke type: " << intrinsic_type;
  UNREACHABLE();
}

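// Attempts to recognize `invoke` as a compiler intrinsic. Returns true and records the
// intrinsic on the invoke if successful; sets `*wrong_invoke_type` when the resolved method
// is flagged as an intrinsic but the invoke kind does not match the intrinsic's expectations.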
bool IntrinsicsRecognizer::Recognize(HInvoke* invoke,
                                     ArtMethod* art_method,
                                     /*out*/ bool* wrong_invoke_type) {
  if (art_method == nullptr) {
    art_method = invoke->GetResolvedMethod();
  }
  *wrong_invoke_type = false;
  if (art_method == nullptr || !art_method->IsIntrinsic()) {
    return false;
  }

  // TODO: b/65872996 The intent is that polymorphic signature methods should
  // be compiler intrinsics. At present, they are only interpreter intrinsics.
  if (art_method->IsPolymorphicSignature()) {
    return false;
  }

  Intrinsics intrinsic = static_cast<Intrinsics>(art_method->GetIntrinsic());
  if (!CheckInvokeType(intrinsic, invoke)) {
    *wrong_invoke_type = true;
    return false;
  }

  invoke->SetIntrinsic(intrinsic,
                       NeedsEnvironmentOrCache(intrinsic),
                       GetSideEffects(intrinsic),
                       GetExceptions(intrinsic));
  return true;
}

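// Runs intrinsic recognition over the whole graph. Returns true if at least one invoke was
// marked as an intrinsic, so callers know the pass changed something.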
bool IntrinsicsRecognizer::Run() {
  bool did_recognize = false;
  ScopedObjectAccess soa(Thread::Current());
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    for (HInstructionIterator inst_it(block->GetInstructions()); !inst_it.Done();
         inst_it.Advance()) {
      HInstruction* inst = inst_it.Current();
      if (inst->IsInvoke()) {
        bool wrong_invoke_type = false;
        if (Recognize(inst->AsInvoke(), /* art_method */ nullptr, &wrong_invoke_type)) {
          did_recognize = true;
          MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized);
        } else if (wrong_invoke_type) {
          LOG(WARNING)
              << "Found an intrinsic with unexpected invoke type: "
              << inst->AsInvoke()->GetResolvedMethod()->PrettyMethod() << " "
              << inst->DebugName();
        }
      }
    }
  }
  return did_recognize;
}

std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic) {
  switch (intrinsic) {
    case Intrinsics::kNone:
      os << "None";
      break;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      os << # Name; \
      break;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return os;
}

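// Support code for the Integer.valueOf() intrinsic. The boot image embeds the array of
// pre-boxed java.lang.Integer objects backing java.lang.Integer$IntegerCache; the helpers
// below locate that cache and validate that the runtime copy still matches it, so generated
// code can return a cached Integer directly instead of calling Integer.valueOf().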
static const char kIntegerCacheDescriptor[] = "Ljava/lang/Integer$IntegerCache;";
static const char kIntegerDescriptor[] = "Ljava/lang/Integer;";
static const char kIntegerArrayDescriptor[] = "[Ljava/lang/Integer;";
static const char kLowFieldName[] = "low";
static const char kHighFieldName[] = "high";
static const char kValueFieldName[] = "value";

static ObjPtr<mirror::ObjectArray<mirror::Object>> GetBootImageLiveObjects()
    REQUIRES_SHARED(Locks::mutator_lock_) {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  const std::vector<gc::space::ImageSpace*>& boot_image_spaces = heap->GetBootImageSpaces();
  DCHECK(!boot_image_spaces.empty());
  const ImageHeader& main_header = boot_image_spaces[0]->GetImageHeader();
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
          main_header.GetImageRoot<kWithoutReadBarrier>(ImageHeader::kBootImageLiveObjects));
  DCHECK(boot_image_live_objects != nullptr);
  DCHECK(heap->ObjectIsInBootImageSpace(boot_image_live_objects));
  return boot_image_live_objects;
}

static ObjPtr<mirror::Class> LookupInitializedClass(Thread* self,
                                                    ClassLinker* class_linker,
                                                    const char* descriptor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> klass =
      class_linker->LookupClass(self, descriptor, /* class_loader */ nullptr);
  DCHECK(klass != nullptr);
  DCHECK(klass->IsInitialized());
  return klass;
}

static ObjPtr<mirror::ObjectArray<mirror::Object>> GetIntegerCacheArray(
    ObjPtr<mirror::Class> cache_class) REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtField* cache_field = cache_class->FindDeclaredStaticField("cache", kIntegerArrayDescriptor);
  DCHECK(cache_field != nullptr);
  return ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(cache_field->GetObject(cache_class));
}

static int32_t GetIntegerCacheField(ObjPtr<mirror::Class> cache_class, const char* field_name)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtField* field = cache_class->FindDeclaredStaticField(field_name, "I");
  DCHECK(field != nullptr);
  return field->GetInt(cache_class);
}

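// Check that the runtime's java.lang.Integer$IntegerCache still matches the cache baked into
// the boot image. The cache fields are ordinary Java fields that can be modified through
// reflection; if anything differs from the boot image copy, the intrinsic must not embed
// direct boot image pointers.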
static bool CheckIntegerCache(Thread* self,
                              ClassLinker* class_linker,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_cache)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(boot_image_cache != nullptr);

  // Since we have a cache in the boot image, both java.lang.Integer and
  // java.lang.Integer$IntegerCache must be initialized in the boot image.
  ObjPtr<mirror::Class> cache_class =
      LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor);
  ObjPtr<mirror::Class> integer_class =
      LookupInitializedClass(self, class_linker, kIntegerDescriptor);

  // Check that the current cache is the same as the `boot_image_cache`.
  ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = GetIntegerCacheArray(cache_class);
  if (current_cache != boot_image_cache) {
    return false;  // Messed up IntegerCache.cache.
  }

  // Check that the range matches the boot image cache length.
  int32_t low = GetIntegerCacheField(cache_class, kLowFieldName);
  int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
  if (boot_image_cache->GetLength() != high - low + 1) {
    return false;  // Messed up IntegerCache.low or IntegerCache.high.
  }

  // Check that the elements match the boot image intrinsic objects and check their values as well.
  ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
  DCHECK(value_field != nullptr);
  for (int32_t i = 0, len = boot_image_cache->GetLength(); i != len; ++i) {
    ObjPtr<mirror::Object> boot_image_object =
        IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, i);
    DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boot_image_object));
    // No need for read barrier for comparison with a boot image object.
    ObjPtr<mirror::Object> current_object =
        boot_image_cache->GetWithoutChecks<kVerifyNone, kWithoutReadBarrier>(i);
    if (boot_image_object != current_object) {
      return false;  // Messed up IntegerCache.cache[i].
    }
    if (value_field->GetInt(boot_image_object) != low + i) {
      return false;  // Messed up IntegerCache.cache[i].value.
    }
  }

  return true;
}

void IntrinsicVisitor::ComputeIntegerValueOfLocations(HInvoke* invoke,
                                                      CodeGenerator* codegen,
                                                      Location return_location,
                                                      Location first_argument_location) {
  // The intrinsic will call if it needs to allocate a j.l.Integer.
  LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
  const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
  if (compiler_options.IsBootImage()) {
    // Piggyback on the method load kind to determine whether we can use PC-relative addressing.
    // This should cover both the testing config (non-PIC boot image) and codegens that reject
    // PC-relative load kinds and fall back to the runtime call.
    if (!invoke->AsInvokeStaticOrDirect()->HasPcRelativeMethodLoadKind()) {
      return;
    }
    if (!compiler_options.IsImageClass(kIntegerCacheDescriptor) ||
        !compiler_options.IsImageClass(kIntegerDescriptor)) {
      return;
    }
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    ObjPtr<mirror::Class> cache_class = class_linker->LookupClass(
        self, kIntegerCacheDescriptor, /* class_loader */ nullptr);
    DCHECK(cache_class != nullptr);
    if (UNLIKELY(!cache_class->IsInitialized())) {
      LOG(WARNING) << "Image class " << cache_class->PrettyDescriptor() << " is uninitialized.";
      return;
    }
    ObjPtr<mirror::Class> integer_class =
        class_linker->LookupClass(self, kIntegerDescriptor, /* class_loader */ nullptr);
    DCHECK(integer_class != nullptr);
    if (UNLIKELY(!integer_class->IsInitialized())) {
      LOG(WARNING) << "Image class " << integer_class->PrettyDescriptor() << " is uninitialized.";
      return;
    }
    int32_t low = GetIntegerCacheField(cache_class, kLowFieldName);
    int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
    if (kIsDebugBuild) {
      ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = GetIntegerCacheArray(cache_class);
      CHECK(current_cache != nullptr);
      CHECK_EQ(current_cache->GetLength(), high - low + 1);
      ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
      CHECK(value_field != nullptr);
      for (int32_t i = 0, len = current_cache->GetLength(); i != len; ++i) {
        ObjPtr<mirror::Object> current_object = current_cache->GetWithoutChecks(i);
        CHECK(current_object != nullptr);
        CHECK_EQ(value_field->GetInt(current_object), low + i);
      }
    }
    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
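      // Unsigned subtraction folds the `low <= value && value <= high` check into one comparison.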
      if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) <
          static_cast<uint32_t>(high - low + 1)) {
        // No call, we shall use direct pointer to the Integer object.
        call_kind = LocationSummary::kNoCall;
      }
    }
  } else {
    Runtime* runtime = Runtime::Current();
    if (runtime->GetHeap()->GetBootImageSpaces().empty()) {
      return;  // Running without boot image, cannot use required boot image objects.
    }
    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
    ObjPtr<mirror::ObjectArray<mirror::Object>> cache =
        IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects);
    if (cache == nullptr) {
      return;  // No cache in the boot image.
    }
    if (runtime->UseJitCompilation()) {
      if (!CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache)) {
        return;  // The cache was somehow messed up, probably by using reflection.
      }
    } else {
      DCHECK(runtime->IsAotCompiler());
      DCHECK(CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache));
      if (invoke->InputAt(0)->IsIntConstant()) {
        int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
        // Retrieve the `value` from the lowest cached Integer.
        ObjPtr<mirror::Object> low_integer =
            IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
        ObjPtr<mirror::Class> integer_class =
            low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
        ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
        DCHECK(value_field != nullptr);
        int32_t low = value_field->GetInt(low_integer);
        if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) <
            static_cast<uint32_t>(cache->GetLength())) {
          // No call, we shall use direct pointer to the Integer object. Note that we cannot
          // do this for JIT as the "low" can change through reflection before emitting the code.
          call_kind = LocationSummary::kNoCall;
        }
      }
    }
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
  LocationSummary* locations = new (allocator) LocationSummary(invoke, call_kind, kIntrinsified);
  if (call_kind == LocationSummary::kCallOnMainOnly) {
    locations->SetInAt(0, Location::RegisterOrConstant(invoke->InputAt(0)));
    locations->AddTemp(first_argument_location);
    locations->SetOut(return_location);
  } else {
    locations->SetInAt(0, Location::ConstantLocation(invoke->InputAt(0)->AsConstant()));
    locations->SetOut(Location::RequiresRegister());
  }
}

static int32_t GetIntegerCacheLowFromIntegerCache(Thread* self, ClassLinker* class_linker)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> cache_class =
      LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor);
  return GetIntegerCacheField(cache_class, kLowFieldName);
}

static uint32_t CalculateBootImageOffset(ObjPtr<mirror::Object> object)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  DCHECK(heap->ObjectIsInBootImageSpace(object));
  return reinterpret_cast<const uint8_t*>(object.Ptr()) - heap->GetBootImageSpaces()[0]->Begin();
}

inline IntrinsicVisitor::IntegerValueOfInfo::IntegerValueOfInfo()
    : value_offset(0),
      low(0),
      length(0u),
      integer_boot_image_offset(kInvalidReference),
      value_boot_image_reference(kInvalidReference) {}

IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo(
    HInvoke* invoke, const CompilerOptions& compiler_options) {
  // Note that we could cache all of the data looked up here, but there's no good
  // location for it. We don't want to add it to WellKnownClasses, to avoid creating global
  // jni values. Adding it as state to the compiler singleton seems like wrong
  // separation of concerns.
  // The need for this data should be pretty rare though.

  // Note that at this point we can no longer abort the code generation. Therefore,
  // we need to provide data that shall not lead to a crash even if the fields were
  // modified through reflection since ComputeIntegerValueOfLocations() when JITting.

  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  IntegerValueOfInfo info;
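  // When compiling the boot image itself, object addresses are not known yet, so the references
  // below are encoded as patches (IntrinsicObjects::EncodePatch) for the code generator to
  // resolve; otherwise they are plain offsets into the already-loaded boot image.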
  if (compiler_options.IsBootImage()) {
    ObjPtr<mirror::Class> integer_class =
        LookupInitializedClass(self, class_linker, kIntegerDescriptor);
    ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
    DCHECK(value_field != nullptr);
    info.value_offset = value_field->GetOffset().Uint32Value();
    ObjPtr<mirror::Class> cache_class =
        LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor);
    info.low = GetIntegerCacheField(cache_class, kLowFieldName);
    int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
    info.length = dchecked_integral_cast<uint32_t>(high - info.low + 1);

    info.integer_boot_image_offset = IntegerValueOfInfo::kInvalidReference;
    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
      if (index < static_cast<uint32_t>(info.length)) {
        info.value_boot_image_reference = IntrinsicObjects::EncodePatch(
            IntrinsicObjects::PatchType::kIntegerValueOfObject, index);
      } else {
        // Not in the cache.
        info.value_boot_image_reference = IntegerValueOfInfo::kInvalidReference;
      }
    } else {
      info.array_data_boot_image_reference =
          IntrinsicObjects::EncodePatch(IntrinsicObjects::PatchType::kIntegerValueOfArray);
    }
  } else {
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
    ObjPtr<mirror::Object> low_integer =
        IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
    ObjPtr<mirror::Class> integer_class = low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
    ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
    DCHECK(value_field != nullptr);
    info.value_offset = value_field->GetOffset().Uint32Value();
    if (runtime->UseJitCompilation()) {
      // Use the current `IntegerCache.low` for JIT to avoid truly surprising behavior if the
      // code messes up the `value` field in the lowest cached Integer using reflection.
      info.low = GetIntegerCacheLowFromIntegerCache(self, class_linker);
    } else {
      // For app AOT, the `low_integer->value` should be the same as `IntegerCache.low`.
      info.low = value_field->GetInt(low_integer);
      DCHECK_EQ(info.low, GetIntegerCacheLowFromIntegerCache(self, class_linker));
    }
    // Do not look at `IntegerCache.high`, use the immutable length of the cache array instead.
    info.length = dchecked_integral_cast<uint32_t>(
        IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects)->GetLength());

    info.integer_boot_image_offset = CalculateBootImageOffset(integer_class);
    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
      if (index < static_cast<uint32_t>(info.length)) {
        ObjPtr<mirror::Object> integer =
            IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, index);
        info.value_boot_image_reference = CalculateBootImageOffset(integer);
      } else {
        // Not in the cache.
        info.value_boot_image_reference = IntegerValueOfInfo::kInvalidReference;
      }
    } else {
      info.array_data_boot_image_reference =
          CalculateBootImageOffset(boot_image_live_objects) +
          IntrinsicObjects::GetIntegerValueOfArrayDataOffset(boot_image_live_objects).Uint32Value();
    }
  }

  return info;
}

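// Callers rely on the java.lang.String class object never being moved by the GC (for example,
// when generated code embeds a direct reference to it); verify that assumption in debug builds.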
void IntrinsicVisitor::AssertNonMovableStringClass() {
  if (kIsDebugBuild) {
    Thread* const self = Thread::Current();
    ReaderMutexLock mu(self, *Locks::mutator_lock_);
    ObjPtr<mirror::Class> string_class = GetClassRoot<art::mirror::String>();
    CHECK(!art::Runtime::Current()->GetHeap()->IsMovableObject(string_class));
  }
}

}  // namespace art