/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/utils.h"
#include "class_linker.h"
#include "dex/invoke_type.h"
#include "driver/compiler_options.h"
#include "gc/space/image_space.h"
#include "image-inl.h"
#include "intrinsic_objects.h"
#include "nodes.h"
#include "obj_ptr-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-current-inl.h"

namespace art {

// Check that intrinsic enum values fit within space set aside in ArtMethod modifier flags.
#define CHECK_INTRINSICS_ENUM_VALUES(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
  static_assert( \
      static_cast<uint32_t>(Intrinsics::k ## Name) <= (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits)), \
      "Intrinsics enumeration space overflow.");
#include "intrinsics_list.h"
  INTRINSICS_LIST(CHECK_INTRINSICS_ENUM_VALUES)
#undef INTRINSICS_LIST
#undef CHECK_INTRINSICS_ENUM_VALUES

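// For intuition, an illustrative sketch (the value below is hypothetical, not the actual
// constant): if kAccIntrinsicBits were 0x00FF0000, then CTZ(kAccIntrinsicBits) == 16 and the
// static_assert above limits every Intrinsics ordinal to (0x00FF0000 >> 16) == 255, i.e. the
// ordinal must fit in the bit field that ArtMethod reserves for it inside its 32-bit access
// flags. The actual mask is defined with the other kAcc* modifier flags in the runtime.
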
// Function that returns whether an intrinsic is static/direct or virtual.
static inline InvokeType GetIntrinsicInvokeType(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kInterface;  // Non-sensical for intrinsic.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return IsStatic;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kInterface;
}

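// To illustrate the X-macro pattern used above and in the getters below (the entry shown is
// a sketch; the authoritative entries live in intrinsics_list.h): for a list entry along the
// lines of
//
//   V(FloatFloatToRawIntBits, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow, ...)
//
// the OPTIMIZING_INTRINSICS macro defined in GetIntrinsicInvokeType() expands to
//
//   case Intrinsics::kFloatFloatToRawIntBits:
//     return kStatic;
//
// so each property getter is simply a switch generated over the whole intrinsics list.
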
// Function that returns whether an intrinsic needs an environment or not.
static inline IntrinsicNeedsEnvironmentOrCache NeedsEnvironmentOrCache(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kNeedsEnvironmentOrCache;  // Non-sensical for intrinsic.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return NeedsEnvironmentOrCache;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kNeedsEnvironmentOrCache;
}

// Function that returns whether an intrinsic has side effects.
static inline IntrinsicSideEffects GetSideEffects(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kAllSideEffects;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return SideEffects;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kAllSideEffects;
}

// Function that returns whether an intrinsic can throw exceptions.
static inline IntrinsicExceptions GetExceptions(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kCanThrow;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return Exceptions;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kCanThrow;
}

static bool CheckInvokeType(Intrinsics intrinsic, HInvoke* invoke)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Whenever the intrinsic is marked as static, report an error if we find an InvokeVirtual.
  //
  // Whenever the intrinsic is marked as direct and we find an InvokeVirtual, a devirtualization
  // failure occurred. We might be in a situation where we have inlined a method that calls an
  // intrinsic, but that method is in a different dex file on which we do not have a
  // verified_method that would have helped the compiler driver sharpen the call. In that case,
  // make sure that the intrinsic is actually for some final method (or in a final class), as
  // otherwise the intrinsics setup is broken.
  //
  // In the other direction, we have intrinsics for virtual functions that will perform a check
  // inline. If the precise type is known, however, the instruction will be sharpened to an
  // InvokeStaticOrDirect.
  InvokeType intrinsic_type = GetIntrinsicInvokeType(intrinsic);
  InvokeType invoke_type = invoke->GetInvokeType();

  switch (intrinsic_type) {
    case kStatic:
      return (invoke_type == kStatic);

    case kDirect:
      if (invoke_type == kDirect) {
        return true;
      }
      if (invoke_type == kVirtual) {
        ArtMethod* art_method = invoke->GetResolvedMethod();
        return (art_method->IsFinal() || art_method->GetDeclaringClass()->IsFinal());
      }
      return false;

    case kVirtual:
      // Call might be devirtualized.
      return (invoke_type == kVirtual || invoke_type == kDirect || invoke_type == kInterface);

    case kSuper:
    case kInterface:
    case kPolymorphic:
    case kCustom:
      return false;
  }
  LOG(FATAL) << "Unknown intrinsic invoke type: " << intrinsic_type;
  UNREACHABLE();
}

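// A hypothetical example of the kDirect branch above: an intrinsic declared direct may still
// be reached through an HInvokeVirtual when the calling method was inlined from another dex
// file without sharpening data. As long as the resolved method is final, or sits in a final
// class such as java.lang.String, the invoke cannot actually dispatch to an override, so the
// intrinsic remains safe to apply.
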
bool IntrinsicsRecognizer::Recognize(HInvoke* invoke,
                                     ArtMethod* art_method,
                                     /*out*/ bool* wrong_invoke_type) {
  if (art_method == nullptr) {
    art_method = invoke->GetResolvedMethod();
  }
  *wrong_invoke_type = false;
  if (art_method == nullptr || !art_method->IsIntrinsic()) {
    return false;
  }

  // TODO: b/65872996 The intent is that polymorphic signature methods should
  // be compiler intrinsics. At present, they are only interpreter intrinsics.
  if (art_method->IsPolymorphicSignature()) {
    return false;
  }

  Intrinsics intrinsic = static_cast<Intrinsics>(art_method->GetIntrinsic());
  if (CheckInvokeType(intrinsic, invoke) == false) {
    *wrong_invoke_type = true;
    return false;
  }

  invoke->SetIntrinsic(intrinsic,
                       NeedsEnvironmentOrCache(intrinsic),
                       GetSideEffects(intrinsic),
                       GetExceptions(intrinsic));
  return true;
}

bool IntrinsicsRecognizer::Run() {
  bool did_recognize = false;
  ScopedObjectAccess soa(Thread::Current());
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    for (HInstructionIterator inst_it(block->GetInstructions()); !inst_it.Done();
         inst_it.Advance()) {
      HInstruction* inst = inst_it.Current();
      if (inst->IsInvoke()) {
        bool wrong_invoke_type = false;
        if (Recognize(inst->AsInvoke(), /* art_method */ nullptr, &wrong_invoke_type)) {
          did_recognize = true;
          MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized);
        } else if (wrong_invoke_type) {
          LOG(WARNING)
              << "Found an intrinsic with unexpected invoke type: "
              << inst->AsInvoke()->GetResolvedMethod()->PrettyMethod() << " "
              << inst->DebugName();
        }
      }
    }
  }
  return did_recognize;
}

std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic) {
  switch (intrinsic) {
    case Intrinsics::kNone:
      os << "None";
      break;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      os << # Name; \
      break;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return os;
}

static const char kIntegerCacheDescriptor[] = "Ljava/lang/Integer$IntegerCache;";
static const char kIntegerDescriptor[] = "Ljava/lang/Integer;";
static const char kIntegerArrayDescriptor[] = "[Ljava/lang/Integer;";
static const char kLowFieldName[] = "low";
static const char kHighFieldName[] = "high";
static const char kValueFieldName[] = "value";

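// These descriptors and field names mirror the libcore side of the Integer.valueOf() cache:
// java.lang.Integer$IntegerCache holds the static fields `low`, `high` and `cache` (an
// Integer[] covering the values [low, high]), and java.lang.Integer holds the instance field
// `value`. The helpers below read those fields through the runtime rather than hard-coding
// their offsets.
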
static ObjPtr<mirror::ObjectArray<mirror::Object>> GetBootImageLiveObjects()
    REQUIRES_SHARED(Locks::mutator_lock_) {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  const std::vector<gc::space::ImageSpace*>& boot_image_spaces = heap->GetBootImageSpaces();
  DCHECK(!boot_image_spaces.empty());
  const ImageHeader& main_header = boot_image_spaces[0]->GetImageHeader();
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
          main_header.GetImageRoot<kWithoutReadBarrier>(ImageHeader::kBootImageLiveObjects));
  DCHECK(boot_image_live_objects != nullptr);
  DCHECK(heap->ObjectIsInBootImageSpace(boot_image_live_objects));
  return boot_image_live_objects;
}

static ObjPtr<mirror::Class> LookupInitializedClass(Thread* self,
                                                    ClassLinker* class_linker,
                                                    const char* descriptor)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> klass =
      class_linker->LookupClass(self, descriptor, /* class_loader */ nullptr);
  DCHECK(klass != nullptr);
  DCHECK(klass->IsInitialized());
  return klass;
}

static ObjPtr<mirror::ObjectArray<mirror::Object>> GetIntegerCacheArray(
    ObjPtr<mirror::Class> cache_class) REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtField* cache_field = cache_class->FindDeclaredStaticField("cache", kIntegerArrayDescriptor);
  DCHECK(cache_field != nullptr);
  return ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(cache_field->GetObject(cache_class));
}

static int32_t GetIntegerCacheField(ObjPtr<mirror::Class> cache_class, const char* field_name)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtField* field = cache_class->FindDeclaredStaticField(field_name, "I");
  DCHECK(field != nullptr);
  return field->GetInt(cache_class);
}

static bool CheckIntegerCache(Thread* self,
                              ClassLinker* class_linker,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_cache)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(boot_image_cache != nullptr);

  // Since we have a cache in the boot image, both java.lang.Integer and
  // java.lang.Integer$IntegerCache must be initialized in the boot image.
  ObjPtr<mirror::Class> cache_class =
      LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor);
  ObjPtr<mirror::Class> integer_class =
      LookupInitializedClass(self, class_linker, kIntegerDescriptor);

  // Check that the current cache is the same as the `boot_image_cache`.
  ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = GetIntegerCacheArray(cache_class);
  if (current_cache != boot_image_cache) {
    return false;  // Messed up IntegerCache.cache.
  }

  // Check that the range matches the boot image cache length.
  int32_t low = GetIntegerCacheField(cache_class, kLowFieldName);
  int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
  if (boot_image_cache->GetLength() != high - low + 1) {
    return false;  // Messed up IntegerCache.low or IntegerCache.high.
  }

  // Check that the elements match the boot image intrinsic objects and check their values as well.
  ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
  DCHECK(value_field != nullptr);
  for (int32_t i = 0, len = boot_image_cache->GetLength(); i != len; ++i) {
    ObjPtr<mirror::Object> boot_image_object =
        IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, i);
    DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boot_image_object));
    // No need for read barrier for comparison with a boot image object.
    ObjPtr<mirror::Object> current_object =
        boot_image_cache->GetWithoutChecks<kVerifyNone, kWithoutReadBarrier>(i);
    if (boot_image_object != current_object) {
      return false;  // Messed up IntegerCache.cache[i].
    }
    if (value_field->GetInt(boot_image_object) != low + i) {
      return false;  // Messed up IntegerCache.cache[i].value.
    }
  }

  return true;
}

void IntrinsicVisitor::ComputeIntegerValueOfLocations(HInvoke* invoke,
                                                      CodeGenerator* codegen,
                                                      Location return_location,
                                                      Location first_argument_location) {
  // The intrinsic will call if it needs to allocate a j.l.Integer.
  LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
  const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
  if (compiler_options.IsBootImage()) {
    // Piggyback on the method load kind to determine whether we can use PC-relative addressing.
    // This should cover both the testing config (non-PIC boot image) and codegens that reject
    // PC-relative load kinds and fall back to the runtime call.
    if (!invoke->AsInvokeStaticOrDirect()->HasPcRelativeMethodLoadKind()) {
      return;
    }
    if (!compiler_options.IsImageClass(kIntegerCacheDescriptor) ||
        !compiler_options.IsImageClass(kIntegerDescriptor)) {
      return;
    }
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    ObjPtr<mirror::Class> cache_class = class_linker->LookupClass(
        self, kIntegerCacheDescriptor, /* class_loader */ nullptr);
    DCHECK(cache_class != nullptr);
    if (UNLIKELY(!cache_class->IsInitialized())) {
      LOG(WARNING) << "Image class " << cache_class->PrettyDescriptor() << " is uninitialized.";
      return;
    }
    ObjPtr<mirror::Class> integer_class =
        class_linker->LookupClass(self, kIntegerDescriptor, /* class_loader */ nullptr);
    DCHECK(integer_class != nullptr);
    if (UNLIKELY(!integer_class->IsInitialized())) {
      LOG(WARNING) << "Image class " << integer_class->PrettyDescriptor() << " is uninitialized.";
      return;
    }
    int32_t low = GetIntegerCacheField(cache_class, kLowFieldName);
    int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
    if (kIsDebugBuild) {
      ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = GetIntegerCacheArray(cache_class);
      CHECK(current_cache != nullptr);
      CHECK_EQ(current_cache->GetLength(), high - low + 1);
      ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
      CHECK(value_field != nullptr);
      for (int32_t i = 0, len = current_cache->GetLength(); i != len; ++i) {
        ObjPtr<mirror::Object> current_object = current_cache->GetWithoutChecks(i);
        CHECK(current_object != nullptr);
        CHECK_EQ(value_field->GetInt(current_object), low + i);
      }
    }
    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) <
          static_cast<uint32_t>(high - low + 1)) {
        // No call, we shall use direct pointer to the Integer object.
        call_kind = LocationSummary::kNoCall;
      }
    }
  } else {
    Runtime* runtime = Runtime::Current();
    if (runtime->GetHeap()->GetBootImageSpaces().empty()) {
      return;  // Running without boot image, cannot use required boot image objects.
    }
    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
    ObjPtr<mirror::ObjectArray<mirror::Object>> cache =
        IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects);
    if (cache == nullptr) {
      return;  // No cache in the boot image.
    }
    if (runtime->UseJitCompilation()) {
      if (!CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache)) {
        return;  // The cache was somehow messed up, probably by using reflection.
      }
    } else {
      DCHECK(runtime->IsAotCompiler());
      DCHECK(CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache));
      if (invoke->InputAt(0)->IsIntConstant()) {
        int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
        // Retrieve the `value` from the lowest cached Integer.
        ObjPtr<mirror::Object> low_integer =
            IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
        ObjPtr<mirror::Class> integer_class =
            low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
        ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
        DCHECK(value_field != nullptr);
        int32_t low = value_field->GetInt(low_integer);
        if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) <
            static_cast<uint32_t>(cache->GetLength())) {
          // No call, we shall use direct pointer to the Integer object. Note that we cannot
          // do this for JIT as the "low" can change through reflection before emitting the code.
          call_kind = LocationSummary::kNoCall;
        }
      }
    }
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
  LocationSummary* locations = new (allocator) LocationSummary(invoke, call_kind, kIntrinsified);
  if (call_kind == LocationSummary::kCallOnMainOnly) {
    locations->SetInAt(0, Location::RegisterOrConstant(invoke->InputAt(0)));
    locations->AddTemp(first_argument_location);
    locations->SetOut(return_location);
  } else {
    locations->SetInAt(0, Location::ConstantLocation(invoke->InputAt(0)->AsConstant()));
    locations->SetOut(Location::RequiresRegister());
  }
}

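// A quick worked example of the unsigned range check used twice above:
// `static_cast<uint32_t>(value) - static_cast<uint32_t>(low) < static_cast<uint32_t>(length)`
// folds the two-sided test `low <= value && value <= high` into a single unsigned comparison.
// With low = -128 and length = 256: value = 5 gives 5 - (-128) = 133 < 256 (in range), while
// value = -200 gives -72, which wraps to 4294967224 as uint32_t and fails the comparison
// (out of range).
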
static int32_t GetIntegerCacheLowFromIntegerCache(Thread* self, ClassLinker* class_linker)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> cache_class =
      LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor);
  return GetIntegerCacheField(cache_class, kLowFieldName);
}

static uint32_t CalculateBootImageOffset(ObjPtr<mirror::Object> object)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  DCHECK(heap->ObjectIsInBootImageSpace(object));
  return reinterpret_cast<const uint8_t*>(object.Ptr()) - heap->GetBootImageSpaces()[0]->Begin();
}

inline IntrinsicVisitor::IntegerValueOfInfo::IntegerValueOfInfo()
    : value_offset(0),
      low(0),
      length(0u),
      integer_boot_image_offset(kInvalidReference),
      value_boot_image_reference(kInvalidReference) {}

IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo(
    HInvoke* invoke, const CompilerOptions& compiler_options) {
  // Note that we could cache all of the data looked up here, but there's no good
  // location for it. We don't want to add it to WellKnownClasses, to avoid creating global
  // jni values. Adding it as state to the compiler singleton seems like wrong
  // separation of concerns.
  // The need for this data should be pretty rare though.

  // Note that at this point we can no longer abort the code generation. Therefore,
  // we need to provide data that shall not lead to a crash even if the fields were
  // modified through reflection since ComputeIntegerValueOfLocations() when JITting.

  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  IntegerValueOfInfo info;
  if (compiler_options.IsBootImage()) {
    ObjPtr<mirror::Class> integer_class =
        LookupInitializedClass(self, class_linker, kIntegerDescriptor);
    ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
    DCHECK(value_field != nullptr);
    info.value_offset = value_field->GetOffset().Uint32Value();
    ObjPtr<mirror::Class> cache_class =
        LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor);
    info.low = GetIntegerCacheField(cache_class, kLowFieldName);
    int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
    info.length = dchecked_integral_cast<uint32_t>(high - info.low + 1);

    info.integer_boot_image_offset = IntegerValueOfInfo::kInvalidReference;
    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
      if (index < static_cast<uint32_t>(info.length)) {
        info.value_boot_image_reference = IntrinsicObjects::EncodePatch(
            IntrinsicObjects::PatchType::kIntegerValueOfObject, index);
      } else {
        // Not in the cache.
        info.value_boot_image_reference = IntegerValueOfInfo::kInvalidReference;
      }
    } else {
      info.array_data_boot_image_reference =
          IntrinsicObjects::EncodePatch(IntrinsicObjects::PatchType::kIntegerValueOfArray);
    }
  } else {
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
    ObjPtr<mirror::Object> low_integer =
        IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
    ObjPtr<mirror::Class> integer_class = low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
    ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
    DCHECK(value_field != nullptr);
    info.value_offset = value_field->GetOffset().Uint32Value();
    if (runtime->UseJitCompilation()) {
      // Use the current `IntegerCache.low` for JIT to avoid truly surprising behavior if the
      // code messes up the `value` field in the lowest cached Integer using reflection.
      info.low = GetIntegerCacheLowFromIntegerCache(self, class_linker);
    } else {
      // For app AOT, the `low_integer->value` should be the same as `IntegerCache.low`.
      info.low = value_field->GetInt(low_integer);
      DCHECK_EQ(info.low, GetIntegerCacheLowFromIntegerCache(self, class_linker));
    }
    // Do not look at `IntegerCache.high`, use the immutable length of the cache array instead.
    info.length = dchecked_integral_cast<uint32_t>(
        IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects)->GetLength());

    info.integer_boot_image_offset = CalculateBootImageOffset(integer_class);
    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
      if (index < static_cast<uint32_t>(info.length)) {
        ObjPtr<mirror::Object> integer =
            IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, index);
        info.value_boot_image_reference = CalculateBootImageOffset(integer);
      } else {
        // Not in the cache.
        info.value_boot_image_reference = IntegerValueOfInfo::kInvalidReference;
      }
    } else {
      info.array_data_boot_image_reference =
          CalculateBootImageOffset(boot_image_live_objects) +
          IntrinsicObjects::GetIntegerValueOfArrayDataOffset(boot_image_live_objects).Uint32Value();
    }
  }

  return info;
}

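// A rough sketch (assumed usage, not actual backend code) of how an architecture-specific
// code generator might consume the returned info when emitting Integer.valueOf():
//
//   IntegerValueOfInfo info = ComputeIntegerValueOfInfo(invoke, codegen->GetCompilerOptions());
//   if (invoke->InputAt(0)->IsIntConstant()) {
//     if (info.value_boot_image_reference != IntegerValueOfInfo::kInvalidReference) {
//       // Load the pre-boxed boot image Integer directly; no call, no allocation.
//     } else {
//       // Constant outside [low, low + length): allocate a new Integer and store the
//       // constant into it at info.value_offset.
//     }
//   } else {
//     // Compute index = value - info.low; if index < info.length, load the boxed object
//     // from the cache array (info.array_data_boot_image_reference), otherwise allocate.
//   }
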
}  // namespace art