/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/utils.h"
#include "class_linker.h"
#include "dex/invoke_type.h"
#include "driver/compiler_options.h"
#include "gc/space/image_space.h"
#include "image-inl.h"
#include "intrinsic_objects.h"
#include "nodes.h"
#include "obj_ptr-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-current-inl.h"

namespace art {

// Check that intrinsic enum values fit within space set aside in ArtMethod modifier flags.
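// CTZ(kAccIntrinsicBits) is the number of trailing zero bits in the mask, so shifting the mask
// right by that amount yields the largest value the intrinsic bit field can hold.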
#define CHECK_INTRINSICS_ENUM_VALUES(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
  static_assert( \
      static_cast<uint32_t>(Intrinsics::k ## Name) <= (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits)), \
      "Intrinsics enumeration space overflow.");
#include "intrinsics_list.h"
  INTRINSICS_LIST(CHECK_INTRINSICS_ENUM_VALUES)
#undef INTRINSICS_LIST
#undef CHECK_INTRINSICS_ENUM_VALUES

// Function that returns whether an intrinsic is static/direct or virtual.
static inline InvokeType GetIntrinsicInvokeType(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kInterface;  // Non-sensical for intrinsic.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return IsStatic;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kInterface;
}

// Function that returns whether an intrinsic needs an environment or not.
static inline IntrinsicNeedsEnvironmentOrCache NeedsEnvironmentOrCache(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kNeedsEnvironmentOrCache;  // Non-sensical for intrinsic.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return NeedsEnvironmentOrCache;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kNeedsEnvironmentOrCache;
}

// Function that returns whether an intrinsic has side effects.
static inline IntrinsicSideEffects GetSideEffects(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kAllSideEffects;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return SideEffects;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kAllSideEffects;
}

// Function that returns whether an intrinsic can throw exceptions.
static inline IntrinsicExceptions GetExceptions(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kCanThrow;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return Exceptions;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return kCanThrow;
}

static bool CheckInvokeType(Intrinsics intrinsic, HInvoke* invoke)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Whenever the intrinsic is marked as static, report an error if we find an InvokeVirtual.
  //
  // Whenever the intrinsic is marked as direct and we find an InvokeVirtual, a devirtualization
  // failure occurred. We might be in a situation where we have inlined a method that calls an
  // intrinsic, but that method is in a different dex file on which we do not have a
  // verified_method that would have helped the compiler driver sharpen the call. In that case,
  // make sure that the intrinsic is actually for some final method (or in a final class), as
  // otherwise the intrinsics setup is broken.
  //
  // For intrinsics marked as virtual, we have intrinsics for virtual functions that will perform
  // a check inline. If the precise type is known, however, the instruction will be sharpened to
  // an InvokeStaticOrDirect.
  InvokeType intrinsic_type = GetIntrinsicInvokeType(intrinsic);
  InvokeType invoke_type = invoke->GetInvokeType();

  switch (intrinsic_type) {
    case kStatic:
      return (invoke_type == kStatic);

    case kDirect:
      if (invoke_type == kDirect) {
        return true;
      }
      if (invoke_type == kVirtual) {
        ArtMethod* art_method = invoke->GetResolvedMethod();
        return (art_method->IsFinal() || art_method->GetDeclaringClass()->IsFinal());
      }
      return false;

    case kVirtual:
      // Call might be devirtualized.
      return (invoke_type == kVirtual || invoke_type == kDirect || invoke_type == kInterface);

    case kSuper:
    case kInterface:
    case kPolymorphic:
    case kCustom:
      return false;
  }
  LOG(FATAL) << "Unknown intrinsic invoke type: " << intrinsic_type;
  UNREACHABLE();
}

bool IntrinsicsRecognizer::Recognize(HInvoke* invoke,
                                     ArtMethod* art_method,
                                     /*out*/ bool* wrong_invoke_type) {
  if (art_method == nullptr) {
    art_method = invoke->GetResolvedMethod();
  }
  *wrong_invoke_type = false;
  if (art_method == nullptr || !art_method->IsIntrinsic()) {
    return false;
  }

  // TODO: b/65872996 The intent is that polymorphic signature methods should
  // be compiler intrinsics. At present, they are only interpreter intrinsics.
  if (art_method->IsPolymorphicSignature()) {
    return false;
  }

  Intrinsics intrinsic = static_cast<Intrinsics>(art_method->GetIntrinsic());
  if (!CheckInvokeType(intrinsic, invoke)) {
    *wrong_invoke_type = true;
    return false;
  }

  invoke->SetIntrinsic(intrinsic,
                       NeedsEnvironmentOrCache(intrinsic),
                       GetSideEffects(intrinsic),
                       GetExceptions(intrinsic));
  return true;
}

bool IntrinsicsRecognizer::Run() {
  bool didRecognize = false;
  ScopedObjectAccess soa(Thread::Current());
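  // Walk the graph in reverse post order and try to recognize each invoke as an intrinsic.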
  for (HBasicBlock* block : graph_->GetReversePostOrder()) {
    for (HInstructionIterator inst_it(block->GetInstructions()); !inst_it.Done();
         inst_it.Advance()) {
      HInstruction* inst = inst_it.Current();
      if (inst->IsInvoke()) {
        bool wrong_invoke_type = false;
        if (Recognize(inst->AsInvoke(), /* art_method */ nullptr, &wrong_invoke_type)) {
          didRecognize = true;
          MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized);
        } else if (wrong_invoke_type) {
          LOG(WARNING)
              << "Found an intrinsic with unexpected invoke type: "
              << inst->AsInvoke()->GetResolvedMethod()->PrettyMethod() << " "
              << inst->DebugName();
        }
      }
    }
  }
  return didRecognize;
}

std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic) {
  switch (intrinsic) {
    case Intrinsics::kNone:
      os << "None";
      break;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      os << # Name; \
      break;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  return os;
}

static ObjPtr<mirror::ObjectArray<mirror::Object>> GetBootImageLiveObjects()
    REQUIRES_SHARED(Locks::mutator_lock_) {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  const std::vector<gc::space::ImageSpace*>& boot_image_spaces = heap->GetBootImageSpaces();
  DCHECK(!boot_image_spaces.empty());
  const ImageHeader& main_header = boot_image_spaces[0]->GetImageHeader();
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
          main_header.GetImageRoot<kWithoutReadBarrier>(ImageHeader::kBootImageLiveObjects));
  DCHECK(boot_image_live_objects != nullptr);
  DCHECK(heap->ObjectIsInBootImageSpace(boot_image_live_objects));
  return boot_image_live_objects;
}

static bool CheckIntegerCache(Thread* self,
                              ClassLinker* class_linker,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_cache)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(boot_image_cache != nullptr);

  // Since we have a cache in the boot image, both java.lang.Integer and
  // java.lang.Integer$IntegerCache must be initialized in the boot image.
  ObjPtr<mirror::Class> cache_class = class_linker->LookupClass(
      self, "Ljava/lang/Integer$IntegerCache;", /* class_loader */ nullptr);
  DCHECK(cache_class != nullptr);
  DCHECK(cache_class->IsInitialized());
  ObjPtr<mirror::Class> integer_class =
      class_linker->LookupClass(self, "Ljava/lang/Integer;", /* class_loader */ nullptr);
  DCHECK(integer_class != nullptr);
  DCHECK(integer_class->IsInitialized());

  // Check that the current cache is the same as the `boot_image_cache`.
  ArtField* cache_field = cache_class->FindDeclaredStaticField("cache", "[Ljava/lang/Integer;");
  DCHECK(cache_field != nullptr);
  ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(cache_field->GetObject(cache_class));
  if (current_cache != boot_image_cache) {
    return false;  // Messed up IntegerCache.cache.
  }

  // Check that the range matches the boot image cache length.
  ArtField* low_field = cache_class->FindDeclaredStaticField("low", "I");
  DCHECK(low_field != nullptr);
  int32_t low = low_field->GetInt(cache_class);
  ArtField* high_field = cache_class->FindDeclaredStaticField("high", "I");
  DCHECK(high_field != nullptr);
  int32_t high = high_field->GetInt(cache_class);
  if (boot_image_cache->GetLength() != high - low + 1) {
    return false;  // Messed up IntegerCache.low or IntegerCache.high.
  }

  // Check that the elements match the boot image intrinsic objects and check their values as well.
  ArtField* value_field = integer_class->FindDeclaredInstanceField("value", "I");
  DCHECK(value_field != nullptr);
  for (int32_t i = 0, len = boot_image_cache->GetLength(); i != len; ++i) {
    ObjPtr<mirror::Object> boot_image_object =
        IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, i);
    DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boot_image_object));
    // No need for read barrier for comparison with a boot image object.
    ObjPtr<mirror::Object> current_object =
        boot_image_cache->GetWithoutChecks<kVerifyNone, kWithoutReadBarrier>(i);
    if (boot_image_object != current_object) {
      return false;  // Messed up IntegerCache.cache[i].
    }
    if (value_field->GetInt(boot_image_object) != low + i) {
      return false;  // Messed up IntegerCache.cache[i].value.
    }
  }

  return true;
}

void IntrinsicVisitor::ComputeIntegerValueOfLocations(HInvoke* invoke,
                                                      CodeGenerator* codegen,
                                                      Location return_location,
                                                      Location first_argument_location) {
  if (codegen->GetCompilerOptions().IsBootImage()) {
    // TODO: Implement for boot image. We need access to CompilerDriver::IsImageClass()
    // to verify that the IntegerCache shall be in the image.
    return;
  }
  Runtime* runtime = Runtime::Current();
  gc::Heap* heap = runtime->GetHeap();
  if (heap->GetBootImageSpaces().empty()) {
    return;  // Running without boot image, cannot use required boot image objects.
  }

  // The intrinsic will call the runtime if it needs to allocate a j.l.Integer.
  LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
  {
    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
    ObjPtr<mirror::ObjectArray<mirror::Object>> cache =
        IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects);
    if (cache == nullptr) {
      return;  // No cache in the boot image.
    }
    if (runtime->UseJitCompilation()) {
      if (!CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache)) {
        return;  // The cache was somehow messed up, probably by using reflection.
      }
    } else {
      DCHECK(runtime->IsAotCompiler());
      DCHECK(CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache));
      if (invoke->InputAt(0)->IsIntConstant()) {
        int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
        // Retrieve the `value` from the lowest cached Integer.
        ObjPtr<mirror::Object> low_integer =
            IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
        ObjPtr<mirror::Class> integer_class =
            low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
        ArtField* value_field = integer_class->FindDeclaredInstanceField("value", "I");
        DCHECK(value_field != nullptr);
        int32_t low = value_field->GetInt(low_integer);
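        // A single unsigned comparison covers both bounds: values below `low` wrap around to
        // large unsigned numbers and fail the length check.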
        if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) <
            static_cast<uint32_t>(cache->GetLength())) {
          // No call, we shall use direct pointer to the Integer object. Note that we cannot
          // do this for JIT as the "low" can change through reflection before emitting the code.
          call_kind = LocationSummary::kNoCall;
        }
      }
    }
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
  LocationSummary* locations = new (allocator) LocationSummary(invoke, call_kind, kIntrinsified);
  if (call_kind == LocationSummary::kCallOnMainOnly) {
    locations->SetInAt(0, Location::RegisterOrConstant(invoke->InputAt(0)));
    locations->AddTemp(first_argument_location);
    locations->SetOut(return_location);
  } else {
    locations->SetInAt(0, Location::ConstantLocation(invoke->InputAt(0)->AsConstant()));
    locations->SetOut(Location::RequiresRegister());
  }
}

static int32_t GetIntegerCacheLowFromIntegerCache(Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> cache_class = Runtime::Current()->GetClassLinker()->LookupClass(
      self, "Ljava/lang/Integer$IntegerCache;", /* class_loader */ nullptr);
  DCHECK(cache_class != nullptr);
  DCHECK(cache_class->IsInitialized());
  ArtField* low_field = cache_class->FindDeclaredStaticField("low", "I");
  DCHECK(low_field != nullptr);
  return low_field->GetInt(cache_class);
}

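// Returns the offset of `object` from the start of the main (first) boot image space.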
static uint32_t CalculateBootImageOffset(ObjPtr<mirror::Object> object)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  DCHECK(heap->ObjectIsInBootImageSpace(object));
  return reinterpret_cast<const uint8_t*>(object.Ptr()) - heap->GetBootImageSpaces()[0]->Begin();
}

inline IntrinsicVisitor::IntegerValueOfInfo::IntegerValueOfInfo()
    : integer_boot_image_offset(0u),
      value_offset(0),
      low(0),
      length(0u),
      value_boot_image_offset(0u) {}

IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo(HInvoke* invoke) {
  // Note that we could cache all of the data looked up here, but there's no good
  // location for it. We don't want to add it to WellKnownClasses, to avoid creating global
  // jni values. Adding it as state to the compiler singleton seems like wrong
  // separation of concerns.
  // The need for this data should be pretty rare though.

  // Note that at this point we can no longer abort the code generation. Therefore,
  // we need to provide data that shall not lead to a crash even if the fields were
  // modified through reflection since ComputeIntegerValueOfLocations() when JITting.

  Runtime* runtime = Runtime::Current();
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
  ObjPtr<mirror::Object> low_integer =
      IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
  ObjPtr<mirror::Class> integer_class = low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
  ArtField* value_field = integer_class->FindDeclaredInstanceField("value", "I");
  DCHECK(value_field != nullptr);

  IntegerValueOfInfo info;
  info.integer_boot_image_offset = CalculateBootImageOffset(integer_class);
  info.value_offset = value_field->GetOffset().Uint32Value();
  if (runtime->UseJitCompilation()) {
    // Use the current `IntegerCache.low` for JIT to avoid truly surprising behavior if the
    // code messes up the `value` field in the lowest cached Integer using reflection.
    info.low = GetIntegerCacheLowFromIntegerCache(self);
  } else {
    // For AOT, the `low_integer->value` should be the same as `IntegerCache.low`.
    info.low = value_field->GetInt(low_integer);
    DCHECK_EQ(info.low, GetIntegerCacheLowFromIntegerCache(self));
  }
  // Do not look at `IntegerCache.high`, use the immutable length of the cache array instead.
  info.length = dchecked_integral_cast<uint32_t>(
      IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects)->GetLength());

  if (invoke->InputAt(0)->IsIntConstant()) {
    int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
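    // The unsigned subtraction folds the lower and upper bounds check into the single
    // comparison against `info.length` below.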
    uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
    if (index < static_cast<uint32_t>(info.length)) {
      ObjPtr<mirror::Object> integer =
          IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, index);
      DCHECK(runtime->GetHeap()->ObjectIsInBootImageSpace(integer));
      info.value_boot_image_offset = CalculateBootImageOffset(integer);
    } else {
      info.value_boot_image_offset = 0u;  // Not in the cache.
    }
  } else {
    info.array_data_boot_image_offset =
        CalculateBootImageOffset(boot_image_live_objects) +
        IntrinsicObjects::GetIntegerValueOfArrayDataOffset(boot_image_live_objects).Uint32Value();
  }

  return info;
}

}  // namespace art