Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2015 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "intrinsics.h" |
| 18 | |
Andreas Gampe | a1d2f95 | 2017-04-20 22:53:58 -0700 | [diff] [blame] | 19 | #include "art_field-inl.h" |
Andreas Gampe | c6ea7d0 | 2017-02-01 16:46:28 -0800 | [diff] [blame] | 20 | #include "art_method-inl.h" |
David Sehr | c431b9d | 2018-03-02 12:01:51 -0800 | [diff] [blame] | 21 | #include "base/utils.h" |
Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 22 | #include "class_linker.h" |
David Sehr | 8c0961f | 2018-01-23 16:11:38 -0800 | [diff] [blame] | 23 | #include "dex/invoke_type.h" |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 24 | #include "driver/compiler_options.h" |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 25 | #include "gc/space/image_space.h" |
| 26 | #include "image-inl.h" |
| 27 | #include "intrinsic_objects.h" |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 28 | #include "nodes.h" |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 29 | #include "obj_ptr-inl.h" |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 30 | #include "scoped_thread_state_change-inl.h" |
Andreas Gampe | b486a98 | 2017-06-01 13:45:54 -0700 | [diff] [blame] | 31 | #include "thread-current-inl.h" |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 32 | |
| 33 | namespace art { |
| 34 | |
Orion Hodson | cfcc9cf | 2017-09-29 15:07:27 +0100 | [diff] [blame] | 35 | // Check that intrinsic enum values fit within space set aside in ArtMethod modifier flags. |
| 36 | #define CHECK_INTRINSICS_ENUM_VALUES(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \ |
| 37 | static_assert( \ |
| 38 | static_cast<uint32_t>(Intrinsics::k ## Name) <= (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits)), \ |
Orion Hodson | 4a4610a | 2017-09-28 16:57:55 +0100 | [diff] [blame] | 39 | "Instrinsics enumeration space overflow."); |
Orion Hodson | cfcc9cf | 2017-09-29 15:07:27 +0100 | [diff] [blame] | 40 | #include "intrinsics_list.h" |
| 41 | INTRINSICS_LIST(CHECK_INTRINSICS_ENUM_VALUES) |
| 42 | #undef INTRINSICS_LIST |
| 43 | #undef CHECK_INTRINSICS_ENUM_VALUES |
| 44 | |
// Function that returns whether an intrinsic is static/direct or virtual.
// The returned value comes from the IsStatic column of intrinsics_list.h,
// which is an InvokeType despite the column name.
static inline InvokeType GetIntrinsicInvokeType(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kInterface;  // Non-sensical for intrinsic.
// X-macro: expands to one `case` per intrinsic defined in intrinsics_list.h.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return IsStatic;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for a valid enum value; keeps the compiler satisfied.
  return kInterface;
}
| 60 | |
// Function that returns whether an intrinsic needs an environment or not.
// The returned value comes from the NeedsEnvironmentOrCache column of
// intrinsics_list.h.
static inline IntrinsicNeedsEnvironmentOrCache NeedsEnvironmentOrCache(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kNeedsEnvironmentOrCache;  // Non-sensical for intrinsic.
// X-macro: expands to one `case` per intrinsic defined in intrinsics_list.h.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return NeedsEnvironmentOrCache;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for a valid enum value; conservative default.
  return kNeedsEnvironmentOrCache;
}
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 76 | |
// Function that returns whether an intrinsic has side effects.
// The returned value comes from the SideEffects column of intrinsics_list.h.
static inline IntrinsicSideEffects GetSideEffects(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kAllSideEffects;  // Conservative answer for the non-intrinsic case.
// X-macro: expands to one `case` per intrinsic defined in intrinsics_list.h.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return SideEffects;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for a valid enum value; conservative default.
  return kAllSideEffects;
}
| 92 | |
// Function that returns whether an intrinsic can throw exceptions.
// The returned value comes from the Exceptions column of intrinsics_list.h.
static inline IntrinsicExceptions GetExceptions(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kCanThrow;  // Conservative answer for the non-intrinsic case.
// X-macro: expands to one `case` per intrinsic defined in intrinsics_list.h.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return Exceptions;
#include "intrinsics_list.h"
    INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for a valid enum value; conservative default.
  return kCanThrow;
}
| 108 | |
Orion Hodson | 4c71d00 | 2017-11-29 11:03:25 +0000 | [diff] [blame] | 109 | static bool CheckInvokeType(Intrinsics intrinsic, HInvoke* invoke) |
| 110 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 111 | // Whenever the intrinsic is marked as static, report an error if we find an InvokeVirtual. |
| 112 | // |
| 113 | // Whenever the intrinsic is marked as direct and we find an InvokeVirtual, a devirtualization |
| 114 | // failure occured. We might be in a situation where we have inlined a method that calls an |
| 115 | // intrinsic, but that method is in a different dex file on which we do not have a |
| 116 | // verified_method that would have helped the compiler driver sharpen the call. In that case, |
| 117 | // make sure that the intrinsic is actually for some final method (or in a final class), as |
| 118 | // otherwise the intrinsics setup is broken. |
| 119 | // |
| 120 | // For the last direction, we have intrinsics for virtual functions that will perform a check |
| 121 | // inline. If the precise type is known, however, the instruction will be sharpened to an |
| 122 | // InvokeStaticOrDirect. |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 123 | InvokeType intrinsic_type = GetIntrinsicInvokeType(intrinsic); |
Nicolas Geoffray | 5e4e11e | 2016-09-22 13:17:41 +0100 | [diff] [blame] | 124 | InvokeType invoke_type = invoke->GetInvokeType(); |
Orion Hodson | cfcc9cf | 2017-09-29 15:07:27 +0100 | [diff] [blame] | 125 | |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 126 | switch (intrinsic_type) { |
| 127 | case kStatic: |
| 128 | return (invoke_type == kStatic); |
Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 129 | |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 130 | case kDirect: |
Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 131 | if (invoke_type == kDirect) { |
| 132 | return true; |
| 133 | } |
| 134 | if (invoke_type == kVirtual) { |
Nicolas Geoffray | 762869d | 2016-07-15 15:28:35 +0100 | [diff] [blame] | 135 | ArtMethod* art_method = invoke->GetResolvedMethod(); |
Nicolas Geoffray | 762869d | 2016-07-15 15:28:35 +0100 | [diff] [blame] | 136 | return (art_method->IsFinal() || art_method->GetDeclaringClass()->IsFinal()); |
Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 137 | } |
| 138 | return false; |
| 139 | |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 140 | case kVirtual: |
| 141 | // Call might be devirtualized. |
Mingyao Yang | 6b1aebe | 2017-11-27 15:39:04 -0800 | [diff] [blame] | 142 | return (invoke_type == kVirtual || invoke_type == kDirect || invoke_type == kInterface); |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 143 | |
Orion Hodson | b1b5206 | 2017-11-27 11:51:42 +0000 | [diff] [blame] | 144 | case kSuper: |
| 145 | case kInterface: |
| 146 | case kPolymorphic: |
Orion Hodson | 4c8e12e | 2018-05-18 08:33:20 +0100 | [diff] [blame] | 147 | case kCustom: |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 148 | return false; |
| 149 | } |
Orion Hodson | b1b5206 | 2017-11-27 11:51:42 +0000 | [diff] [blame] | 150 | LOG(FATAL) << "Unknown intrinsic invoke type: " << intrinsic_type; |
| 151 | UNREACHABLE(); |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 152 | } |
| 153 | |
Mingyao Yang | 6b1aebe | 2017-11-27 15:39:04 -0800 | [diff] [blame] | 154 | bool IntrinsicsRecognizer::Recognize(HInvoke* invoke, |
| 155 | ArtMethod* art_method, |
| 156 | /*out*/ bool* wrong_invoke_type) { |
| 157 | if (art_method == nullptr) { |
| 158 | art_method = invoke->GetResolvedMethod(); |
| 159 | } |
Orion Hodson | b1b5206 | 2017-11-27 11:51:42 +0000 | [diff] [blame] | 160 | *wrong_invoke_type = false; |
| 161 | if (art_method == nullptr || !art_method->IsIntrinsic()) { |
| 162 | return false; |
| 163 | } |
| 164 | |
Orion Hodson | 4c71d00 | 2017-11-29 11:03:25 +0000 | [diff] [blame] | 165 | // TODO: b/65872996 The intent is that polymorphic signature methods should |
| 166 | // be compiler intrinsics. At present, they are only interpreter intrinsics. |
| 167 | if (art_method->IsPolymorphicSignature()) { |
| 168 | return false; |
Aart Bik | f0010dd | 2017-11-21 16:31:53 -0800 | [diff] [blame] | 169 | } |
Orion Hodson | b1b5206 | 2017-11-27 11:51:42 +0000 | [diff] [blame] | 170 | |
| 171 | Intrinsics intrinsic = static_cast<Intrinsics>(art_method->GetIntrinsic()); |
| 172 | if (CheckInvokeType(intrinsic, invoke) == false) { |
| 173 | *wrong_invoke_type = true; |
| 174 | return false; |
| 175 | } |
| 176 | |
| 177 | invoke->SetIntrinsic(intrinsic, |
| 178 | NeedsEnvironmentOrCache(intrinsic), |
| 179 | GetSideEffects(intrinsic), |
| 180 | GetExceptions(intrinsic)); |
| 181 | return true; |
Aart Bik | f0010dd | 2017-11-21 16:31:53 -0800 | [diff] [blame] | 182 | } |
| 183 | |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 184 | bool IntrinsicsRecognizer::Run() { |
| 185 | bool didRecognize = false; |
Nicolas Geoffray | 762869d | 2016-07-15 15:28:35 +0100 | [diff] [blame] | 186 | ScopedObjectAccess soa(Thread::Current()); |
Vladimir Marko | 2c45bc9 | 2016-10-25 16:54:12 +0100 | [diff] [blame] | 187 | for (HBasicBlock* block : graph_->GetReversePostOrder()) { |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 188 | for (HInstructionIterator inst_it(block->GetInstructions()); !inst_it.Done(); |
| 189 | inst_it.Advance()) { |
| 190 | HInstruction* inst = inst_it.Current(); |
| 191 | if (inst->IsInvoke()) { |
Aart Bik | f0010dd | 2017-11-21 16:31:53 -0800 | [diff] [blame] | 192 | bool wrong_invoke_type = false; |
Mingyao Yang | 6b1aebe | 2017-11-27 15:39:04 -0800 | [diff] [blame] | 193 | if (Recognize(inst->AsInvoke(), /* art_method */ nullptr, &wrong_invoke_type)) { |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 194 | didRecognize = true; |
Aart Bik | f0010dd | 2017-11-21 16:31:53 -0800 | [diff] [blame] | 195 | MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized); |
| 196 | } else if (wrong_invoke_type) { |
| 197 | LOG(WARNING) |
| 198 | << "Found an intrinsic with unexpected invoke type: " |
| 199 | << inst->AsInvoke()->GetResolvedMethod()->PrettyMethod() << " " |
| 200 | << inst->DebugName(); |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 201 | } |
| 202 | } |
| 203 | } |
| 204 | } |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 205 | return didRecognize; |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 206 | } |
| 207 | |
| 208 | std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic) { |
| 209 | switch (intrinsic) { |
| 210 | case Intrinsics::kNone: |
David Brazdil | 109c89a | 2015-07-31 17:10:43 +0100 | [diff] [blame] | 211 | os << "None"; |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 212 | break; |
Nicolas Geoffray | 762869d | 2016-07-15 15:28:35 +0100 | [diff] [blame] | 213 | #define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \ |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 214 | case Intrinsics::k ## Name: \ |
| 215 | os << # Name; \ |
| 216 | break; |
| 217 | #include "intrinsics_list.h" |
Andreas Gampe | 8cf9cb3 | 2017-07-19 09:28:38 -0700 | [diff] [blame] | 218 | INTRINSICS_LIST(OPTIMIZING_INTRINSICS) |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 219 | #undef STATIC_INTRINSICS_LIST |
| 220 | #undef VIRTUAL_INTRINSICS_LIST |
| 221 | #undef OPTIMIZING_INTRINSICS |
| 222 | } |
| 223 | return os; |
| 224 | } |
| 225 | |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 226 | static ObjPtr<mirror::ObjectArray<mirror::Object>> GetBootImageLiveObjects() |
| 227 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 228 | gc::Heap* heap = Runtime::Current()->GetHeap(); |
| 229 | const std::vector<gc::space::ImageSpace*>& boot_image_spaces = heap->GetBootImageSpaces(); |
| 230 | DCHECK(!boot_image_spaces.empty()); |
| 231 | const ImageHeader& main_header = boot_image_spaces[0]->GetImageHeader(); |
| 232 | ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = |
| 233 | ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast( |
| 234 | main_header.GetImageRoot<kWithoutReadBarrier>(ImageHeader::kBootImageLiveObjects)); |
| 235 | DCHECK(boot_image_live_objects != nullptr); |
| 236 | DCHECK(heap->ObjectIsInBootImageSpace(boot_image_live_objects)); |
| 237 | return boot_image_live_objects; |
| 238 | } |
| 239 | |
// Verifies that the runtime's java.lang.Integer$IntegerCache still matches the
// cache recorded in the boot image (`boot_image_cache` from the boot image's
// live-objects array). Returns false if any of the cache array, its bounds
// (low/high), or the cached Integer values have been tampered with, e.g.
// through reflection.
static bool CheckIntegerCache(Thread* self,
                              ClassLinker* class_linker,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_cache)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(boot_image_cache != nullptr);

  // Since we have a cache in the boot image, both java.lang.Integer and
  // java.lang.Integer$IntegerCache must be initialized in the boot image.
  ObjPtr<mirror::Class> cache_class = class_linker->LookupClass(
      self, "Ljava/lang/Integer$IntegerCache;", /* class_loader */ nullptr);
  DCHECK(cache_class != nullptr);
  DCHECK(cache_class->IsInitialized());
  ObjPtr<mirror::Class> integer_class =
      class_linker->LookupClass(self, "Ljava/lang/Integer;", /* class_loader */ nullptr);
  DCHECK(integer_class != nullptr);
  DCHECK(integer_class->IsInitialized());

  // Check that the current cache is the same as the `boot_image_cache`.
  ArtField* cache_field = cache_class->FindDeclaredStaticField("cache", "[Ljava/lang/Integer;");
  DCHECK(cache_field != nullptr);
  ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(cache_field->GetObject(cache_class));
  if (current_cache != boot_image_cache) {
    return false;  // Messed up IntegerCache.cache.
  }

  // Check that the range matches the boot image cache length.
  ArtField* low_field = cache_class->FindDeclaredStaticField("low", "I");
  DCHECK(low_field != nullptr);
  int32_t low = low_field->GetInt(cache_class);
  ArtField* high_field = cache_class->FindDeclaredStaticField("high", "I");
  DCHECK(high_field != nullptr);
  int32_t high = high_field->GetInt(cache_class);
  if (boot_image_cache->GetLength() != high - low + 1) {
    return false;  // Messed up IntegerCache.low or IntegerCache.high.
  }

  // Check that the elements match the boot image intrinsic objects and check their values as well.
  ArtField* value_field = integer_class->FindDeclaredInstanceField("value", "I");
  DCHECK(value_field != nullptr);
  for (int32_t i = 0, len = boot_image_cache->GetLength(); i != len; ++i) {
    ObjPtr<mirror::Object> boot_image_object =
        IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, i);
    DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boot_image_object));
    // No need for read barrier for comparison with a boot image object.
    ObjPtr<mirror::Object> current_object =
        boot_image_cache->GetWithoutChecks<kVerifyNone, kWithoutReadBarrier>(i);
    if (boot_image_object != current_object) {
      return false;  // Messed up IntegerCache.cache[i]
    }
    if (value_field->GetInt(boot_image_object) != low + i) {
      return false;  // Messed up IntegerCache.cache[i].value.
    }
  }

  return true;
}
| 298 | |
// Sets up the LocationSummary for an Integer.valueOf() intrinsic call, or
// returns without one (leaving the invoke non-intrinsified) when the boot
// image cache cannot be relied upon. For an AOT-compiled constant argument
// inside the cache range, the call is elided entirely (kNoCall) and the cached
// Integer is referenced directly.
void IntrinsicVisitor::ComputeIntegerValueOfLocations(HInvoke* invoke,
                                                      CodeGenerator* codegen,
                                                      Location return_location,
                                                      Location first_argument_location) {
  if (codegen->GetCompilerOptions().IsBootImage()) {
    // TODO: Implement for boot image. We need access to CompilerDriver::IsImageClass()
    // to verify that the IntegerCache shall be in the image.
    return;
  }
  Runtime* runtime = Runtime::Current();
  gc::Heap* heap = runtime->GetHeap();
  if (heap->GetBootImageSpaces().empty()) {
    return;  // Running without boot image, cannot use required boot image objects.
  }

  // The intrinsic will call if it needs to allocate a j.l.Integer.
  LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
  {
    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
    ObjPtr<mirror::ObjectArray<mirror::Object>> cache =
        IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects);
    if (cache == nullptr) {
      return;  // No cache in the boot image.
    }
    if (runtime->UseJitCompilation()) {
      // JIT: reflection may have modified the cache after boot, so verify it.
      if (!CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache)) {
        return;  // The cache was somehow messed up, probably by using reflection.
      }
    } else {
      // AOT: the cache is expected to be pristine; check only in debug builds.
      DCHECK(runtime->IsAotCompiler());
      DCHECK(CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache));
      if (invoke->InputAt(0)->IsIntConstant()) {
        int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
        // Retrieve the `value` from the lowest cached Integer.
        ObjPtr<mirror::Object> low_integer =
            IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
        ObjPtr<mirror::Class> integer_class =
            low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
        ArtField* value_field = integer_class->FindDeclaredInstanceField("value", "I");
        DCHECK(value_field != nullptr);
        int32_t low = value_field->GetInt(low_integer);
        // Unsigned subtraction performs the in-range check in one comparison.
        if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) <
            static_cast<uint32_t>(cache->GetLength())) {
          // No call, we shall use direct pointer to the Integer object. Note that we cannot
          // do this for JIT as the "low" can change through reflection before emitting the code.
          call_kind = LocationSummary::kNoCall;
        }
      }
    }
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
  LocationSummary* locations = new (allocator) LocationSummary(invoke, call_kind, kIntrinsified);
  if (call_kind == LocationSummary::kCallOnMainOnly) {
    locations->SetInAt(0, Location::RegisterOrConstant(invoke->InputAt(0)));
    locations->AddTemp(first_argument_location);
    locations->SetOut(return_location);
  } else {
    // kNoCall: the argument is known constant and the result register is loaded
    // directly with the cached boot image object.
    locations->SetInAt(0, Location::ConstantLocation(invoke->InputAt(0)->AsConstant()));
    locations->SetOut(Location::RequiresRegister());
  }
}
| 363 | |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 364 | static int32_t GetIntegerCacheLowFromIntegerCache(Thread* self) |
| 365 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 366 | ObjPtr<mirror::Class> cache_class = Runtime::Current()->GetClassLinker()->LookupClass( |
| 367 | self, "Ljava/lang/Integer$IntegerCache;", /* class_loader */ nullptr); |
| 368 | DCHECK(cache_class != nullptr); |
| 369 | DCHECK(cache_class->IsInitialized()); |
| 370 | ArtField* low_field = cache_class->FindDeclaredStaticField("low", "I"); |
| 371 | DCHECK(low_field != nullptr); |
| 372 | return low_field->GetInt(cache_class); |
| 373 | } |
| 374 | |
| 375 | static uint32_t CalculateBootImageOffset(ObjPtr<mirror::Object> object) |
| 376 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 377 | gc::Heap* heap = Runtime::Current()->GetHeap(); |
| 378 | DCHECK(heap->ObjectIsInBootImageSpace(object)); |
| 379 | return reinterpret_cast<const uint8_t*>(object.Ptr()) - heap->GetBootImageSpaces()[0]->Begin(); |
| 380 | } |
| 381 | |
// Zero-initializes all IntegerValueOfInfo fields.
// NOTE(review): ComputeIntegerValueOfInfo() below writes either
// value_boot_image_offset or array_data_boot_image_offset but this ctor only
// initializes the former — presumably the two share storage (a union) in the
// header declaration; confirm there.
inline IntrinsicVisitor::IntegerValueOfInfo::IntegerValueOfInfo()
    : integer_boot_image_offset(0u),
      value_offset(0),
      low(0),
      length(0u),
      value_boot_image_offset(0u) {}
| 388 | |
// Gathers the data a code generator needs to emit an Integer.valueOf()
// intrinsic: the boot image offset of java.lang.Integer, the offset of its
// `value` field, the cache's low bound and length, and either the offset of
// the exact cached Integer (constant argument) or of the cache's array data
// (non-constant argument).
IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo(HInvoke* invoke) {
  // Note that we could cache all of the data looked up here. but there's no good
  // location for it. We don't want to add it to WellKnownClasses, to avoid creating global
  // jni values. Adding it as state to the compiler singleton seems like wrong
  // separation of concerns.
  // The need for this data should be pretty rare though.

  // Note that at this point we can no longer abort the code generation. Therefore,
  // we need to provide data that shall not lead to a crash even if the fields were
  // modified through reflection since ComputeIntegerValueOfLocations() when JITting.

  Runtime* runtime = Runtime::Current();
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
  // The first cached Integer gives us both the Integer class and the cache's
  // lowest value.
  ObjPtr<mirror::Object> low_integer =
      IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
  ObjPtr<mirror::Class> integer_class = low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
  ArtField* value_field = integer_class->FindDeclaredInstanceField("value", "I");
  DCHECK(value_field != nullptr);

  IntegerValueOfInfo info;
  info.integer_boot_image_offset = CalculateBootImageOffset(integer_class);
  info.value_offset = value_field->GetOffset().Uint32Value();
  if (runtime->UseJitCompilation()) {
    // Use the current `IntegerCache.low` for JIT to avoid truly surprising behavior if the
    // code messes up the `value` field in the lowest cached Integer using reflection.
    info.low = GetIntegerCacheLowFromIntegerCache(self);
  } else {
    // For AOT, the `low_integer->value` should be the same as `IntegerCache.low`.
    info.low = value_field->GetInt(low_integer);
    DCHECK_EQ(info.low, GetIntegerCacheLowFromIntegerCache(self));
  }
  // Do not look at `IntegerCache.high`, use the immutable length of the cache array instead.
  info.length = dchecked_integral_cast<uint32_t>(
      IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects)->GetLength());

  if (invoke->InputAt(0)->IsIntConstant()) {
    int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    // Unsigned subtraction performs the in-range check in one comparison.
    uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
    if (index < static_cast<uint32_t>(info.length)) {
      ObjPtr<mirror::Object> integer =
          IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, index);
      DCHECK(runtime->GetHeap()->ObjectIsInBootImageSpace(integer));
      info.value_boot_image_offset = CalculateBootImageOffset(integer);
    } else {
      info.value_boot_image_offset = 0u;  // Not in the cache.
    }
  } else {
    // Non-constant argument: the generated code indexes into the cache's array
    // data directly, so record the offset of the first element.
    info.array_data_boot_image_offset =
        CalculateBootImageOffset(boot_image_live_objects) +
        IntrinsicObjects::GetIntegerValueOfArrayDataOffset(boot_image_live_objects).Uint32Value();
  }

  return info;
}
| 445 | |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 446 | } // namespace art |