Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2015 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "intrinsics.h" |
| 18 | |
Andreas Gampe | a1d2f95 | 2017-04-20 22:53:58 -0700 | [diff] [blame] | 19 | #include "art_field-inl.h" |
Andreas Gampe | c6ea7d0 | 2017-02-01 16:46:28 -0800 | [diff] [blame] | 20 | #include "art_method-inl.h" |
David Sehr | c431b9d | 2018-03-02 12:01:51 -0800 | [diff] [blame] | 21 | #include "base/utils.h" |
Andreas Gampe | bfb5ba9 | 2015-09-01 15:45:02 +0000 | [diff] [blame] | 22 | #include "class_linker.h" |
David Sehr | 8c0961f | 2018-01-23 16:11:38 -0800 | [diff] [blame] | 23 | #include "dex/invoke_type.h" |
Nicolas Geoffray | 331605a | 2017-03-01 11:01:41 +0000 | [diff] [blame] | 24 | #include "driver/compiler_options.h" |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 25 | #include "gc/space/image_space.h" |
| 26 | #include "image-inl.h" |
| 27 | #include "intrinsic_objects.h" |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 28 | #include "nodes.h" |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 29 | #include "obj_ptr-inl.h" |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 30 | #include "scoped_thread_state_change-inl.h" |
Andreas Gampe | b486a98 | 2017-06-01 13:45:54 -0700 | [diff] [blame] | 31 | #include "thread-current-inl.h" |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 32 | |
| 33 | namespace art { |
| 34 | |
Orion Hodson | cfcc9cf | 2017-09-29 15:07:27 +0100 | [diff] [blame] | 35 | // Check that intrinsic enum values fit within space set aside in ArtMethod modifier flags. |
| 36 | #define CHECK_INTRINSICS_ENUM_VALUES(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \ |
| 37 | static_assert( \ |
| 38 | static_cast<uint32_t>(Intrinsics::k ## Name) <= (kAccIntrinsicBits >> CTZ(kAccIntrinsicBits)), \ |
Orion Hodson | 4a4610a | 2017-09-28 16:57:55 +0100 | [diff] [blame] | 39 | "Instrinsics enumeration space overflow."); |
Orion Hodson | cfcc9cf | 2017-09-29 15:07:27 +0100 | [diff] [blame] | 40 | #include "intrinsics_list.h" |
| 41 | INTRINSICS_LIST(CHECK_INTRINSICS_ENUM_VALUES) |
| 42 | #undef INTRINSICS_LIST |
| 43 | #undef CHECK_INTRINSICS_ENUM_VALUES |
| 44 | |
// Function that returns whether an intrinsic is static/direct or virtual.
// The answer comes from the `IsStatic` column of intrinsics_list.h; the
// X-macro below expands one switch case per intrinsic.
static inline InvokeType GetIntrinsicInvokeType(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kInterface;  // Non-sensical for intrinsic.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return IsStatic;
#include "intrinsics_list.h"
      INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for valid enum values; the switch above is exhaustive.
  return kInterface;
}
| 60 | |
// Function that returns whether an intrinsic needs an environment or not.
// The answer comes from the `NeedsEnvironmentOrCache` column of
// intrinsics_list.h; the X-macro below expands one switch case per intrinsic.
static inline IntrinsicNeedsEnvironmentOrCache NeedsEnvironmentOrCache(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kNeedsEnvironmentOrCache;  // Non-sensical for intrinsic.
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return NeedsEnvironmentOrCache;
#include "intrinsics_list.h"
      INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for valid enum values; conservative default.
  return kNeedsEnvironmentOrCache;
}
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 76 | |
// Function that returns whether an intrinsic has side effects.
// The answer comes from the `SideEffects` column of intrinsics_list.h;
// the X-macro below expands one switch case per intrinsic.
static inline IntrinsicSideEffects GetSideEffects(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kAllSideEffects;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return SideEffects;
#include "intrinsics_list.h"
      INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for valid enum values; conservative default.
  return kAllSideEffects;
}
| 92 | |
// Function that returns whether an intrinsic can throw exceptions.
// The answer comes from the `Exceptions` column of intrinsics_list.h;
// the X-macro below expands one switch case per intrinsic.
static inline IntrinsicExceptions GetExceptions(Intrinsics i) {
  switch (i) {
    case Intrinsics::kNone:
      return kCanThrow;
#define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \
    case Intrinsics::k ## Name: \
      return Exceptions;
#include "intrinsics_list.h"
      INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS
  }
  // Unreachable for valid enum values; conservative default.
  return kCanThrow;
}
| 108 | |
// Returns whether the invoke type seen in the graph is compatible with the
// invoke type declared for the intrinsic in intrinsics_list.h.
static bool CheckInvokeType(Intrinsics intrinsic, HInvoke* invoke)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Whenever the intrinsic is marked as static, report an error if we find an InvokeVirtual.
  //
  // Whenever the intrinsic is marked as direct and we find an InvokeVirtual, a devirtualization
  // failure occurred. We might be in a situation where we have inlined a method that calls an
  // intrinsic, but that method is in a different dex file on which we do not have a
  // verified_method that would have helped the compiler driver sharpen the call. In that case,
  // make sure that the intrinsic is actually for some final method (or in a final class), as
  // otherwise the intrinsics setup is broken.
  //
  // For the last direction, we have intrinsics for virtual functions that will perform a check
  // inline. If the precise type is known, however, the instruction will be sharpened to an
  // InvokeStaticOrDirect.
  InvokeType intrinsic_type = GetIntrinsicInvokeType(intrinsic);
  InvokeType invoke_type = invoke->GetInvokeType();

  switch (intrinsic_type) {
    case kStatic:
      return (invoke_type == kStatic);

    case kDirect:
      if (invoke_type == kDirect) {
        return true;
      }
      if (invoke_type == kVirtual) {
        // Failed devirtualization is acceptable only if the target cannot be
        // overridden (final method or final declaring class).
        ArtMethod* art_method = invoke->GetResolvedMethod();
        return (art_method->IsFinal() || art_method->GetDeclaringClass()->IsFinal());
      }
      return false;

    case kVirtual:
      // Call might be devirtualized.
      return (invoke_type == kVirtual || invoke_type == kDirect || invoke_type == kInterface);

    case kSuper:
    case kInterface:
    case kPolymorphic:
    case kCustom:
      // No intrinsics use these invoke types.
      return false;
  }
  LOG(FATAL) << "Unknown intrinsic invoke type: " << intrinsic_type;
  UNREACHABLE();
}
| 153 | |
Mingyao Yang | 6b1aebe | 2017-11-27 15:39:04 -0800 | [diff] [blame] | 154 | bool IntrinsicsRecognizer::Recognize(HInvoke* invoke, |
| 155 | ArtMethod* art_method, |
| 156 | /*out*/ bool* wrong_invoke_type) { |
| 157 | if (art_method == nullptr) { |
| 158 | art_method = invoke->GetResolvedMethod(); |
| 159 | } |
Orion Hodson | b1b5206 | 2017-11-27 11:51:42 +0000 | [diff] [blame] | 160 | *wrong_invoke_type = false; |
| 161 | if (art_method == nullptr || !art_method->IsIntrinsic()) { |
| 162 | return false; |
| 163 | } |
| 164 | |
Orion Hodson | 4c71d00 | 2017-11-29 11:03:25 +0000 | [diff] [blame] | 165 | // TODO: b/65872996 The intent is that polymorphic signature methods should |
| 166 | // be compiler intrinsics. At present, they are only interpreter intrinsics. |
| 167 | if (art_method->IsPolymorphicSignature()) { |
| 168 | return false; |
Aart Bik | f0010dd | 2017-11-21 16:31:53 -0800 | [diff] [blame] | 169 | } |
Orion Hodson | b1b5206 | 2017-11-27 11:51:42 +0000 | [diff] [blame] | 170 | |
| 171 | Intrinsics intrinsic = static_cast<Intrinsics>(art_method->GetIntrinsic()); |
| 172 | if (CheckInvokeType(intrinsic, invoke) == false) { |
| 173 | *wrong_invoke_type = true; |
| 174 | return false; |
| 175 | } |
| 176 | |
| 177 | invoke->SetIntrinsic(intrinsic, |
| 178 | NeedsEnvironmentOrCache(intrinsic), |
| 179 | GetSideEffects(intrinsic), |
| 180 | GetExceptions(intrinsic)); |
| 181 | return true; |
Aart Bik | f0010dd | 2017-11-21 16:31:53 -0800 | [diff] [blame] | 182 | } |
| 183 | |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 184 | bool IntrinsicsRecognizer::Run() { |
| 185 | bool didRecognize = false; |
Nicolas Geoffray | 762869d | 2016-07-15 15:28:35 +0100 | [diff] [blame] | 186 | ScopedObjectAccess soa(Thread::Current()); |
Vladimir Marko | 2c45bc9 | 2016-10-25 16:54:12 +0100 | [diff] [blame] | 187 | for (HBasicBlock* block : graph_->GetReversePostOrder()) { |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 188 | for (HInstructionIterator inst_it(block->GetInstructions()); !inst_it.Done(); |
| 189 | inst_it.Advance()) { |
| 190 | HInstruction* inst = inst_it.Current(); |
| 191 | if (inst->IsInvoke()) { |
Aart Bik | f0010dd | 2017-11-21 16:31:53 -0800 | [diff] [blame] | 192 | bool wrong_invoke_type = false; |
Mingyao Yang | 6b1aebe | 2017-11-27 15:39:04 -0800 | [diff] [blame] | 193 | if (Recognize(inst->AsInvoke(), /* art_method */ nullptr, &wrong_invoke_type)) { |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 194 | didRecognize = true; |
Aart Bik | f0010dd | 2017-11-21 16:31:53 -0800 | [diff] [blame] | 195 | MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized); |
| 196 | } else if (wrong_invoke_type) { |
| 197 | LOG(WARNING) |
| 198 | << "Found an intrinsic with unexpected invoke type: " |
| 199 | << inst->AsInvoke()->GetResolvedMethod()->PrettyMethod() << " " |
| 200 | << inst->DebugName(); |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 201 | } |
| 202 | } |
| 203 | } |
| 204 | } |
Aart Bik | 2477320 | 2018-04-26 10:28:51 -0700 | [diff] [blame] | 205 | return didRecognize; |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 206 | } |
| 207 | |
| 208 | std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic) { |
| 209 | switch (intrinsic) { |
| 210 | case Intrinsics::kNone: |
David Brazdil | 109c89a | 2015-07-31 17:10:43 +0100 | [diff] [blame] | 211 | os << "None"; |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 212 | break; |
Nicolas Geoffray | 762869d | 2016-07-15 15:28:35 +0100 | [diff] [blame] | 213 | #define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions, ...) \ |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 214 | case Intrinsics::k ## Name: \ |
| 215 | os << # Name; \ |
| 216 | break; |
| 217 | #include "intrinsics_list.h" |
Andreas Gampe | 8cf9cb3 | 2017-07-19 09:28:38 -0700 | [diff] [blame] | 218 | INTRINSICS_LIST(OPTIMIZING_INTRINSICS) |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 219 | #undef STATIC_INTRINSICS_LIST |
| 220 | #undef VIRTUAL_INTRINSICS_LIST |
| 221 | #undef OPTIMIZING_INTRINSICS |
| 222 | } |
| 223 | return os; |
| 224 | } |
| 225 | |
// Descriptors and field names used to locate the java.lang.Integer autoboxing
// cache (java.lang.Integer$IntegerCache) and its fields.
// constexpr (rather than const) makes the compile-time nature explicit.
static constexpr char kIntegerCacheDescriptor[] = "Ljava/lang/Integer$IntegerCache;";
static constexpr char kIntegerDescriptor[] = "Ljava/lang/Integer;";
static constexpr char kIntegerArrayDescriptor[] = "[Ljava/lang/Integer;";
static constexpr char kLowFieldName[] = "low";
static constexpr char kHighFieldName[] = "high";
static constexpr char kValueFieldName[] = "value";
| 232 | |
// Retrieves the BootImageLiveObjects array from the primary boot image space.
// The returned array lives in the boot image and therefore needs no read
// barrier (see the kWithoutReadBarrier root access below).
static ObjPtr<mirror::ObjectArray<mirror::Object>> GetBootImageLiveObjects()
    REQUIRES_SHARED(Locks::mutator_lock_) {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  const std::vector<gc::space::ImageSpace*>& boot_image_spaces = heap->GetBootImageSpaces();
  DCHECK(!boot_image_spaces.empty());
  // The live-objects root is recorded in the first (main) boot image header.
  const ImageHeader& main_header = boot_image_spaces[0]->GetImageHeader();
  ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects =
      ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(
          main_header.GetImageRoot<kWithoutReadBarrier>(ImageHeader::kBootImageLiveObjects));
  DCHECK(boot_image_live_objects != nullptr);
  DCHECK(heap->ObjectIsInBootImageSpace(boot_image_live_objects));
  return boot_image_live_objects;
}
| 246 | |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame^] | 247 | static ObjPtr<mirror::Class> LookupInitializedClass(Thread* self, |
| 248 | ClassLinker* class_linker, |
| 249 | const char* descriptor) |
| 250 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 251 | ObjPtr<mirror::Class> klass = |
| 252 | class_linker->LookupClass(self, descriptor, /* class_loader */ nullptr); |
| 253 | DCHECK(klass != nullptr); |
| 254 | DCHECK(klass->IsInitialized()); |
| 255 | return klass; |
| 256 | } |
| 257 | |
| 258 | static ObjPtr<mirror::ObjectArray<mirror::Object>> GetIntegerCacheArray( |
| 259 | ObjPtr<mirror::Class> cache_class) REQUIRES_SHARED(Locks::mutator_lock_) { |
| 260 | ArtField* cache_field = cache_class->FindDeclaredStaticField("cache", kIntegerArrayDescriptor); |
| 261 | DCHECK(cache_field != nullptr); |
| 262 | return ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(cache_field->GetObject(cache_class)); |
| 263 | } |
| 264 | |
| 265 | static int32_t GetIntegerCacheField(ObjPtr<mirror::Class> cache_class, const char* field_name) |
| 266 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 267 | ArtField* field = cache_class->FindDeclaredStaticField(field_name, "I"); |
| 268 | DCHECK(field != nullptr); |
| 269 | return field->GetInt(cache_class); |
| 270 | } |
| 271 | |
// Verifies that the runtime's current IntegerCache state still matches the
// cache recorded in the boot image (`boot_image_cache`). Returns false if the
// cache array, its low/high bounds, its elements, or their `value` fields have
// been changed (e.g. through reflection) since the boot image was created.
static bool CheckIntegerCache(Thread* self,
                              ClassLinker* class_linker,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects,
                              ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_cache)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(boot_image_cache != nullptr);

  // Since we have a cache in the boot image, both java.lang.Integer and
  // java.lang.Integer$IntegerCache must be initialized in the boot image.
  ObjPtr<mirror::Class> cache_class =
      LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor);
  ObjPtr<mirror::Class> integer_class =
      LookupInitializedClass(self, class_linker, kIntegerDescriptor);

  // Check that the current cache is the same as the `boot_image_cache`.
  ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = GetIntegerCacheArray(cache_class);
  if (current_cache != boot_image_cache) {
    return false;  // Messed up IntegerCache.cache.
  }

  // Check that the range matches the boot image cache length.
  int32_t low = GetIntegerCacheField(cache_class, kLowFieldName);
  int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
  if (boot_image_cache->GetLength() != high - low + 1) {
    return false;  // Messed up IntegerCache.low or IntegerCache.high.
  }

  // Check that the elements match the boot image intrinsic objects and check their values as well.
  ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
  DCHECK(value_field != nullptr);
  for (int32_t i = 0, len = boot_image_cache->GetLength(); i != len; ++i) {
    ObjPtr<mirror::Object> boot_image_object =
        IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, i);
    DCHECK(Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boot_image_object));
    // No need for read barrier for comparison with a boot image object.
    ObjPtr<mirror::Object> current_object =
        boot_image_cache->GetWithoutChecks<kVerifyNone, kWithoutReadBarrier>(i);
    if (boot_image_object != current_object) {
      return false;  // Messed up IntegerCache.cache[i]
    }
    if (value_field->GetInt(boot_image_object) != low + i) {
      return false;  // Messed up IntegerCache.cache[i].value.
    }
  }

  return true;
}
| 319 | |
// Sets up register locations for the Integer.valueOf() intrinsic.
// Chooses between a runtime call (kCallOnMainOnly, when a new j.l.Integer may
// need to be allocated) and no call (kNoCall, when a constant input is known
// to be inside the Integer cache range). Bails out without creating locations
// (leaving the intrinsic unimplemented) when the required boot image data is
// unavailable or inconsistent.
void IntrinsicVisitor::ComputeIntegerValueOfLocations(HInvoke* invoke,
                                                      CodeGenerator* codegen,
                                                      Location return_location,
                                                      Location first_argument_location) {
  // The intrinsic will call if it needs to allocate a j.l.Integer.
  LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly;
  const CompilerOptions& compiler_options = codegen->GetCompilerOptions();
  if (compiler_options.IsBootImage()) {
    // Piggyback on the method load kind to determine whether we can use PC-relative addressing.
    // This should cover both the testing config (non-PIC boot image) and codegens that reject
    // PC-relative load kinds and fall back to the runtime call.
    if (!invoke->AsInvokeStaticOrDirect()->HasPcRelativeMethodLoadKind()) {
      return;
    }
    // Both Integer and Integer$IntegerCache must be part of the image being compiled.
    if (!compiler_options.IsImageClass(kIntegerCacheDescriptor) ||
        !compiler_options.IsImageClass(kIntegerDescriptor)) {
      return;
    }
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    ObjPtr<mirror::Class> cache_class = class_linker->LookupClass(
        self, kIntegerCacheDescriptor, /* class_loader */ nullptr);
    DCHECK(cache_class != nullptr);
    if (UNLIKELY(!cache_class->IsInitialized())) {
      LOG(WARNING) << "Image class " << cache_class->PrettyDescriptor() << " is uninitialized.";
      return;
    }
    ObjPtr<mirror::Class> integer_class =
        class_linker->LookupClass(self, kIntegerDescriptor, /* class_loader */ nullptr);
    DCHECK(integer_class != nullptr);
    if (UNLIKELY(!integer_class->IsInitialized())) {
      LOG(WARNING) << "Image class " << integer_class->PrettyDescriptor() << " is uninitialized.";
      return;
    }
    int32_t low = GetIntegerCacheField(cache_class, kLowFieldName);
    int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
    if (kIsDebugBuild) {
      // Sanity-check that the cache array is consistent with low/high and
      // that each cached Integer holds the expected value.
      ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = GetIntegerCacheArray(cache_class);
      CHECK(current_cache != nullptr);
      CHECK_EQ(current_cache->GetLength(), high - low + 1);
      ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
      CHECK(value_field != nullptr);
      for (int32_t i = 0, len = current_cache->GetLength(); i != len; ++i) {
        ObjPtr<mirror::Object> current_object = current_cache->GetWithoutChecks(i);
        CHECK(current_object != nullptr);
        CHECK_EQ(value_field->GetInt(current_object), low + i);
      }
    }
    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      // Unsigned trick: value in [low, high] iff (value - low) < (high - low + 1).
      if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) <
          static_cast<uint32_t>(high - low + 1)) {
        // No call, we shall use direct pointer to the Integer object.
        call_kind = LocationSummary::kNoCall;
      }
    }
  } else {
    Runtime* runtime = Runtime::Current();
    if (runtime->GetHeap()->GetBootImageSpaces().empty()) {
      return;  // Running without boot image, cannot use required boot image objects.
    }
    Thread* self = Thread::Current();
    ScopedObjectAccess soa(self);
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
    ObjPtr<mirror::ObjectArray<mirror::Object>> cache =
        IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects);
    if (cache == nullptr) {
      return;  // No cache in the boot image.
    }
    if (runtime->UseJitCompilation()) {
      if (!CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache)) {
        return;  // The cache was somehow messed up, probably by using reflection.
      }
    } else {
      DCHECK(runtime->IsAotCompiler());
      DCHECK(CheckIntegerCache(self, runtime->GetClassLinker(), boot_image_live_objects, cache));
      if (invoke->InputAt(0)->IsIntConstant()) {
        int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
        // Retrieve the `value` from the lowest cached Integer.
        ObjPtr<mirror::Object> low_integer =
            IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
        ObjPtr<mirror::Class> integer_class =
            low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
        ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
        DCHECK(value_field != nullptr);
        int32_t low = value_field->GetInt(low_integer);
        if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) <
            static_cast<uint32_t>(cache->GetLength())) {
          // No call, we shall use direct pointer to the Integer object. Note that we cannot
          // do this for JIT as the "low" can change through reflection before emitting the code.
          call_kind = LocationSummary::kNoCall;
        }
      }
    }
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
  LocationSummary* locations = new (allocator) LocationSummary(invoke, call_kind, kIntrinsified);
  if (call_kind == LocationSummary::kCallOnMainOnly) {
    locations->SetInAt(0, Location::RegisterOrConstant(invoke->InputAt(0)));
    locations->AddTemp(first_argument_location);
    locations->SetOut(return_location);
  } else {
    // kNoCall: constant input, result materialized into a register.
    locations->SetInAt(0, Location::ConstantLocation(invoke->InputAt(0)->AsConstant()));
    locations->SetOut(Location::RequiresRegister());
  }
}
| 428 | |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame^] | 429 | static int32_t GetIntegerCacheLowFromIntegerCache(Thread* self, ClassLinker* class_linker) |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 430 | REQUIRES_SHARED(Locks::mutator_lock_) { |
Vladimir Marko | 6fd1606 | 2018-06-26 11:02:04 +0100 | [diff] [blame^] | 431 | ObjPtr<mirror::Class> cache_class = |
| 432 | LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor); |
| 433 | return GetIntegerCacheField(cache_class, kLowFieldName); |
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 434 | } |
| 435 | |
| 436 | static uint32_t CalculateBootImageOffset(ObjPtr<mirror::Object> object) |
| 437 | REQUIRES_SHARED(Locks::mutator_lock_) { |
| 438 | gc::Heap* heap = Runtime::Current()->GetHeap(); |
| 439 | DCHECK(heap->ObjectIsInBootImageSpace(object)); |
| 440 | return reinterpret_cast<const uint8_t*>(object.Ptr()) - heap->GetBootImageSpaces()[0]->Begin(); |
| 441 | } |
| 442 | |
// Default-construct with zeroed numeric fields and invalid boot image
// references; callers fill in the fields they can actually compute.
inline IntrinsicVisitor::IntegerValueOfInfo::IntegerValueOfInfo()
    : value_offset(0),
      low(0),
      length(0u),
      integer_boot_image_offset(kInvalidReference),
      value_boot_image_reference(kInvalidReference) {}
Vladimir Marko | eebb821 | 2018-06-05 14:57:24 +0100 | [diff] [blame] | 449 | |
// Collects the data the code generator needs to emit the Integer.valueOf()
// intrinsic: the Integer.value field offset, the cache range (low/length),
// and boot image references — either patch encodings (when compiling the
// boot image itself) or direct boot image offsets (JIT / app AOT).
IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo(
    HInvoke* invoke, const CompilerOptions& compiler_options) {
  // Note that we could cache all of the data looked up here. but there's no good
  // location for it. We don't want to add it to WellKnownClasses, to avoid creating global
  // jni values. Adding it as state to the compiler singleton seems like wrong
  // separation of concerns.
  // The need for this data should be pretty rare though.

  // Note that at this point we can no longer abort the code generation. Therefore,
  // we need to provide data that shall not lead to a crash even if the fields were
  // modified through reflection since ComputeIntegerValueOfLocations() when JITting.

  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  IntegerValueOfInfo info;
  if (compiler_options.IsBootImage()) {
    // Boot image compilation: read everything from the initialized classes
    // and emit patch encodings to be resolved when the image is written.
    ObjPtr<mirror::Class> integer_class =
        LookupInitializedClass(self, class_linker, kIntegerDescriptor);
    ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
    DCHECK(value_field != nullptr);
    info.value_offset = value_field->GetOffset().Uint32Value();
    ObjPtr<mirror::Class> cache_class =
        LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor);
    info.low = GetIntegerCacheField(cache_class, kLowFieldName);
    int32_t high = GetIntegerCacheField(cache_class, kHighFieldName);
    info.length = dchecked_integral_cast<uint32_t>(high - info.low + 1);

    info.integer_boot_image_offset = IntegerValueOfInfo::kInvalidReference;
    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      // Unsigned trick: input in [low, low + length) iff (input - low) < length.
      uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
      if (index < static_cast<uint32_t>(info.length)) {
        info.value_boot_image_reference = IntrinsicObjects::EncodePatch(
            IntrinsicObjects::PatchType::kIntegerValueOfObject, index);
      } else {
        // Not in the cache.
        info.value_boot_image_reference = IntegerValueOfInfo::kInvalidReference;
      }
    } else {
      info.array_data_boot_image_reference =
          IntrinsicObjects::EncodePatch(IntrinsicObjects::PatchType::kIntegerValueOfArray);
    }
  } else {
    // JIT or app AOT: read from the live boot image objects and emit direct
    // boot image offsets.
    ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects();
    ObjPtr<mirror::Object> low_integer =
        IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u);
    ObjPtr<mirror::Class> integer_class = low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>();
    ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I");
    DCHECK(value_field != nullptr);
    info.value_offset = value_field->GetOffset().Uint32Value();
    if (runtime->UseJitCompilation()) {
      // Use the current `IntegerCache.low` for JIT to avoid truly surprising behavior if the
      // code messes up the `value` field in the lowest cached Integer using reflection.
      info.low = GetIntegerCacheLowFromIntegerCache(self, class_linker);
    } else {
      // For app AOT, the `low_integer->value` should be the same as `IntegerCache.low`.
      info.low = value_field->GetInt(low_integer);
      DCHECK_EQ(info.low, GetIntegerCacheLowFromIntegerCache(self, class_linker));
    }
    // Do not look at `IntegerCache.high`, use the immutable length of the cache array instead.
    info.length = dchecked_integral_cast<uint32_t>(
        IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects)->GetLength());

    info.integer_boot_image_offset = CalculateBootImageOffset(integer_class);
    if (invoke->InputAt(0)->IsIntConstant()) {
      int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue();
      // Unsigned trick: input in [low, low + length) iff (input - low) < length.
      uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low);
      if (index < static_cast<uint32_t>(info.length)) {
        ObjPtr<mirror::Object> integer =
            IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, index);
        info.value_boot_image_reference = CalculateBootImageOffset(integer);
      } else {
        // Not in the cache.
        info.value_boot_image_reference = IntegerValueOfInfo::kInvalidReference;
      }
    } else {
      info.array_data_boot_image_reference =
          CalculateBootImageOffset(boot_image_live_objects) +
          IntrinsicObjects::GetIntegerValueOfArrayDataOffset(boot_image_live_objects).Uint32Value();
    }
  }

  return info;
}
| 537 | |
Andreas Gampe | 71fb52f | 2014-12-29 17:43:08 -0800 | [diff] [blame] | 538 | } // namespace art |