Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2011 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
Mathieu Chartier | ad2541a | 2013-10-25 10:05:23 -0700 | [diff] [blame] | 17 | #include <ctime> |
| 18 | |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 19 | #include "object.h" |
| 20 | |
Brian Carlstrom | ea46f95 | 2013-07-30 01:26:50 -0700 | [diff] [blame] | 21 | #include "art_field.h" |
| 22 | #include "art_field-inl.h" |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 23 | #include "array-inl.h" |
| 24 | #include "class.h" |
| 25 | #include "class-inl.h" |
Ian Rogers | 04d7aa9 | 2013-03-16 14:29:17 -0700 | [diff] [blame] | 26 | #include "class_linker-inl.h" |
Ian Rogers | 1d54e73 | 2013-05-02 21:10:01 -0700 | [diff] [blame] | 27 | #include "gc/accounting/card_table-inl.h" |
| 28 | #include "gc/heap.h" |
Ian Rogers | 04d7aa9 | 2013-03-16 14:29:17 -0700 | [diff] [blame] | 29 | #include "iftable-inl.h" |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 30 | #include "monitor.h" |
| 31 | #include "object-inl.h" |
Ian Rogers | 04d7aa9 | 2013-03-16 14:29:17 -0700 | [diff] [blame] | 32 | #include "object_array-inl.h" |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 33 | #include "runtime.h" |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 34 | #include "handle_scope-inl.h" |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 35 | #include "throwable.h" |
| 36 | #include "well_known_classes.h" |
| 37 | |
| 38 | namespace art { |
| 39 | namespace mirror { |
| 40 | |
Hiroshi Yamauchi | 7971928 | 2014-04-10 12:46:22 -0700 | [diff] [blame] | 41 | class CopyReferenceFieldsWithReadBarrierVisitor { |
| 42 | public: |
| 43 | explicit CopyReferenceFieldsWithReadBarrierVisitor(Object* dest_obj) |
| 44 | : dest_obj_(dest_obj) {} |
| 45 | |
| 46 | void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const |
| 47 | ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 48 | // GetFieldObject() contains a RB. |
Ian Rogers | b0fa5dc | 2014-04-28 16:47:08 -0700 | [diff] [blame] | 49 | Object* ref = obj->GetFieldObject<Object>(offset); |
Hiroshi Yamauchi | 7971928 | 2014-04-10 12:46:22 -0700 | [diff] [blame] | 50 | // No WB here as a large object space does not have a card table |
| 51 | // coverage. Instead, cards will be marked separately. |
Ian Rogers | b0fa5dc | 2014-04-28 16:47:08 -0700 | [diff] [blame] | 52 | dest_obj_->SetFieldObjectWithoutWriteBarrier<false, false>(offset, ref); |
Hiroshi Yamauchi | 7971928 | 2014-04-10 12:46:22 -0700 | [diff] [blame] | 53 | } |
| 54 | |
| 55 | void operator()(mirror::Class* klass, mirror::Reference* ref) const |
| 56 | ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 57 | // Copy java.lang.ref.Reference.referent which isn't visited in |
| 58 | // Object::VisitReferences(). |
Fred Shih | 4ee7a66 | 2014-07-11 09:59:27 -0700 | [diff] [blame] | 59 | DCHECK(klass->IsTypeOfReferenceClass()); |
Hiroshi Yamauchi | 7971928 | 2014-04-10 12:46:22 -0700 | [diff] [blame] | 60 | this->operator()(ref, mirror::Reference::ReferentOffset(), false); |
| 61 | } |
| 62 | |
| 63 | private: |
| 64 | Object* const dest_obj_; |
| 65 | }; |
| 66 | |
// Copies the contents of |src| into |dest| (both |num_bytes| long), skipping
// the Object header so |dest| keeps its own header fields, then performs the
// GC bookkeeping (read-barrier re-copy, write barriers, finalizer
// registration) needed to make the copy safe. Returns |dest|.
Object* Object::CopyObject(Thread* self, mirror::Object* dest, mirror::Object* src,
                           size_t num_bytes) {
  // Copy instance data. We assume memcpy copies by words.
  // TODO: expose and use move32.
  uint8_t* src_bytes = reinterpret_cast<uint8_t*>(src);
  uint8_t* dst_bytes = reinterpret_cast<uint8_t*>(dest);
  // Start past sizeof(Object) so the first bytes of |dest| (its header) are
  // left untouched by the bulk copy.
  size_t offset = sizeof(Object);
  memcpy(dst_bytes + offset, src_bytes + offset, num_bytes - offset);
  if (kUseBakerOrBrooksReadBarrier) {
    // We need a RB here. After the memcpy that covers the whole
    // object above, copy references fields one by one again with a
    // RB. TODO: Optimize this later?
    CopyReferenceFieldsWithReadBarrierVisitor visitor(dest);
    src->VisitReferences<true>(visitor, visitor);
  }
  gc::Heap* heap = Runtime::Current()->GetHeap();
  // Perform write barriers on copied object references.
  Class* c = src->GetClass();
  if (c->IsArrayClass()) {
    if (!c->GetComponentType()->IsPrimitive()) {
      // Reference array: dirty cards covering the whole element range.
      ObjectArray<Object>* array = dest->AsObjectArray<Object>();
      heap->WriteBarrierArray(dest, 0, array->GetLength());
    }
  } else {
    heap->WriteBarrierEveryFieldOf(dest);
  }
  if (c->IsFinalizable()) {
    // Register the copy with the finalizer machinery. Note: takes &dest,
    // presumably so the pointer can be updated if a GC moves the object —
    // hence |dest| is returned only after this call.
    heap->AddFinalizerReference(self, &dest);
  }
  return dest;
}
| 98 | |
Hiroshi Yamauchi | 4cd662e | 2014-04-03 16:28:10 -0700 | [diff] [blame] | 99 | // An allocation pre-fence visitor that copies the object. |
| 100 | class CopyObjectVisitor { |
| 101 | public: |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 102 | explicit CopyObjectVisitor(Thread* self, Handle<Object>* orig, size_t num_bytes) |
Hiroshi Yamauchi | 4cd662e | 2014-04-03 16:28:10 -0700 | [diff] [blame] | 103 | : self_(self), orig_(orig), num_bytes_(num_bytes) { |
| 104 | } |
| 105 | |
| 106 | void operator()(Object* obj, size_t usable_size) const |
| 107 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
| 108 | UNUSED(usable_size); |
Hiroshi Yamauchi | 0fbd6e6 | 2014-07-17 16:16:31 -0700 | [diff] [blame] | 109 | Object::CopyObject(self_, obj, orig_->Get(), num_bytes_); |
Hiroshi Yamauchi | 4cd662e | 2014-04-03 16:28:10 -0700 | [diff] [blame] | 110 | } |
| 111 | |
| 112 | private: |
| 113 | Thread* const self_; |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 114 | Handle<Object>* const orig_; |
Hiroshi Yamauchi | 4cd662e | 2014-04-03 16:28:10 -0700 | [diff] [blame] | 115 | const size_t num_bytes_; |
| 116 | DISALLOW_COPY_AND_ASSIGN(CopyObjectVisitor); |
| 117 | }; |
| 118 | |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 119 | Object* Object::Clone(Thread* self) { |
| 120 | CHECK(!IsClass()) << "Can't clone classes."; |
| 121 | // Object::SizeOf gets the right size even if we're an array. Using c->AllocObject() here would |
| 122 | // be wrong. |
| 123 | gc::Heap* heap = Runtime::Current()->GetHeap(); |
| 124 | size_t num_bytes = SizeOf(); |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 125 | StackHandleScope<1> hs(self); |
| 126 | Handle<Object> this_object(hs.NewHandle(this)); |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 127 | Object* copy; |
Hiroshi Yamauchi | 4cd662e | 2014-04-03 16:28:10 -0700 | [diff] [blame] | 128 | CopyObjectVisitor visitor(self, &this_object, num_bytes); |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 129 | if (heap->IsMovableObject(this)) { |
Hiroshi Yamauchi | 4cd662e | 2014-04-03 16:28:10 -0700 | [diff] [blame] | 130 | copy = heap->AllocObject<true>(self, GetClass(), num_bytes, visitor); |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 131 | } else { |
Hiroshi Yamauchi | 4cd662e | 2014-04-03 16:28:10 -0700 | [diff] [blame] | 132 | copy = heap->AllocNonMovableObject<true>(self, GetClass(), num_bytes, visitor); |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 133 | } |
Mathieu Chartier | 0732d59 | 2013-11-06 11:02:50 -0800 | [diff] [blame] | 134 | return copy; |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 135 | } |
| 136 | |
Ian Rogers | bbcd30b | 2014-10-30 15:25:36 -0700 | [diff] [blame] | 137 | uint32_t Object::GenerateIdentityHashCode() { |
| 138 | static Atomic<uint32_t> seed(987654321U + std::time(nullptr)); |
| 139 | uint32_t expected_value, new_value; |
Mathieu Chartier | ad2541a | 2013-10-25 10:05:23 -0700 | [diff] [blame] | 140 | do { |
Ian Rogers | bbcd30b | 2014-10-30 15:25:36 -0700 | [diff] [blame] | 141 | expected_value = seed.LoadRelaxed(); |
Mathieu Chartier | ad2541a | 2013-10-25 10:05:23 -0700 | [diff] [blame] | 142 | new_value = expected_value * 1103515245 + 12345; |
Mathieu Chartier | 4e6a31e | 2013-10-31 10:35:05 -0700 | [diff] [blame] | 143 | } while ((expected_value & LockWord::kHashMask) == 0 || |
Ian Rogers | 3e5cf30 | 2014-05-20 16:40:37 -0700 | [diff] [blame] | 144 | !seed.CompareExchangeWeakRelaxed(expected_value, new_value)); |
Mathieu Chartier | ad2541a | 2013-10-25 10:05:23 -0700 | [diff] [blame] | 145 | return expected_value & LockWord::kHashMask; |
| 146 | } |
| 147 | |
// Returns this object's identity hash code, lazily installing one in the
// lock word (or, if the object is locked, in its inflated monitor). Loops
// until one of the terminal lock-word states yields an answer.
int32_t Object::IdentityHashCode() const {
  mirror::Object* current_this = const_cast<mirror::Object*>(this);
  while (true) {
    LockWord lw = current_this->GetLockWord(false);
    switch (lw.GetState()) {
      case LockWord::kUnlocked: {
        // Try to compare and swap in a new hash; on success return it
        // directly.
        LockWord hash_word(LockWord::FromHashCode(GenerateIdentityHashCode()));
        DCHECK_EQ(hash_word.GetState(), LockWord::kHashCode);
        if (const_cast<Object*>(this)->CasLockWordWeakRelaxed(lw, hash_word)) {
          return hash_word.GetHashCode();
        }
        // CAS failed (raced or spurious); re-read the lock word and retry.
        break;
      }
      case LockWord::kThinLocked: {
        // Inflate the thin lock to a monitor and stick the hash code inside of the monitor. May
        // fail spuriously.
        Thread* self = Thread::Current();
        StackHandleScope<1> hs(self);
        Handle<mirror::Object> h_this(hs.NewHandle(current_this));
        Monitor::InflateThinLocked(self, h_this, lw, GenerateIdentityHashCode());
        // A GC may have occurred when we switched to kBlocked, so refresh the
        // raw pointer from the handle before looping.
        current_this = h_this.Get();
        break;
      }
      case LockWord::kFatLocked: {
        // Already inflated, return the hash stored in the monitor.
        Monitor* monitor = lw.FatLockMonitor();
        DCHECK(monitor != nullptr);
        return monitor->GetHashCode();
      }
      case LockWord::kHashCode: {
        // A hash was installed previously; just return it.
        return lw.GetHashCode();
      }
      default: {
        LOG(FATAL) << "Invalid state during hashcode " << lw.GetState();
        break;
      }
    }
  }
  UNREACHABLE();
}
| 191 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 192 | void Object::CheckFieldAssignmentImpl(MemberOffset field_offset, Object* new_value) { |
| 193 | Class* c = GetClass(); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 194 | Runtime* runtime = Runtime::Current(); |
| 195 | if (runtime->GetClassLinker() == nullptr || !runtime->IsStarted() || |
| 196 | !runtime->GetHeap()->IsObjectValidationEnabled() || !c->IsResolved()) { |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 197 | return; |
| 198 | } |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 199 | for (Class* cur = c; cur != NULL; cur = cur->GetSuperClass()) { |
Brian Carlstrom | ea46f95 | 2013-07-30 01:26:50 -0700 | [diff] [blame] | 200 | ObjectArray<ArtField>* fields = cur->GetIFields(); |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 201 | if (fields != NULL) { |
Vladimir Marko | 76649e8 | 2014-11-10 18:32:59 +0000 | [diff] [blame] | 202 | size_t num_ifields = fields->GetLength(); |
| 203 | for (size_t i = 0; i < num_ifields; ++i) { |
Ian Rogers | 08f1f50 | 2014-12-02 15:04:37 -0800 | [diff] [blame^] | 204 | StackHandleScope<1> hs(Thread::Current()); |
| 205 | Handle<Object> h_object(hs.NewHandle(new_value)); |
Brian Carlstrom | ea46f95 | 2013-07-30 01:26:50 -0700 | [diff] [blame] | 206 | ArtField* field = fields->Get(i); |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 207 | if (field->GetOffset().Int32Value() == field_offset.Int32Value()) { |
Vladimir Marko | 76649e8 | 2014-11-10 18:32:59 +0000 | [diff] [blame] | 208 | CHECK_NE(field->GetTypeAsPrimitiveType(), Primitive::kPrimNot); |
Ian Rogers | 08f1f50 | 2014-12-02 15:04:37 -0800 | [diff] [blame^] | 209 | // TODO: resolve the field type for moving GC. |
| 210 | mirror::Class* field_type = field->GetType(!kMovingCollector); |
| 211 | if (field_type != nullptr) { |
| 212 | CHECK(field_type->IsAssignableFrom(new_value->GetClass())); |
| 213 | } |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 214 | return; |
| 215 | } |
| 216 | } |
| 217 | } |
| 218 | } |
| 219 | if (c->IsArrayClass()) { |
| 220 | // Bounds and assign-ability done in the array setter. |
| 221 | return; |
| 222 | } |
| 223 | if (IsClass()) { |
Brian Carlstrom | ea46f95 | 2013-07-30 01:26:50 -0700 | [diff] [blame] | 224 | ObjectArray<ArtField>* fields = AsClass()->GetSFields(); |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 225 | if (fields != NULL) { |
Vladimir Marko | 76649e8 | 2014-11-10 18:32:59 +0000 | [diff] [blame] | 226 | size_t num_sfields = fields->GetLength(); |
| 227 | for (size_t i = 0; i < num_sfields; ++i) { |
Brian Carlstrom | ea46f95 | 2013-07-30 01:26:50 -0700 | [diff] [blame] | 228 | ArtField* field = fields->Get(i); |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 229 | if (field->GetOffset().Int32Value() == field_offset.Int32Value()) { |
Vladimir Marko | 76649e8 | 2014-11-10 18:32:59 +0000 | [diff] [blame] | 230 | CHECK_NE(field->GetTypeAsPrimitiveType(), Primitive::kPrimNot); |
Ian Rogers | 08f1f50 | 2014-12-02 15:04:37 -0800 | [diff] [blame^] | 231 | // TODO: resolve the field type for moving GC. |
| 232 | mirror::Class* field_type = field->GetType(!kMovingCollector); |
| 233 | if (field_type != nullptr) { |
| 234 | CHECK(field_type->IsAssignableFrom(new_value->GetClass())); |
| 235 | } |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 236 | return; |
| 237 | } |
| 238 | } |
| 239 | } |
| 240 | } |
| 241 | LOG(FATAL) << "Failed to find field for assignment to " << reinterpret_cast<void*>(this) |
| 242 | << " of type " << PrettyDescriptor(c) << " at offset " << field_offset; |
Ian Rogers | 08f1f50 | 2014-12-02 15:04:37 -0800 | [diff] [blame^] | 243 | UNREACHABLE(); |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 244 | } |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 245 | |
| 246 | } // namespace mirror |
| 247 | } // namespace art |