blob: 6fbfbdd539d648c4b04f92816bbe28436988d594 [file] [log] [blame]
Elliott Hughes2faa5f12012-01-30 14:42:07 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Brian Carlstrom7e93b502011-08-04 14:16:22 -070016
17#include "intern_table.h"
18
Ian Rogers700a4022014-05-19 16:49:03 -070019#include <memory>
20
David Sehr0225f8e2018-01-31 08:52:24 +000021#include "dex/utf.h"
Mathieu Chartier97509952015-07-13 14:35:43 -070022#include "gc/collector/garbage_collector.h"
Ian Rogers7dfb28c2013-08-22 08:18:36 -070023#include "gc/space/image_space.h"
Mathieu Chartier14c3bf92015-07-13 14:35:43 -070024#include "gc/weak_root_state.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070025#include "gc_root-inl.h"
Mathieu Chartier4a26f172016-01-26 14:26:18 -080026#include "image-inl.h"
Vladimir Marko05792b92015-08-03 11:56:49 +010027#include "mirror/dex_cache-inl.h"
Ian Rogers7dfb28c2013-08-22 08:18:36 -070028#include "mirror/object-inl.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070029#include "mirror/object_array-inl.h"
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -070030#include "mirror/string-inl.h"
Andreas Gampe5d08fcc2017-06-05 17:56:46 -070031#include "object_callbacks.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070032#include "scoped_thread_state_change-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080033#include "thread.h"
Brian Carlstrom7e93b502011-08-04 14:16:22 -070034
35namespace art {
36
// Constructs an empty intern table. Root logging starts disabled; weak roots start in the
// normal (readable) state; the condition variable is used to block interning threads while
// the GC forbids weak-root reads.
InternTable::InternTable()
    : log_new_roots_(false),
      weak_intern_condition_("New intern condition", *Locks::intern_table_lock_),
      weak_root_state_(gc::kWeakRootStateNormal) {
}
Elliott Hughesde69d7f2011-08-18 16:49:37 -070042
Brian Carlstroma663ea52011-08-19 23:33:41 -070043size_t InternTable::Size() const {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +010044 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartiereb175f72014-10-31 11:49:27 -070045 return strong_interns_.Size() + weak_interns_.Size();
Brian Carlstroma663ea52011-08-19 23:33:41 -070046}
47
Hiroshi Yamauchia91a4bc2014-06-13 16:44:55 -070048size_t InternTable::StrongSize() const {
49 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartiereb175f72014-10-31 11:49:27 -070050 return strong_interns_.Size();
Hiroshi Yamauchia91a4bc2014-06-13 16:44:55 -070051}
52
53size_t InternTable::WeakSize() const {
54 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartiereb175f72014-10-31 11:49:27 -070055 return weak_interns_.Size();
Hiroshi Yamauchia91a4bc2014-06-13 16:44:55 -070056}
57
Elliott Hughescac6cc72011-11-03 20:31:21 -070058void InternTable::DumpForSigQuit(std::ostream& os) const {
Mathieu Chartiereb175f72014-10-31 11:49:27 -070059 os << "Intern table: " << StrongSize() << " strong; " << WeakSize() << " weak\n";
Elliott Hughescac6cc72011-11-03 20:31:21 -070060}
61
// Visits the strong intern roots for the GC. Depending on |flags|, either visits every strong
// root, or only the roots recorded in new_strong_intern_roots_ since logging was enabled.
// The same call can also clear the root log and toggle logging on or off.
void InternTable::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  if ((flags & kVisitRootFlagAllRoots) != 0) {
    strong_interns_.VisitRoots(visitor);
  } else if ((flags & kVisitRootFlagNewRoots) != 0) {
    for (auto& root : new_strong_intern_roots_) {
      // Read the root before and after visiting (without read barriers) so we can tell
      // whether the visitor updated it in place.
      ObjPtr<mirror::String> old_ref = root.Read<kWithoutReadBarrier>();
      root.VisitRoot(visitor, RootInfo(kRootInternedString));
      ObjPtr<mirror::String> new_ref = root.Read<kWithoutReadBarrier>();
      if (new_ref != old_ref) {
        // The GC moved a root in the log. Need to search the strong interns and update the
        // corresponding object. This is slow, but luckily for us, this may only happen with a
        // concurrent moving GC.
        strong_interns_.Remove(old_ref);
        strong_interns_.Insert(new_ref);
      }
    }
  }
  if ((flags & kVisitRootFlagClearRootLog) != 0) {
    new_strong_intern_roots_.clear();
  }
  if ((flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
    log_new_roots_ = true;
  } else if ((flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
    log_new_roots_ = false;
  }
  // Note: we deliberately don't visit the weak_interns_ table and the immutable image roots.
}
90
Mathieu Chartier9e868092016-10-31 14:58:04 -070091ObjPtr<mirror::String> InternTable::LookupWeak(Thread* self, ObjPtr<mirror::String> s) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -080092 MutexLock mu(self, *Locks::intern_table_lock_);
93 return LookupWeakLocked(s);
Mathieu Chartierf7fd9702015-11-09 11:16:49 -080094}
95
Mathieu Chartier9e868092016-10-31 14:58:04 -070096ObjPtr<mirror::String> InternTable::LookupStrong(Thread* self, ObjPtr<mirror::String> s) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -080097 MutexLock mu(self, *Locks::intern_table_lock_);
98 return LookupStrongLocked(s);
99}
100
Mathieu Chartier9e868092016-10-31 14:58:04 -0700101ObjPtr<mirror::String> InternTable::LookupStrong(Thread* self,
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000102 uint32_t utf16_length,
103 const char* utf8_data) {
104 DCHECK_EQ(utf16_length, CountModifiedUtf8Chars(utf8_data));
105 Utf8String string(utf16_length,
106 utf8_data,
107 ComputeUtf16HashFromModifiedUtf8(utf8_data, utf16_length));
108 MutexLock mu(self, *Locks::intern_table_lock_);
109 return strong_interns_.Find(string);
110}
111
// Looks up |s| in the weak table. Caller must hold intern_table_lock_.
ObjPtr<mirror::String> InternTable::LookupWeakLocked(ObjPtr<mirror::String> s) {
  return weak_interns_.Find(s);
}
115
// Looks up |s| in the strong table. Caller must hold intern_table_lock_.
ObjPtr<mirror::String> InternTable::LookupStrongLocked(ObjPtr<mirror::String> s) {
  return strong_interns_.Find(s);
}
119
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800120void InternTable::AddNewTable() {
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700121 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800122 weak_interns_.AddNewTable();
123 strong_interns_.AddNewTable();
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700124}
125
Mathieu Chartier9e868092016-10-31 14:58:04 -0700126ObjPtr<mirror::String> InternTable::InsertStrong(ObjPtr<mirror::String> s) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100127 Runtime* runtime = Runtime::Current();
128 if (runtime->IsActiveTransaction()) {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700129 runtime->RecordStrongStringInsertion(s);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100130 }
Mathieu Chartier893263b2014-03-04 11:07:42 -0800131 if (log_new_roots_) {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700132 new_strong_intern_roots_.push_back(GcRoot<mirror::String>(s));
Mathieu Chartier893263b2014-03-04 11:07:42 -0800133 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700134 strong_interns_.Insert(s);
Mathieu Chartier893263b2014-03-04 11:07:42 -0800135 return s;
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100136}
137
Mathieu Chartier9e868092016-10-31 14:58:04 -0700138ObjPtr<mirror::String> InternTable::InsertWeak(ObjPtr<mirror::String> s) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100139 Runtime* runtime = Runtime::Current();
140 if (runtime->IsActiveTransaction()) {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700141 runtime->RecordWeakStringInsertion(s);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100142 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700143 weak_interns_.Insert(s);
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700144 return s;
145}
146
// Removes |s| from the strong table. Unlike RemoveWeak, strong removals are not recorded in
// the active transaction here.
void InternTable::RemoveStrong(ObjPtr<mirror::String> s) {
  strong_interns_.Remove(s);
}
150
Mathieu Chartier9e868092016-10-31 14:58:04 -0700151void InternTable::RemoveWeak(ObjPtr<mirror::String> s) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100152 Runtime* runtime = Runtime::Current();
153 if (runtime->IsActiveTransaction()) {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700154 runtime->RecordWeakStringRemoval(s);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100155 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700156 weak_interns_.Remove(s);
Brian Carlstrom7e93b502011-08-04 14:16:22 -0700157}
158
// Insert/remove methods used to undo changes made during an aborted transaction.
// These must only run once the transaction is no longer active, so the undo itself is not
// re-recorded.
ObjPtr<mirror::String> InternTable::InsertStrongFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  return InsertStrong(s);
}
Mathieu Chartier9e868092016-10-31 14:58:04 -0700164
// Re-inserts a weak intern while rolling back an aborted transaction. Must not be called
// with a transaction active.
ObjPtr<mirror::String> InternTable::InsertWeakFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  return InsertWeak(s);
}
Mathieu Chartier9e868092016-10-31 14:58:04 -0700169
// Removes a strong intern while rolling back an aborted transaction. Must not be called
// with a transaction active.
void InternTable::RemoveStrongFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  RemoveStrong(s);
}
Mathieu Chartier9e868092016-10-31 14:58:04 -0700174
// Removes a weak intern while rolling back an aborted transaction. Must not be called
// with a transaction active.
void InternTable::RemoveWeakFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  RemoveWeak(s);
}
179
Hiroshi Yamauchi0b713572015-06-16 18:29:23 -0700180void InternTable::BroadcastForNewInterns() {
Hiroshi Yamauchi0b713572015-06-16 18:29:23 -0700181 Thread* self = Thread::Current();
182 MutexLock mu(self, *Locks::intern_table_lock_);
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700183 weak_intern_condition_.Broadcast(self);
Hiroshi Yamauchi0b713572015-06-16 18:29:23 -0700184}
185
// Blocks until weak intern reads are permitted again. Caller must hold intern_table_lock_
// exclusively; the lock is dropped while waiting and re-acquired before returning, so any
// state observed before the call must be re-validated afterwards.
void InternTable::WaitUntilAccessible(Thread* self) {
  Locks::intern_table_lock_->ExclusiveUnlock(self);
  {
    // Wait in a suspended thread state so the GC is not blocked by this thread.
    ScopedThreadSuspension sts(self, kWaitingWeakGcRootRead);
    MutexLock mu(self, *Locks::intern_table_lock_);
    // Without read barriers, access is gated by weak_root_state_; with read barriers (CC
    // collector) it is gated per-thread via GetWeakRefAccessEnabled().
    while ((!kUseReadBarrier && weak_root_state_ == gc::kWeakRootStateNoReadsOrWrites) ||
           (kUseReadBarrier && !self->GetWeakRefAccessEnabled())) {
      weak_intern_condition_.Wait(self);
    }
  }
  Locks::intern_table_lock_->ExclusiveLock(self);
}
198
// Core interning routine. Returns the canonical instance for |s|: an existing strong intern,
// an existing weak intern (promoted to strong if |is_strong|), or |s| itself after inserting
// it. May block (via WaitUntilAccessible) while the GC disallows weak-root reads, unless
// |holding_locks| is set, in which case blocking would be unsafe and access must already be
// permitted. Returns null for null input.
ObjPtr<mirror::String> InternTable::Insert(ObjPtr<mirror::String> s,
                                           bool is_strong,
                                           bool holding_locks) {
  if (s == nullptr) {
    return nullptr;
  }
  Thread* const self = Thread::Current();
  MutexLock mu(self, *Locks::intern_table_lock_);
  if (kDebugLocking && !holding_locks) {
    Locks::mutator_lock_->AssertSharedHeld(self);
    CHECK_EQ(2u, self->NumberOfHeldMutexes()) << "may only safely hold the mutator lock";
  }
  while (true) {
    if (holding_locks) {
      // We may not wait below, so weak access must already be enabled.
      if (!kUseReadBarrier) {
        CHECK_EQ(weak_root_state_, gc::kWeakRootStateNormal);
      } else {
        CHECK(self->GetWeakRefAccessEnabled());
      }
    }
    // Check the strong table for a match.
    ObjPtr<mirror::String> strong = LookupStrongLocked(s);
    if (strong != nullptr) {
      return strong;
    }
    if ((!kUseReadBarrier && weak_root_state_ != gc::kWeakRootStateNoReadsOrWrites) ||
        (kUseReadBarrier && self->GetWeakRefAccessEnabled())) {
      break;
    }
    // weak_root_state_ is set to gc::kWeakRootStateNoReadsOrWrites in the GC pause but is only
    // cleared after SweepSystemWeaks has completed. This is why we need to wait until it is
    // cleared.
    CHECK(!holding_locks);
    // Protect |s| across the wait: the GC may move it while we are suspended.
    StackHandleScope<1> hs(self);
    auto h = hs.NewHandleWrapper(&s);
    WaitUntilAccessible(self);
    // Loop back: the tables may have changed while the lock was dropped.
  }
  if (!kUseReadBarrier) {
    CHECK_EQ(weak_root_state_, gc::kWeakRootStateNormal);
  } else {
    CHECK(self->GetWeakRefAccessEnabled());
  }
  // There is no match in the strong table, check the weak table.
  ObjPtr<mirror::String> weak = LookupWeakLocked(s);
  if (weak != nullptr) {
    if (is_strong) {
      // A match was found in the weak table. Promote to the strong table.
      RemoveWeak(weak);
      return InsertStrong(weak);
    }
    return weak;
  }
  // No match in the strong table or the weak table. Insert into the strong / weak table.
  return is_strong ? InsertStrong(s) : InsertWeak(s);
}
254
Mathieu Chartier9e868092016-10-31 14:58:04 -0700255ObjPtr<mirror::String> InternTable::InternStrong(int32_t utf16_length, const char* utf8_data) {
Mathieu Chartiered0fc1d2014-03-21 14:09:35 -0700256 DCHECK(utf8_data != nullptr);
Vladimir Markod2bdb9b2016-07-08 17:23:22 +0100257 Thread* self = Thread::Current();
258 // Try to avoid allocation.
Mathieu Chartier9e868092016-10-31 14:58:04 -0700259 ObjPtr<mirror::String> s = LookupStrong(self, utf16_length, utf8_data);
Vladimir Markod2bdb9b2016-07-08 17:23:22 +0100260 if (s != nullptr) {
261 return s;
262 }
Ian Rogers7dfb28c2013-08-22 08:18:36 -0700263 return InternStrong(mirror::String::AllocFromModifiedUtf8(
Vladimir Markod2bdb9b2016-07-08 17:23:22 +0100264 self, utf16_length, utf8_data));
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700265}
266
// Strongly interns a NUL-terminated modified-UTF-8 string. Always allocates a new
// mirror::String before the table lookup (cf. the length-taking overload above, which
// checks the table first).
ObjPtr<mirror::String> InternTable::InternStrong(const char* utf8_data) {
  DCHECK(utf8_data != nullptr);
  return InternStrong(mirror::String::AllocFromModifiedUtf8(Thread::Current(), utf8_data));
}
271
// Strongly interns an image string. Passes holding_locks=true because the caller
// may be holding the heap bitmap lock, so Insert must not block.
ObjPtr<mirror::String> InternTable::InternStrongImageString(ObjPtr<mirror::String> s) {
  // May be holding the heap bitmap lock.
  return Insert(s, true, true);
}
276
// Strongly interns |s|, returning the canonical instance (may block on GC weak access).
ObjPtr<mirror::String> InternTable::InternStrong(ObjPtr<mirror::String> s) {
  return Insert(s, true, false);
}
280
// Weakly interns |s|, returning the canonical instance (may block on GC weak access).
ObjPtr<mirror::String> InternTable::InternWeak(ObjPtr<mirror::String> s) {
  return Insert(s, false, false);
}
284
// Returns true if |s| itself (same instance, not just an equal string) is weakly interned.
bool InternTable::ContainsWeak(ObjPtr<mirror::String> s) {
  return LookupWeak(Thread::Current(), s) == s;
}
288
Mathieu Chartier97509952015-07-13 14:35:43 -0700289void InternTable::SweepInternTableWeaks(IsMarkedVisitor* visitor) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100290 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartier97509952015-07-13 14:35:43 -0700291 weak_interns_.SweepWeaks(visitor);
Brian Carlstroma663ea52011-08-19 23:33:41 -0700292}
293
// Serializes the strong intern table to |ptr| (image writing); returns the byte count.
// Weak interns are not serialized.
size_t InternTable::WriteToMemory(uint8_t* ptr) {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  return strong_interns_.WriteToMemory(ptr);
}
298
// Hash functor: returns the string's cached/computed hash code. Reads without a read
// barrier, which requires the mutator lock (asserted in debug builds).
std::size_t InternTable::StringHashEquals::operator()(const GcRoot<mirror::String>& root) const {
  if (kIsDebugBuild) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  // An additional cast to prevent undesired sign extension.
  return static_cast<size_t>(
      static_cast<uint32_t>(root.Read<kWithoutReadBarrier>()->GetHashCode()));
}
307
// Equality functor for two interned roots: content equality of the underlying strings.
// Reads without read barriers; requires the mutator lock (asserted in debug builds).
bool InternTable::StringHashEquals::operator()(const GcRoot<mirror::String>& a,
                                               const GcRoot<mirror::String>& b) const {
  if (kIsDebugBuild) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  return a.Read<kWithoutReadBarrier>()->Equals(b.Read<kWithoutReadBarrier>());
}
315
// Heterogeneous equality: compares an interned string against a raw modified-UTF-8 key
// without allocating a mirror::String. Handles both compressed (8-bit) and uncompressed
// (16-bit) string representations.
bool InternTable::StringHashEquals::operator()(const GcRoot<mirror::String>& a,
                                               const Utf8String& b) const {
  if (kIsDebugBuild) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  ObjPtr<mirror::String> a_string = a.Read<kWithoutReadBarrier>();
  uint32_t a_length = static_cast<uint32_t>(a_string->GetLength());
  // Cheap length check first; lengths are in UTF-16 code units.
  if (a_length != b.GetUtf16Length()) {
    return false;
  }
  if (a_string->IsCompressed()) {
    size_t b_byte_count = strlen(b.GetUtf8Data());
    size_t b_utf8_length = CountModifiedUtf8Chars(b.GetUtf8Data(), b_byte_count);
    // Modified UTF-8 single byte character range is 0x01 .. 0x7f
    // The string compression occurs on regular ASCII with same exact range,
    // not on extended ASCII which up to 0xff
    const bool is_b_regular_ascii = (b_byte_count == b_utf8_length);
    if (is_b_regular_ascii) {
      // Both sides are plain ASCII bytes: compare directly.
      return memcmp(b.GetUtf8Data(),
                    a_string->GetValueCompressed(), a_length * sizeof(uint8_t)) == 0;
    } else {
      // A compressed string can never equal a key containing multi-byte characters.
      return false;
    }
  } else {
    // Uncompressed: decode b's UTF-8 on the fly and compare code point values.
    const uint16_t* a_value = a_string->GetValue();
    return CompareModifiedUtf8ToUtf16AsCodePointValues(b.GetUtf8Data(), a_value, a_length) == 0;
  }
}
344
// Serializes the table to |ptr|; returns the number of bytes written. When multiple backing
// sets exist they are first flattened into one combined set, since the serialized form is a
// single set.
size_t InternTable::Table::WriteToMemory(uint8_t* ptr) {
  if (tables_.empty()) {
    return 0;
  }
  UnorderedSet* table_to_write;
  UnorderedSet combined;  // Only populated when there is more than one backing set.
  if (tables_.size() > 1) {
    table_to_write = &combined;
    for (UnorderedSet& table : tables_) {
      for (GcRoot<mirror::String>& string : table) {
        combined.insert(string);
      }
    }
  } else {
    table_to_write = &tables_.back();
  }
  return table_to_write->WriteToMemory(ptr);
}
363
Mathieu Chartier9e868092016-10-31 14:58:04 -0700364void InternTable::Table::Remove(ObjPtr<mirror::String> s) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800365 for (UnorderedSet& table : tables_) {
Vladimir Marko54159c62018-06-20 14:30:08 +0100366 auto it = table.find(GcRoot<mirror::String>(s));
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800367 if (it != table.end()) {
Vladimir Marko54159c62018-06-20 14:30:08 +0100368 table.erase(it);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800369 return;
370 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700371 }
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800372 LOG(FATAL) << "Attempting to remove non-interned string " << s->ToModifiedUtf8();
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700373}
374
Mathieu Chartier9e868092016-10-31 14:58:04 -0700375ObjPtr<mirror::String> InternTable::Table::Find(ObjPtr<mirror::String> s) {
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700376 Locks::intern_table_lock_->AssertHeld(Thread::Current());
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800377 for (UnorderedSet& table : tables_) {
Vladimir Marko54159c62018-06-20 14:30:08 +0100378 auto it = table.find(GcRoot<mirror::String>(s));
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800379 if (it != table.end()) {
380 return it->Read();
381 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700382 }
383 return nullptr;
384}
385
Mathieu Chartier9e868092016-10-31 14:58:04 -0700386ObjPtr<mirror::String> InternTable::Table::Find(const Utf8String& string) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000387 Locks::intern_table_lock_->AssertHeld(Thread::Current());
388 for (UnorderedSet& table : tables_) {
Vladimir Marko54159c62018-06-20 14:30:08 +0100389 auto it = table.find(string);
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000390 if (it != table.end()) {
391 return it->Read();
392 }
393 }
394 return nullptr;
395}
396
// Appends a fresh backing set; later inserts go here, freezing the earlier sets.
void InternTable::Table::AddNewTable() {
  tables_.push_back(UnorderedSet());
}
400
// Inserts |s| into the newest backing set.
void InternTable::Table::Insert(ObjPtr<mirror::String> s) {
  // Always insert the last table, the image tables are before and we avoid inserting into these
  // to prevent dirty pages.
  DCHECK(!tables_.empty());
  tables_.back().insert(GcRoot<mirror::String>(s));
}
407
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700408void InternTable::Table::VisitRoots(RootVisitor* visitor) {
Mathieu Chartier4809d0a2015-04-07 10:39:04 -0700409 BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(
410 visitor, RootInfo(kRootInternedString));
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800411 for (UnorderedSet& table : tables_) {
412 for (auto& intern : table) {
413 buffered_visitor.VisitRoot(intern);
414 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700415 }
416}
417
Mathieu Chartier97509952015-07-13 14:35:43 -0700418void InternTable::Table::SweepWeaks(IsMarkedVisitor* visitor) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800419 for (UnorderedSet& table : tables_) {
420 SweepWeaks(&table, visitor);
421 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700422}
423
// Sweeps one backing set: entries whose object is no longer marked are erased; entries whose
// object was moved by the GC are updated in place. Iteration uses the iterator returned by
// erase to remain valid while removing.
void InternTable::Table::SweepWeaks(UnorderedSet* set, IsMarkedVisitor* visitor) {
  for (auto it = set->begin(), end = set->end(); it != end;) {
    // This does not need a read barrier because this is called by GC.
    mirror::Object* object = it->Read<kWithoutReadBarrier>();
    mirror::Object* new_object = visitor->IsMarked(object);
    if (new_object == nullptr) {
      // Unmarked: the string is dead; drop the weak entry.
      it = set->erase(it);
    } else {
      // Marked (possibly moved): store the up-to-date reference.
      *it = GcRoot<mirror::String>(new_object->AsString());
      ++it;
    }
  }
}
437
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800438size_t InternTable::Table::Size() const {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800439 return std::accumulate(tables_.begin(),
440 tables_.end(),
Mathieu Chartier205b7622016-01-06 15:47:09 -0800441 0U,
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800442 [](size_t sum, const UnorderedSet& set) {
Vladimir Marko54159c62018-06-20 14:30:08 +0100443 return sum + set.size();
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800444 });
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800445}
446
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700447void InternTable::ChangeWeakRootState(gc::WeakRootState new_state) {
448 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
449 ChangeWeakRootStateLocked(new_state);
450}
451
// Updates the weak-root access state. Caller must hold intern_table_lock_. Only used by
// non-read-barrier collectors (CC gates access per-thread instead). Waiters are woken when
// the new state re-enables reads.
void InternTable::ChangeWeakRootStateLocked(gc::WeakRootState new_state) {
  CHECK(!kUseReadBarrier);
  weak_root_state_ = new_state;
  if (new_state != gc::kWeakRootStateNoReadsOrWrites) {
    weak_intern_condition_.Broadcast(Thread::Current());
  }
}
459
// Constructs a Table with a single initial backing set whose load factors come from the
// runtime's hash-table tuning parameters.
InternTable::Table::Table() {
  Runtime* const runtime = Runtime::Current();
  // Initial table.
  tables_.push_back(UnorderedSet());
  tables_.back().SetLoadFactor(runtime->GetHashTableMinLoadFactor(),
                               runtime->GetHashTableMaxLoadFactor());
}
467
Brian Carlstrom7e93b502011-08-04 14:16:22 -0700468} // namespace art