blob: 96f70d1b80c3502470c8d4013daab91e48124613 [file] [log] [blame]
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
Brian Carlstrom7e93b502011-08-04 14:16:22 -070016
17#include "intern_table.h"
18
Ian Rogers700a4022014-05-19 16:49:03 -070019#include <memory>
20
David Sehr0225f8e2018-01-31 08:52:24 +000021#include "dex/utf.h"
Mathieu Chartier97509952015-07-13 14:35:43 -070022#include "gc/collector/garbage_collector.h"
Ian Rogers7dfb28c2013-08-22 08:18:36 -070023#include "gc/space/image_space.h"
Mathieu Chartier14c3bf92015-07-13 14:35:43 -070024#include "gc/weak_root_state.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070025#include "gc_root-inl.h"
Andreas Gampee15b9b12018-10-29 12:54:27 -070026#include "handle_scope-inl.h"
Mathieu Chartier4a26f172016-01-26 14:26:18 -080027#include "image-inl.h"
Vladimir Marko05792b92015-08-03 11:56:49 +010028#include "mirror/dex_cache-inl.h"
Ian Rogers7dfb28c2013-08-22 08:18:36 -070029#include "mirror/object-inl.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070030#include "mirror/object_array-inl.h"
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -070031#include "mirror/string-inl.h"
Andreas Gampe5d08fcc2017-06-05 17:56:46 -070032#include "object_callbacks.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070033#include "scoped_thread_state_change-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080034#include "thread.h"
Brian Carlstrom7e93b502011-08-04 14:16:22 -070035
36namespace art {
37
Ian Rogers7dfb28c2013-08-22 08:18:36 -070038InternTable::InternTable()
Vladimir Marko1a1de672016-10-13 12:53:15 +010039 : log_new_roots_(false),
Mathieu Chartier14c3bf92015-07-13 14:35:43 -070040 weak_intern_condition_("New intern condition", *Locks::intern_table_lock_),
41 weak_root_state_(gc::kWeakRootStateNormal) {
Mathieu Chartierc11d9b82013-09-19 10:01:59 -070042}
Elliott Hughesde69d7f2011-08-18 16:49:37 -070043
Brian Carlstroma663ea52011-08-19 23:33:41 -070044size_t InternTable::Size() const {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +010045 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartiereb175f72014-10-31 11:49:27 -070046 return strong_interns_.Size() + weak_interns_.Size();
Brian Carlstroma663ea52011-08-19 23:33:41 -070047}
48
Hiroshi Yamauchia91a4bc2014-06-13 16:44:55 -070049size_t InternTable::StrongSize() const {
50 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartiereb175f72014-10-31 11:49:27 -070051 return strong_interns_.Size();
Hiroshi Yamauchia91a4bc2014-06-13 16:44:55 -070052}
53
54size_t InternTable::WeakSize() const {
55 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartiereb175f72014-10-31 11:49:27 -070056 return weak_interns_.Size();
Hiroshi Yamauchia91a4bc2014-06-13 16:44:55 -070057}
58
Elliott Hughescac6cc72011-11-03 20:31:21 -070059void InternTable::DumpForSigQuit(std::ostream& os) const {
Mathieu Chartiereb175f72014-10-31 11:49:27 -070060 os << "Intern table: " << StrongSize() << " strong; " << WeakSize() << " weak\n";
Elliott Hughescac6cc72011-11-03 20:31:21 -070061}
62
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070063void InternTable::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +010064 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartier893263b2014-03-04 11:07:42 -080065 if ((flags & kVisitRootFlagAllRoots) != 0) {
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070066 strong_interns_.VisitRoots(visitor);
Mathieu Chartier893263b2014-03-04 11:07:42 -080067 } else if ((flags & kVisitRootFlagNewRoots) != 0) {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -070068 for (auto& root : new_strong_intern_roots_) {
Mathieu Chartier9e868092016-10-31 14:58:04 -070069 ObjPtr<mirror::String> old_ref = root.Read<kWithoutReadBarrier>();
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070070 root.VisitRoot(visitor, RootInfo(kRootInternedString));
Mathieu Chartier9e868092016-10-31 14:58:04 -070071 ObjPtr<mirror::String> new_ref = root.Read<kWithoutReadBarrier>();
Mathieu Chartierc2e20622014-11-03 11:41:47 -080072 if (new_ref != old_ref) {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -070073 // The GC moved a root in the log. Need to search the strong interns and update the
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070074 // corresponding object. This is slow, but luckily for us, this may only happen with a
75 // concurrent moving GC.
Mathieu Chartiereb175f72014-10-31 11:49:27 -070076 strong_interns_.Remove(old_ref);
77 strong_interns_.Insert(new_ref);
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070078 }
79 }
Mathieu Chartier893263b2014-03-04 11:07:42 -080080 }
Mathieu Chartier893263b2014-03-04 11:07:42 -080081 if ((flags & kVisitRootFlagClearRootLog) != 0) {
82 new_strong_intern_roots_.clear();
83 }
84 if ((flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
85 log_new_roots_ = true;
86 } else if ((flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
87 log_new_roots_ = false;
Ian Rogers1d54e732013-05-02 21:10:01 -070088 }
Mathieu Chartier423d2a32013-09-12 17:33:56 -070089 // Note: we deliberately don't visit the weak_interns_ table and the immutable image roots.
Brian Carlstrom7e93b502011-08-04 14:16:22 -070090}
91
Mathieu Chartier9e868092016-10-31 14:58:04 -070092ObjPtr<mirror::String> InternTable::LookupWeak(Thread* self, ObjPtr<mirror::String> s) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -080093 MutexLock mu(self, *Locks::intern_table_lock_);
94 return LookupWeakLocked(s);
Mathieu Chartierf7fd9702015-11-09 11:16:49 -080095}
96
Mathieu Chartier9e868092016-10-31 14:58:04 -070097ObjPtr<mirror::String> InternTable::LookupStrong(Thread* self, ObjPtr<mirror::String> s) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -080098 MutexLock mu(self, *Locks::intern_table_lock_);
99 return LookupStrongLocked(s);
100}
101
Mathieu Chartier9e868092016-10-31 14:58:04 -0700102ObjPtr<mirror::String> InternTable::LookupStrong(Thread* self,
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000103 uint32_t utf16_length,
104 const char* utf8_data) {
105 DCHECK_EQ(utf16_length, CountModifiedUtf8Chars(utf8_data));
106 Utf8String string(utf16_length,
107 utf8_data,
108 ComputeUtf16HashFromModifiedUtf8(utf8_data, utf16_length));
109 MutexLock mu(self, *Locks::intern_table_lock_);
110 return strong_interns_.Find(string);
111}
112
Mathieu Chartier9e868092016-10-31 14:58:04 -0700113ObjPtr<mirror::String> InternTable::LookupWeakLocked(ObjPtr<mirror::String> s) {
Nicolas Geoffray1bc977c2016-01-23 14:15:49 +0000114 return weak_interns_.Find(s);
115}
116
Mathieu Chartier9e868092016-10-31 14:58:04 -0700117ObjPtr<mirror::String> InternTable::LookupStrongLocked(ObjPtr<mirror::String> s) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -0800118 return strong_interns_.Find(s);
119}
120
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800121void InternTable::AddNewTable() {
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700122 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800123 weak_interns_.AddNewTable();
124 strong_interns_.AddNewTable();
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700125}
126
Mathieu Chartier9e868092016-10-31 14:58:04 -0700127ObjPtr<mirror::String> InternTable::InsertStrong(ObjPtr<mirror::String> s) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100128 Runtime* runtime = Runtime::Current();
129 if (runtime->IsActiveTransaction()) {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700130 runtime->RecordStrongStringInsertion(s);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100131 }
Mathieu Chartier893263b2014-03-04 11:07:42 -0800132 if (log_new_roots_) {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700133 new_strong_intern_roots_.push_back(GcRoot<mirror::String>(s));
Mathieu Chartier893263b2014-03-04 11:07:42 -0800134 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700135 strong_interns_.Insert(s);
Mathieu Chartier893263b2014-03-04 11:07:42 -0800136 return s;
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100137}
138
Mathieu Chartier9e868092016-10-31 14:58:04 -0700139ObjPtr<mirror::String> InternTable::InsertWeak(ObjPtr<mirror::String> s) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100140 Runtime* runtime = Runtime::Current();
141 if (runtime->IsActiveTransaction()) {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700142 runtime->RecordWeakStringInsertion(s);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100143 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700144 weak_interns_.Insert(s);
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700145 return s;
146}
147
Mathieu Chartier9e868092016-10-31 14:58:04 -0700148void InternTable::RemoveStrong(ObjPtr<mirror::String> s) {
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700149 strong_interns_.Remove(s);
Hiroshi Yamauchi1bd48722014-05-23 19:58:15 -0700150}
151
Mathieu Chartier9e868092016-10-31 14:58:04 -0700152void InternTable::RemoveWeak(ObjPtr<mirror::String> s) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100153 Runtime* runtime = Runtime::Current();
154 if (runtime->IsActiveTransaction()) {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700155 runtime->RecordWeakStringRemoval(s);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100156 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700157 weak_interns_.Remove(s);
Brian Carlstrom7e93b502011-08-04 14:16:22 -0700158}
159
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100160// Insert/remove methods used to undo changes made during an aborted transaction.
Mathieu Chartier9e868092016-10-31 14:58:04 -0700161ObjPtr<mirror::String> InternTable::InsertStrongFromTransaction(ObjPtr<mirror::String> s) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100162 DCHECK(!Runtime::Current()->IsActiveTransaction());
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700163 return InsertStrong(s);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100164}
Mathieu Chartier9e868092016-10-31 14:58:04 -0700165
166ObjPtr<mirror::String> InternTable::InsertWeakFromTransaction(ObjPtr<mirror::String> s) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100167 DCHECK(!Runtime::Current()->IsActiveTransaction());
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700168 return InsertWeak(s);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100169}
Mathieu Chartier9e868092016-10-31 14:58:04 -0700170
171void InternTable::RemoveStrongFromTransaction(ObjPtr<mirror::String> s) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100172 DCHECK(!Runtime::Current()->IsActiveTransaction());
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700173 RemoveStrong(s);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100174}
Mathieu Chartier9e868092016-10-31 14:58:04 -0700175
176void InternTable::RemoveWeakFromTransaction(ObjPtr<mirror::String> s) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100177 DCHECK(!Runtime::Current()->IsActiveTransaction());
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700178 RemoveWeak(s);
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100179}
180
Hiroshi Yamauchi0b713572015-06-16 18:29:23 -0700181void InternTable::BroadcastForNewInterns() {
Hiroshi Yamauchi0b713572015-06-16 18:29:23 -0700182 Thread* self = Thread::Current();
183 MutexLock mu(self, *Locks::intern_table_lock_);
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700184 weak_intern_condition_.Broadcast(self);
Hiroshi Yamauchi0b713572015-06-16 18:29:23 -0700185}
186
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700187void InternTable::WaitUntilAccessible(Thread* self) {
188 Locks::intern_table_lock_->ExclusiveUnlock(self);
Mathieu Chartierf1d666e2015-09-03 16:13:34 -0700189 {
190 ScopedThreadSuspension sts(self, kWaitingWeakGcRootRead);
191 MutexLock mu(self, *Locks::intern_table_lock_);
Hiroshi Yamauchi9e6f0972016-11-03 13:03:20 -0700192 while ((!kUseReadBarrier && weak_root_state_ == gc::kWeakRootStateNoReadsOrWrites) ||
193 (kUseReadBarrier && !self->GetWeakRefAccessEnabled())) {
Mathieu Chartierf1d666e2015-09-03 16:13:34 -0700194 weak_intern_condition_.Wait(self);
195 }
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700196 }
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700197 Locks::intern_table_lock_->ExclusiveLock(self);
198}
199
Mathieu Chartier9e868092016-10-31 14:58:04 -0700200ObjPtr<mirror::String> InternTable::Insert(ObjPtr<mirror::String> s,
201 bool is_strong,
202 bool holding_locks) {
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800203 if (s == nullptr) {
204 return nullptr;
205 }
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700206 Thread* const self = Thread::Current();
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100207 MutexLock mu(self, *Locks::intern_table_lock_);
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700208 if (kDebugLocking && !holding_locks) {
209 Locks::mutator_lock_->AssertSharedHeld(self);
210 CHECK_EQ(2u, self->NumberOfHeldMutexes()) << "may only safely hold the mutator lock";
Mathieu Chartierc11d9b82013-09-19 10:01:59 -0700211 }
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700212 while (true) {
Mathieu Chartier90ef3db2015-08-04 15:19:41 -0700213 if (holding_locks) {
214 if (!kUseReadBarrier) {
215 CHECK_EQ(weak_root_state_, gc::kWeakRootStateNormal);
216 } else {
217 CHECK(self->GetWeakRefAccessEnabled());
218 }
219 }
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700220 // Check the strong table for a match.
Mathieu Chartier9e868092016-10-31 14:58:04 -0700221 ObjPtr<mirror::String> strong = LookupStrongLocked(s);
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700222 if (strong != nullptr) {
223 return strong;
224 }
Mathieu Chartier90ef3db2015-08-04 15:19:41 -0700225 if ((!kUseReadBarrier && weak_root_state_ != gc::kWeakRootStateNoReadsOrWrites) ||
226 (kUseReadBarrier && self->GetWeakRefAccessEnabled())) {
227 break;
228 }
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700229 // weak_root_state_ is set to gc::kWeakRootStateNoReadsOrWrites in the GC pause but is only
230 // cleared after SweepSystemWeaks has completed. This is why we need to wait until it is
231 // cleared.
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700232 CHECK(!holding_locks);
233 StackHandleScope<1> hs(self);
234 auto h = hs.NewHandleWrapper(&s);
235 WaitUntilAccessible(self);
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700236 }
Mathieu Chartier90ef3db2015-08-04 15:19:41 -0700237 if (!kUseReadBarrier) {
238 CHECK_EQ(weak_root_state_, gc::kWeakRootStateNormal);
239 } else {
240 CHECK(self->GetWeakRefAccessEnabled());
241 }
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800242 // There is no match in the strong table, check the weak table.
Mathieu Chartier9e868092016-10-31 14:58:04 -0700243 ObjPtr<mirror::String> weak = LookupWeakLocked(s);
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800244 if (weak != nullptr) {
245 if (is_strong) {
246 // A match was found in the weak table. Promote to the strong table.
247 RemoveWeak(weak);
248 return InsertStrong(weak);
249 }
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700250 return weak;
251 }
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800252 // No match in the strong table or the weak table. Insert into the strong / weak table.
253 return is_strong ? InsertStrong(s) : InsertWeak(s);
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700254}
255
Mathieu Chartier9e868092016-10-31 14:58:04 -0700256ObjPtr<mirror::String> InternTable::InternStrong(int32_t utf16_length, const char* utf8_data) {
Mathieu Chartiered0fc1d2014-03-21 14:09:35 -0700257 DCHECK(utf8_data != nullptr);
Vladimir Markod2bdb9b2016-07-08 17:23:22 +0100258 Thread* self = Thread::Current();
259 // Try to avoid allocation.
Mathieu Chartier9e868092016-10-31 14:58:04 -0700260 ObjPtr<mirror::String> s = LookupStrong(self, utf16_length, utf8_data);
Vladimir Markod2bdb9b2016-07-08 17:23:22 +0100261 if (s != nullptr) {
262 return s;
263 }
Ian Rogers7dfb28c2013-08-22 08:18:36 -0700264 return InternStrong(mirror::String::AllocFromModifiedUtf8(
Vladimir Markod2bdb9b2016-07-08 17:23:22 +0100265 self, utf16_length, utf8_data));
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700266}
267
Mathieu Chartier9e868092016-10-31 14:58:04 -0700268ObjPtr<mirror::String> InternTable::InternStrong(const char* utf8_data) {
Mathieu Chartiered0fc1d2014-03-21 14:09:35 -0700269 DCHECK(utf8_data != nullptr);
270 return InternStrong(mirror::String::AllocFromModifiedUtf8(Thread::Current(), utf8_data));
Brian Carlstromc74255f2011-09-11 22:47:39 -0700271}
272
Mathieu Chartier9e868092016-10-31 14:58:04 -0700273ObjPtr<mirror::String> InternTable::InternStrongImageString(ObjPtr<mirror::String> s) {
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700274 // May be holding the heap bitmap lock.
275 return Insert(s, true, true);
276}
277
Vladimir Marko8e05f092019-06-10 11:10:38 +0100278void InternTable::PromoteWeakToStrong() {
279 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
280 DCHECK_EQ(weak_interns_.tables_.size(), 1u);
281 for (GcRoot<mirror::String>& entry : weak_interns_.tables_.front().set_) {
282 DCHECK(LookupStrongLocked(entry.Read()) == nullptr);
283 InsertStrong(entry.Read());
284 }
285 weak_interns_.tables_.front().set_.clear();
286}
287
Mathieu Chartier9e868092016-10-31 14:58:04 -0700288ObjPtr<mirror::String> InternTable::InternStrong(ObjPtr<mirror::String> s) {
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700289 return Insert(s, true, false);
Brian Carlstromc74255f2011-09-11 22:47:39 -0700290}
291
Vladimir Marko31c3daa2019-06-13 12:18:37 +0100292ObjPtr<mirror::String> InternTable::InternWeak(const char* utf8_data) {
293 DCHECK(utf8_data != nullptr);
294 return InternWeak(mirror::String::AllocFromModifiedUtf8(Thread::Current(), utf8_data));
295}
296
Mathieu Chartier9e868092016-10-31 14:58:04 -0700297ObjPtr<mirror::String> InternTable::InternWeak(ObjPtr<mirror::String> s) {
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700298 return Insert(s, false, false);
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700299}
300
Mathieu Chartier9e868092016-10-31 14:58:04 -0700301bool InternTable::ContainsWeak(ObjPtr<mirror::String> s) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -0800302 return LookupWeak(Thread::Current(), s) == s;
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700303}
304
Mathieu Chartier97509952015-07-13 14:35:43 -0700305void InternTable::SweepInternTableWeaks(IsMarkedVisitor* visitor) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100306 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartier97509952015-07-13 14:35:43 -0700307 weak_interns_.SweepWeaks(visitor);
Brian Carlstroma663ea52011-08-19 23:33:41 -0700308}
309
Mathieu Chartierd39645e2015-06-09 17:50:29 -0700310size_t InternTable::WriteToMemory(uint8_t* ptr) {
311 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800312 return strong_interns_.WriteToMemory(ptr);
Mathieu Chartierd39645e2015-06-09 17:50:29 -0700313}
314
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800315std::size_t InternTable::StringHashEquals::operator()(const GcRoot<mirror::String>& root) const {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700316 if (kIsDebugBuild) {
317 Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
318 }
Alexey Grebenkin21f23642016-12-02 17:44:54 +0300319 // An additional cast to prevent undesired sign extension.
320 return static_cast<size_t>(
321 static_cast<uint32_t>(root.Read<kWithoutReadBarrier>()->GetHashCode()));
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700322}
323
324bool InternTable::StringHashEquals::operator()(const GcRoot<mirror::String>& a,
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800325 const GcRoot<mirror::String>& b) const {
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700326 if (kIsDebugBuild) {
327 Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
328 }
Mathieu Chartier9e868092016-10-31 14:58:04 -0700329 return a.Read<kWithoutReadBarrier>()->Equals(b.Read<kWithoutReadBarrier>());
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -0700330}
331
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000332bool InternTable::StringHashEquals::operator()(const GcRoot<mirror::String>& a,
333 const Utf8String& b) const {
334 if (kIsDebugBuild) {
335 Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
336 }
Mathieu Chartier9e868092016-10-31 14:58:04 -0700337 ObjPtr<mirror::String> a_string = a.Read<kWithoutReadBarrier>();
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000338 uint32_t a_length = static_cast<uint32_t>(a_string->GetLength());
339 if (a_length != b.GetUtf16Length()) {
340 return false;
341 }
jessicahandojo3aaa37b2016-07-29 14:46:37 -0700342 if (a_string->IsCompressed()) {
343 size_t b_byte_count = strlen(b.GetUtf8Data());
344 size_t b_utf8_length = CountModifiedUtf8Chars(b.GetUtf8Data(), b_byte_count);
345 // Modified UTF-8 single byte character range is 0x01 .. 0x7f
346 // The string compression occurs on regular ASCII with same exact range,
347 // not on extended ASCII which up to 0xff
348 const bool is_b_regular_ascii = (b_byte_count == b_utf8_length);
349 if (is_b_regular_ascii) {
350 return memcmp(b.GetUtf8Data(),
351 a_string->GetValueCompressed(), a_length * sizeof(uint8_t)) == 0;
352 } else {
353 return false;
354 }
355 } else {
356 const uint16_t* a_value = a_string->GetValue();
357 return CompareModifiedUtf8ToUtf16AsCodePointValues(b.GetUtf8Data(), a_value, a_length) == 0;
358 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000359}
360
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800361size_t InternTable::Table::WriteToMemory(uint8_t* ptr) {
362 if (tables_.empty()) {
363 return 0;
364 }
365 UnorderedSet* table_to_write;
366 UnorderedSet combined;
367 if (tables_.size() > 1) {
368 table_to_write = &combined;
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700369 for (InternalTable& table : tables_) {
370 for (GcRoot<mirror::String>& string : table.set_) {
Vladimir Marko54159c62018-06-20 14:30:08 +0100371 combined.insert(string);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800372 }
373 }
374 } else {
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700375 table_to_write = &tables_.back().set_;
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800376 }
377 return table_to_write->WriteToMemory(ptr);
Mathieu Chartierd39645e2015-06-09 17:50:29 -0700378}
379
Mathieu Chartier9e868092016-10-31 14:58:04 -0700380void InternTable::Table::Remove(ObjPtr<mirror::String> s) {
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700381 for (InternalTable& table : tables_) {
382 auto it = table.set_.find(GcRoot<mirror::String>(s));
383 if (it != table.set_.end()) {
384 table.set_.erase(it);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800385 return;
386 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700387 }
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800388 LOG(FATAL) << "Attempting to remove non-interned string " << s->ToModifiedUtf8();
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700389}
390
Mathieu Chartier9e868092016-10-31 14:58:04 -0700391ObjPtr<mirror::String> InternTable::Table::Find(ObjPtr<mirror::String> s) {
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700392 Locks::intern_table_lock_->AssertHeld(Thread::Current());
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700393 for (InternalTable& table : tables_) {
394 auto it = table.set_.find(GcRoot<mirror::String>(s));
395 if (it != table.set_.end()) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800396 return it->Read();
397 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700398 }
399 return nullptr;
400}
401
Mathieu Chartier9e868092016-10-31 14:58:04 -0700402ObjPtr<mirror::String> InternTable::Table::Find(const Utf8String& string) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000403 Locks::intern_table_lock_->AssertHeld(Thread::Current());
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700404 for (InternalTable& table : tables_) {
405 auto it = table.set_.find(string);
406 if (it != table.set_.end()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000407 return it->Read();
408 }
409 }
410 return nullptr;
411}
412
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800413void InternTable::Table::AddNewTable() {
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700414 tables_.push_back(InternalTable());
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700415}
416
Mathieu Chartier9e868092016-10-31 14:58:04 -0700417void InternTable::Table::Insert(ObjPtr<mirror::String> s) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800418 // Always insert the last table, the image tables are before and we avoid inserting into these
419 // to prevent dirty pages.
420 DCHECK(!tables_.empty());
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700421 tables_.back().set_.insert(GcRoot<mirror::String>(s));
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700422}
423
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700424void InternTable::Table::VisitRoots(RootVisitor* visitor) {
Mathieu Chartier4809d0a2015-04-07 10:39:04 -0700425 BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(
426 visitor, RootInfo(kRootInternedString));
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700427 for (InternalTable& table : tables_) {
428 for (auto& intern : table.set_) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800429 buffered_visitor.VisitRoot(intern);
430 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700431 }
432}
433
Mathieu Chartier97509952015-07-13 14:35:43 -0700434void InternTable::Table::SweepWeaks(IsMarkedVisitor* visitor) {
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700435 for (InternalTable& table : tables_) {
436 SweepWeaks(&table.set_, visitor);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800437 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700438}
439
Mathieu Chartier97509952015-07-13 14:35:43 -0700440void InternTable::Table::SweepWeaks(UnorderedSet* set, IsMarkedVisitor* visitor) {
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700441 for (auto it = set->begin(), end = set->end(); it != end;) {
442 // This does not need a read barrier because this is called by GC.
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800443 mirror::Object* object = it->Read<kWithoutReadBarrier>();
Mathieu Chartier97509952015-07-13 14:35:43 -0700444 mirror::Object* new_object = visitor->IsMarked(object);
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700445 if (new_object == nullptr) {
Vladimir Marko54159c62018-06-20 14:30:08 +0100446 it = set->erase(it);
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700447 } else {
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800448 *it = GcRoot<mirror::String>(new_object->AsString());
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700449 ++it;
450 }
451 }
452}
453
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800454size_t InternTable::Table::Size() const {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800455 return std::accumulate(tables_.begin(),
456 tables_.end(),
Mathieu Chartier205b7622016-01-06 15:47:09 -0800457 0U,
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700458 [](size_t sum, const InternalTable& table) {
459 return sum + table.Size();
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800460 });
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800461}
462
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700463void InternTable::ChangeWeakRootState(gc::WeakRootState new_state) {
464 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
465 ChangeWeakRootStateLocked(new_state);
466}
467
468void InternTable::ChangeWeakRootStateLocked(gc::WeakRootState new_state) {
Hiroshi Yamauchifdbd13c2015-09-02 16:16:58 -0700469 CHECK(!kUseReadBarrier);
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700470 weak_root_state_ = new_state;
471 if (new_state != gc::kWeakRootStateNoReadsOrWrites) {
472 weak_intern_condition_.Broadcast(Thread::Current());
473 }
474}
475
Mathieu Chartier32cc9ee2015-10-15 09:19:15 -0700476InternTable::Table::Table() {
477 Runtime* const runtime = Runtime::Current();
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700478 InternalTable initial_table;
479 initial_table.set_.SetLoadFactor(runtime->GetHashTableMinLoadFactor(),
480 runtime->GetHashTableMaxLoadFactor());
481 tables_.push_back(std::move(initial_table));
Mathieu Chartier32cc9ee2015-10-15 09:19:15 -0700482}
483
Brian Carlstrom7e93b502011-08-04 14:16:22 -0700484} // namespace art