/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "intern_table-inl.h"

#include <memory>

#include "dex/utf.h"
#include "gc/collector/garbage_collector.h"
#include "gc/space/image_space.h"
#include "gc/weak_root_state.h"
#include "gc_root-inl.h"
#include "handle_scope-inl.h"
#include "image-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "object_callbacks.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"

namespace art {

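// The intern table tracks java.lang.String instances in two sets: strong_interns_, whose entries
// are reported to the GC as roots, and weak_interns_, whose entries may be swept once nothing
// else keeps them alive.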
InternTable::InternTable()
    : log_new_roots_(false),
      weak_intern_condition_("New intern condition", *Locks::intern_table_lock_),
      weak_root_state_(gc::kWeakRootStateNormal) {
}

size_t InternTable::Size() const {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  return strong_interns_.Size() + weak_interns_.Size();
}

size_t InternTable::StrongSize() const {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  return strong_interns_.Size();
}

size_t InternTable::WeakSize() const {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  return weak_interns_.Size();
}

void InternTable::DumpForSigQuit(std::ostream& os) const {
  os << "Intern table: " << StrongSize() << " strong; " << WeakSize() << " weak\n";
}

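// Report the strong interns as GC roots. With kVisitRootFlagAllRoots the whole strong table is
// visited; with kVisitRootFlagNewRoots only the strings recorded in new_strong_intern_roots_ are
// visited, and any root the GC moved is re-inserted so the hash set stays consistent.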
void InternTable::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  if ((flags & kVisitRootFlagAllRoots) != 0) {
    strong_interns_.VisitRoots(visitor);
  } else if ((flags & kVisitRootFlagNewRoots) != 0) {
    for (auto& root : new_strong_intern_roots_) {
      ObjPtr<mirror::String> old_ref = root.Read<kWithoutReadBarrier>();
      root.VisitRoot(visitor, RootInfo(kRootInternedString));
      ObjPtr<mirror::String> new_ref = root.Read<kWithoutReadBarrier>();
      if (new_ref != old_ref) {
        // The GC moved a root in the log. Need to search the strong interns and update the
        // corresponding object. This is slow, but luckily for us, this may only happen with a
        // concurrent moving GC.
        strong_interns_.Remove(old_ref);
        strong_interns_.Insert(new_ref);
      }
    }
  }
  if ((flags & kVisitRootFlagClearRootLog) != 0) {
    new_strong_intern_roots_.clear();
  }
  if ((flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
    log_new_roots_ = true;
  } else if ((flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
    log_new_roots_ = false;
  }
  // Note: we deliberately don't visit the weak_interns_ table and the immutable image roots.
}

ObjPtr<mirror::String> InternTable::LookupWeak(Thread* self, ObjPtr<mirror::String> s) {
  MutexLock mu(self, *Locks::intern_table_lock_);
  return LookupWeakLocked(s);
}

ObjPtr<mirror::String> InternTable::LookupStrong(Thread* self, ObjPtr<mirror::String> s) {
  MutexLock mu(self, *Locks::intern_table_lock_);
  return LookupStrongLocked(s);
}

ObjPtr<mirror::String> InternTable::LookupStrong(Thread* self,
                                                 uint32_t utf16_length,
                                                 const char* utf8_data) {
  DCHECK_EQ(utf16_length, CountModifiedUtf8Chars(utf8_data));
  Utf8String string(utf16_length,
                    utf8_data,
                    ComputeUtf16HashFromModifiedUtf8(utf8_data, utf16_length));
  MutexLock mu(self, *Locks::intern_table_lock_);
  return strong_interns_.Find(string);
}

ObjPtr<mirror::String> InternTable::LookupWeakLocked(ObjPtr<mirror::String> s) {
  return weak_interns_.Find(s);
}

ObjPtr<mirror::String> InternTable::LookupStrongLocked(ObjPtr<mirror::String> s) {
  return strong_interns_.Find(s);
}

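// Start a fresh internal table in both the strong and weak sets. Earlier tables (for example
// those backed by image data) are still searched but no longer inserted into, which keeps their
// pages clean.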
void InternTable::AddNewTable() {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  weak_interns_.AddNewTable();
  strong_interns_.AddNewTable();
}

ObjPtr<mirror::String> InternTable::InsertStrong(ObjPtr<mirror::String> s) {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsActiveTransaction()) {
    runtime->RecordStrongStringInsertion(s);
  }
  if (log_new_roots_) {
    new_strong_intern_roots_.push_back(GcRoot<mirror::String>(s));
  }
  strong_interns_.Insert(s);
  return s;
}

ObjPtr<mirror::String> InternTable::InsertWeak(ObjPtr<mirror::String> s) {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsActiveTransaction()) {
    runtime->RecordWeakStringInsertion(s);
  }
  weak_interns_.Insert(s);
  return s;
}

void InternTable::RemoveStrong(ObjPtr<mirror::String> s) {
  strong_interns_.Remove(s);
}

void InternTable::RemoveWeak(ObjPtr<mirror::String> s) {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsActiveTransaction()) {
    runtime->RecordWeakStringRemoval(s);
  }
  weak_interns_.Remove(s);
}

// Insert/remove methods used to undo changes made during an aborted transaction.
ObjPtr<mirror::String> InternTable::InsertStrongFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  return InsertStrong(s);
}

ObjPtr<mirror::String> InternTable::InsertWeakFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  return InsertWeak(s);
}

void InternTable::RemoveStrongFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  RemoveStrong(s);
}

void InternTable::RemoveWeakFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  RemoveWeak(s);
}

void InternTable::BroadcastForNewInterns() {
  Thread* self = Thread::Current();
  MutexLock mu(self, *Locks::intern_table_lock_);
  weak_intern_condition_.Broadcast(self);
}

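// Block until weak root access is re-enabled by the GC. The intern table lock is released while
// waiting and re-acquired before returning.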
void InternTable::WaitUntilAccessible(Thread* self) {
  Locks::intern_table_lock_->ExclusiveUnlock(self);
  {
    ScopedThreadSuspension sts(self, kWaitingWeakGcRootRead);
    MutexLock mu(self, *Locks::intern_table_lock_);
    while ((!kUseReadBarrier && weak_root_state_ == gc::kWeakRootStateNoReadsOrWrites) ||
           (kUseReadBarrier && !self->GetWeakRefAccessEnabled())) {
      weak_intern_condition_.Wait(self);
    }
  }
  Locks::intern_table_lock_->ExclusiveLock(self);
}

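// Shared implementation for interning: return an existing strong intern if present, otherwise
// promote a matching weak intern (for strong interning) or insert a new entry. If weak root
// access is currently disabled by the GC, wait until it is re-enabled; callers that pass
// holding_locks must only do so when weak root access is guaranteed to be enabled.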
ObjPtr<mirror::String> InternTable::Insert(ObjPtr<mirror::String> s,
                                           bool is_strong,
                                           bool holding_locks) {
  if (s == nullptr) {
    return nullptr;
  }
  Thread* const self = Thread::Current();
  MutexLock mu(self, *Locks::intern_table_lock_);
  if (kDebugLocking && !holding_locks) {
    Locks::mutator_lock_->AssertSharedHeld(self);
    CHECK_EQ(2u, self->NumberOfHeldMutexes()) << "may only safely hold the mutator lock";
  }
  while (true) {
    if (holding_locks) {
      if (!kUseReadBarrier) {
        CHECK_EQ(weak_root_state_, gc::kWeakRootStateNormal);
      } else {
        CHECK(self->GetWeakRefAccessEnabled());
      }
    }
    // Check the strong table for a match.
    ObjPtr<mirror::String> strong = LookupStrongLocked(s);
    if (strong != nullptr) {
      return strong;
    }
    if ((!kUseReadBarrier && weak_root_state_ != gc::kWeakRootStateNoReadsOrWrites) ||
        (kUseReadBarrier && self->GetWeakRefAccessEnabled())) {
      break;
    }
    // weak_root_state_ is set to gc::kWeakRootStateNoReadsOrWrites in the GC pause but is only
    // cleared after SweepSystemWeaks has completed. This is why we need to wait until it is
    // cleared.
    CHECK(!holding_locks);
    StackHandleScope<1> hs(self);
    auto h = hs.NewHandleWrapper(&s);
    WaitUntilAccessible(self);
  }
  if (!kUseReadBarrier) {
    CHECK_EQ(weak_root_state_, gc::kWeakRootStateNormal);
  } else {
    CHECK(self->GetWeakRefAccessEnabled());
  }
  // There is no match in the strong table; check the weak table.
  ObjPtr<mirror::String> weak = LookupWeakLocked(s);
  if (weak != nullptr) {
    if (is_strong) {
      // A match was found in the weak table. Promote it to the strong table.
      RemoveWeak(weak);
      return InsertStrong(weak);
    }
    return weak;
  }
  // No match in either the strong table or the weak table. Insert into the strong or weak table.
  return is_strong ? InsertStrong(s) : InsertWeak(s);
}

ObjPtr<mirror::String> InternTable::InternStrong(int32_t utf16_length, const char* utf8_data) {
  DCHECK(utf8_data != nullptr);
  Thread* self = Thread::Current();
  // Try to avoid allocation.
  ObjPtr<mirror::String> s = LookupStrong(self, utf16_length, utf8_data);
  if (s != nullptr) {
    return s;
  }
  return InternStrong(mirror::String::AllocFromModifiedUtf8(
      self, utf16_length, utf8_data));
}

ObjPtr<mirror::String> InternTable::InternStrong(const char* utf8_data) {
  DCHECK(utf8_data != nullptr);
  return InternStrong(mirror::String::AllocFromModifiedUtf8(Thread::Current(), utf8_data));
}

ObjPtr<mirror::String> InternTable::InternStrongImageString(ObjPtr<mirror::String> s) {
  // May be holding the heap bitmap lock.
  return Insert(s, true, true);
}

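// Move every weak intern into the strong table and clear the weak set. Expects the weak set to
// consist of a single internal table.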
void InternTable::PromoteWeakToStrong() {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  DCHECK_EQ(weak_interns_.tables_.size(), 1u);
  for (GcRoot<mirror::String>& entry : weak_interns_.tables_.front().set_) {
    DCHECK(LookupStrongLocked(entry.Read()) == nullptr);
    InsertStrong(entry.Read());
  }
  weak_interns_.tables_.front().set_.clear();
}

ObjPtr<mirror::String> InternTable::InternStrong(ObjPtr<mirror::String> s) {
  return Insert(s, true, false);
}

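// Weak interning: the returned string remains in the table only while something else keeps it
// alive; otherwise it is removed during weak sweeping.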
ObjPtr<mirror::String> InternTable::InternWeak(const char* utf8_data) {
  DCHECK(utf8_data != nullptr);
  return InternWeak(mirror::String::AllocFromModifiedUtf8(Thread::Current(), utf8_data));
}

ObjPtr<mirror::String> InternTable::InternWeak(ObjPtr<mirror::String> s) {
  return Insert(s, false, false);
}

bool InternTable::ContainsWeak(ObjPtr<mirror::String> s) {
  return LookupWeak(Thread::Current(), s) == s;
}

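// Called by the GC while sweeping system weaks: entries whose strings are no longer marked are
// removed, and entries for strings the GC moved are updated to the new address.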
void InternTable::SweepInternTableWeaks(IsMarkedVisitor* visitor) {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  weak_interns_.SweepWeaks(visitor);
}

void InternTable::Table::Remove(ObjPtr<mirror::String> s) {
  for (InternalTable& table : tables_) {
    auto it = table.set_.find(GcRoot<mirror::String>(s));
    if (it != table.set_.end()) {
      table.set_.erase(it);
      return;
    }
  }
  LOG(FATAL) << "Attempting to remove non-interned string " << s->ToModifiedUtf8();
}

ObjPtr<mirror::String> InternTable::Table::Find(ObjPtr<mirror::String> s) {
  Locks::intern_table_lock_->AssertHeld(Thread::Current());
  for (InternalTable& table : tables_) {
    auto it = table.set_.find(GcRoot<mirror::String>(s));
    if (it != table.set_.end()) {
      return it->Read();
    }
  }
  return nullptr;
}

ObjPtr<mirror::String> InternTable::Table::Find(const Utf8String& string) {
  Locks::intern_table_lock_->AssertHeld(Thread::Current());
  for (InternalTable& table : tables_) {
    auto it = table.set_.find(string);
    if (it != table.set_.end()) {
      return it->Read();
    }
  }
  return nullptr;
}

void InternTable::Table::AddNewTable() {
  tables_.push_back(InternalTable());
}

void InternTable::Table::Insert(ObjPtr<mirror::String> s) {
  // Always insert into the last table; the image tables come before it and we avoid inserting
  // into them to prevent dirty pages.
  DCHECK(!tables_.empty());
  tables_.back().set_.insert(GcRoot<mirror::String>(s));
}

void InternTable::Table::VisitRoots(RootVisitor* visitor) {
  BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(
      visitor, RootInfo(kRootInternedString));
  for (InternalTable& table : tables_) {
    for (auto& intern : table.set_) {
      buffered_visitor.VisitRoot(intern);
    }
  }
}

void InternTable::Table::SweepWeaks(IsMarkedVisitor* visitor) {
  for (InternalTable& table : tables_) {
    SweepWeaks(&table.set_, visitor);
  }
}

void InternTable::Table::SweepWeaks(UnorderedSet* set, IsMarkedVisitor* visitor) {
  for (auto it = set->begin(), end = set->end(); it != end;) {
    // This does not need a read barrier because this is called by GC.
    mirror::Object* object = it->Read<kWithoutReadBarrier>();
    mirror::Object* new_object = visitor->IsMarked(object);
    if (new_object == nullptr) {
      it = set->erase(it);
    } else {
      *it = GcRoot<mirror::String>(new_object->AsString());
      ++it;
    }
  }
}

size_t InternTable::Table::Size() const {
  return std::accumulate(tables_.begin(),
                         tables_.end(),
                         0U,
                         [](size_t sum, const InternalTable& table) {
                           return sum + table.Size();
                         });
}

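// Change the weak root access state used when not running with read barriers; re-enabling
// access wakes any threads blocked in WaitUntilAccessible().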
void InternTable::ChangeWeakRootState(gc::WeakRootState new_state) {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  ChangeWeakRootStateLocked(new_state);
}

void InternTable::ChangeWeakRootStateLocked(gc::WeakRootState new_state) {
  CHECK(!kUseReadBarrier);
  weak_root_state_ = new_state;
  if (new_state != gc::kWeakRootStateNoReadsOrWrites) {
    weak_intern_condition_.Broadcast(Thread::Current());
  }
}

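// The initial internal table is created with the runtime's configured hash-set min/max load
// factors.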
InternTable::Table::Table() {
  Runtime* const runtime = Runtime::Current();
  InternalTable initial_table;
  initial_table.set_.SetLoadFactor(runtime->GetHashTableMinLoadFactor(),
                                   runtime->GetHashTableMaxLoadFactor());
  tables_.push_back(std::move(initial_table));
}

}  // namespace art