blob: 9ac9927cf4737d39f7ebf25368fa2bb0451d8a0a [file] [log] [blame]
Elliott Hughes2faa5f12012-01-30 14:42:07 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Brian Carlstrom7e93b502011-08-04 14:16:22 -070016
17#include "intern_table.h"
18
Ian Rogers700a4022014-05-19 16:49:03 -070019#include <memory>
20
David Sehr0225f8e2018-01-31 08:52:24 +000021#include "dex/utf.h"
Mathieu Chartier97509952015-07-13 14:35:43 -070022#include "gc/collector/garbage_collector.h"
Ian Rogers7dfb28c2013-08-22 08:18:36 -070023#include "gc/space/image_space.h"
Mathieu Chartier14c3bf92015-07-13 14:35:43 -070024#include "gc/weak_root_state.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070025#include "gc_root-inl.h"
Andreas Gampee15b9b12018-10-29 12:54:27 -070026#include "handle_scope-inl.h"
Mathieu Chartier4a26f172016-01-26 14:26:18 -080027#include "image-inl.h"
Vladimir Marko05792b92015-08-03 11:56:49 +010028#include "mirror/dex_cache-inl.h"
Ian Rogers7dfb28c2013-08-22 08:18:36 -070029#include "mirror/object-inl.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070030#include "mirror/object_array-inl.h"
Mathieu Chartiercdfd39f2014-08-29 18:16:58 -070031#include "mirror/string-inl.h"
Andreas Gampe5d08fcc2017-06-05 17:56:46 -070032#include "object_callbacks.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070033#include "scoped_thread_state_change-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080034#include "thread.h"
Brian Carlstrom7e93b502011-08-04 14:16:22 -070035
36namespace art {
37
// Constructs an empty intern table: root logging disabled, weak roots
// readable/writable (gc::kWeakRootStateNormal), and a condition variable
// (guarded by intern_table_lock_) used to block readers while the GC owns
// the weak table.
InternTable::InternTable()
    : log_new_roots_(false),
      weak_intern_condition_("New intern condition", *Locks::intern_table_lock_),
      weak_root_state_(gc::kWeakRootStateNormal) {
}
Elliott Hughesde69d7f2011-08-18 16:49:37 -070043
Brian Carlstroma663ea52011-08-19 23:33:41 -070044size_t InternTable::Size() const {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +010045 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartiereb175f72014-10-31 11:49:27 -070046 return strong_interns_.Size() + weak_interns_.Size();
Brian Carlstroma663ea52011-08-19 23:33:41 -070047}
48
Hiroshi Yamauchia91a4bc2014-06-13 16:44:55 -070049size_t InternTable::StrongSize() const {
50 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartiereb175f72014-10-31 11:49:27 -070051 return strong_interns_.Size();
Hiroshi Yamauchia91a4bc2014-06-13 16:44:55 -070052}
53
54size_t InternTable::WeakSize() const {
55 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartiereb175f72014-10-31 11:49:27 -070056 return weak_interns_.Size();
Hiroshi Yamauchia91a4bc2014-06-13 16:44:55 -070057}
58
Elliott Hughescac6cc72011-11-03 20:31:21 -070059void InternTable::DumpForSigQuit(std::ostream& os) const {
Mathieu Chartiereb175f72014-10-31 11:49:27 -070060 os << "Intern table: " << StrongSize() << " strong; " << WeakSize() << " weak\n";
Elliott Hughescac6cc72011-11-03 20:31:21 -070061}
62
// Visits the strong intern roots for the GC. `flags` selects between visiting
// every strong root or only the ones logged since logging was enabled, and may
// also toggle/clear the new-root log. Weak interns are deliberately not
// visited here (they are handled via SweepInternTableWeaks).
void InternTable::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
  MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
  if ((flags & kVisitRootFlagAllRoots) != 0) {
    strong_interns_.VisitRoots(visitor);
  } else if ((flags & kVisitRootFlagNewRoots) != 0) {
    // Only visit roots recorded in the log; snapshot each root before and
    // after the visit to detect relocation by a moving GC.
    for (auto& root : new_strong_intern_roots_) {
      ObjPtr<mirror::String> old_ref = root.Read<kWithoutReadBarrier>();
      root.VisitRoot(visitor, RootInfo(kRootInternedString));
      ObjPtr<mirror::String> new_ref = root.Read<kWithoutReadBarrier>();
      if (new_ref != old_ref) {
        // The GC moved a root in the log. Need to search the strong interns and update the
        // corresponding object. This is slow, but luckily for us, this may only happen with a
        // concurrent moving GC.
        strong_interns_.Remove(old_ref);
        strong_interns_.Insert(new_ref);
      }
    }
  }
  if ((flags & kVisitRootFlagClearRootLog) != 0) {
    new_strong_intern_roots_.clear();
  }
  if ((flags & kVisitRootFlagStartLoggingNewRoots) != 0) {
    log_new_roots_ = true;
  } else if ((flags & kVisitRootFlagStopLoggingNewRoots) != 0) {
    log_new_roots_ = false;
  }
  // Note: we deliberately don't visit the weak_interns_ table and the immutable image roots.
}
91
Mathieu Chartier9e868092016-10-31 14:58:04 -070092ObjPtr<mirror::String> InternTable::LookupWeak(Thread* self, ObjPtr<mirror::String> s) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -080093 MutexLock mu(self, *Locks::intern_table_lock_);
94 return LookupWeakLocked(s);
Mathieu Chartierf7fd9702015-11-09 11:16:49 -080095}
96
Mathieu Chartier9e868092016-10-31 14:58:04 -070097ObjPtr<mirror::String> InternTable::LookupStrong(Thread* self, ObjPtr<mirror::String> s) {
Mathieu Chartierfbc31082016-01-24 11:59:56 -080098 MutexLock mu(self, *Locks::intern_table_lock_);
99 return LookupStrongLocked(s);
100}
101
Mathieu Chartier9e868092016-10-31 14:58:04 -0700102ObjPtr<mirror::String> InternTable::LookupStrong(Thread* self,
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000103 uint32_t utf16_length,
104 const char* utf8_data) {
105 DCHECK_EQ(utf16_length, CountModifiedUtf8Chars(utf8_data));
106 Utf8String string(utf16_length,
107 utf8_data,
108 ComputeUtf16HashFromModifiedUtf8(utf8_data, utf16_length));
109 MutexLock mu(self, *Locks::intern_table_lock_);
110 return strong_interns_.Find(string);
111}
112
// Weak-table lookup; caller must already hold intern_table_lock_.
ObjPtr<mirror::String> InternTable::LookupWeakLocked(ObjPtr<mirror::String> s) {
  return weak_interns_.Find(s);
}
116
// Strong-table lookup; caller must already hold intern_table_lock_.
ObjPtr<mirror::String> InternTable::LookupStrongLocked(ObjPtr<mirror::String> s) {
  return strong_interns_.Find(s);
}
120
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800121void InternTable::AddNewTable() {
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700122 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800123 weak_interns_.AddNewTable();
124 strong_interns_.AddNewTable();
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700125}
126
// Inserts `s` into the strong table (caller holds intern_table_lock_).
// Records the insertion for rollback if a transaction is active, and logs the
// new root when root logging is enabled so a concurrent GC can find it.
ObjPtr<mirror::String> InternTable::InsertStrong(ObjPtr<mirror::String> s) {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsActiveTransaction()) {
    runtime->RecordStrongStringInsertion(s);
  }
  if (log_new_roots_) {
    new_strong_intern_roots_.push_back(GcRoot<mirror::String>(s));
  }
  strong_interns_.Insert(s);
  return s;
}
138
// Inserts `s` into the weak table (caller holds intern_table_lock_).
// Records the insertion for rollback if a transaction is active.
ObjPtr<mirror::String> InternTable::InsertWeak(ObjPtr<mirror::String> s) {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsActiveTransaction()) {
    runtime->RecordWeakStringInsertion(s);
  }
  weak_interns_.Insert(s);
  return s;
}
147
// Removes `s` from the strong table. Note: unlike RemoveWeak, no transaction
// record is made here.
void InternTable::RemoveStrong(ObjPtr<mirror::String> s) {
  strong_interns_.Remove(s);
}
151
// Removes `s` from the weak table, recording the removal for rollback when a
// transaction is active.
void InternTable::RemoveWeak(ObjPtr<mirror::String> s) {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsActiveTransaction()) {
    runtime->RecordWeakStringRemoval(s);
  }
  weak_interns_.Remove(s);
}
159
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100160// Insert/remove methods used to undo changes made during an aborted transaction.
// Re-applies a strong insertion while undoing an aborted transaction; must be
// called with no transaction active so the operation is not re-recorded.
ObjPtr<mirror::String> InternTable::InsertStrongFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  return InsertStrong(s);
}
Mathieu Chartier9e868092016-10-31 14:58:04 -0700165
// Re-applies a weak insertion while undoing an aborted transaction; must be
// called with no transaction active so the operation is not re-recorded.
ObjPtr<mirror::String> InternTable::InsertWeakFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  return InsertWeak(s);
}
Mathieu Chartier9e868092016-10-31 14:58:04 -0700170
// Undoes a strong insertion from an aborted transaction; must be called with
// no transaction active.
void InternTable::RemoveStrongFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  RemoveStrong(s);
}
Mathieu Chartier9e868092016-10-31 14:58:04 -0700175
// Undoes a weak insertion from an aborted transaction; must be called with no
// transaction active.
void InternTable::RemoveWeakFromTransaction(ObjPtr<mirror::String> s) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  RemoveWeak(s);
}
180
Hiroshi Yamauchi0b713572015-06-16 18:29:23 -0700181void InternTable::BroadcastForNewInterns() {
Hiroshi Yamauchi0b713572015-06-16 18:29:23 -0700182 Thread* self = Thread::Current();
183 MutexLock mu(self, *Locks::intern_table_lock_);
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700184 weak_intern_condition_.Broadcast(self);
Hiroshi Yamauchi0b713572015-06-16 18:29:23 -0700185}
186
// Blocks the caller until weak-intern access is permitted again.
// Precondition: intern_table_lock_ is held exclusively; it is released while
// waiting (the thread also drops to a suspended state so the GC can proceed)
// and re-acquired before returning.
void InternTable::WaitUntilAccessible(Thread* self) {
  Locks::intern_table_lock_->ExclusiveUnlock(self);
  {
    ScopedThreadSuspension sts(self, kWaitingWeakGcRootRead);
    MutexLock mu(self, *Locks::intern_table_lock_);
    // CMS blocks while weak roots are in NoReadsOrWrites; CC blocks while this
    // thread's weak-ref access is disabled.
    while ((!kUseReadBarrier && weak_root_state_ == gc::kWeakRootStateNoReadsOrWrites) ||
           (kUseReadBarrier && !self->GetWeakRefAccessEnabled())) {
      weak_intern_condition_.Wait(self);
    }
  }
  Locks::intern_table_lock_->ExclusiveLock(self);
}
199
// Core interning routine. Returns the canonical instance equal to `s`,
// inserting `s` into the strong or weak table if no match exists. When
// `is_strong`, a weak match is promoted to the strong table.
// `holding_locks` indicates the caller holds locks that forbid blocking; in
// that case weak-root access must already be enabled.
ObjPtr<mirror::String> InternTable::Insert(ObjPtr<mirror::String> s,
                                           bool is_strong,
                                           bool holding_locks) {
  if (s == nullptr) {
    return nullptr;
  }
  Thread* const self = Thread::Current();
  MutexLock mu(self, *Locks::intern_table_lock_);
  if (kDebugLocking && !holding_locks) {
    Locks::mutator_lock_->AssertSharedHeld(self);
    CHECK_EQ(2u, self->NumberOfHeldMutexes()) << "may only safely hold the mutator lock";
  }
  // Loop: the strong lookup is retried after any wait, since another thread
  // may have interned an equal string in the meantime.
  while (true) {
    if (holding_locks) {
      if (!kUseReadBarrier) {
        CHECK_EQ(weak_root_state_, gc::kWeakRootStateNormal);
      } else {
        CHECK(self->GetWeakRefAccessEnabled());
      }
    }
    // Check the strong table for a match.
    ObjPtr<mirror::String> strong = LookupStrongLocked(s);
    if (strong != nullptr) {
      return strong;
    }
    // If weak roots are readable we can proceed to the weak table below.
    if ((!kUseReadBarrier && weak_root_state_ != gc::kWeakRootStateNoReadsOrWrites) ||
        (kUseReadBarrier && self->GetWeakRefAccessEnabled())) {
      break;
    }
    // weak_root_state_ is set to gc::kWeakRootStateNoReadsOrWrites in the GC pause but is only
    // cleared after SweepSystemWeaks has completed. This is why we need to wait until it is
    // cleared.
    CHECK(!holding_locks);
    // Wrap `s` in a handle so a moving GC can update it while we wait.
    StackHandleScope<1> hs(self);
    auto h = hs.NewHandleWrapper(&s);
    WaitUntilAccessible(self);
  }
  if (!kUseReadBarrier) {
    CHECK_EQ(weak_root_state_, gc::kWeakRootStateNormal);
  } else {
    CHECK(self->GetWeakRefAccessEnabled());
  }
  // There is no match in the strong table, check the weak table.
  ObjPtr<mirror::String> weak = LookupWeakLocked(s);
  if (weak != nullptr) {
    if (is_strong) {
      // A match was found in the weak table. Promote to the strong table.
      RemoveWeak(weak);
      return InsertStrong(weak);
    }
    return weak;
  }
  // No match in the strong table or the weak table. Insert into the strong / weak table.
  return is_strong ? InsertStrong(s) : InsertWeak(s);
}
255
Mathieu Chartier9e868092016-10-31 14:58:04 -0700256ObjPtr<mirror::String> InternTable::InternStrong(int32_t utf16_length, const char* utf8_data) {
Mathieu Chartiered0fc1d2014-03-21 14:09:35 -0700257 DCHECK(utf8_data != nullptr);
Vladimir Markod2bdb9b2016-07-08 17:23:22 +0100258 Thread* self = Thread::Current();
259 // Try to avoid allocation.
Mathieu Chartier9e868092016-10-31 14:58:04 -0700260 ObjPtr<mirror::String> s = LookupStrong(self, utf16_length, utf8_data);
Vladimir Markod2bdb9b2016-07-08 17:23:22 +0100261 if (s != nullptr) {
262 return s;
263 }
Ian Rogers7dfb28c2013-08-22 08:18:36 -0700264 return InternStrong(mirror::String::AllocFromModifiedUtf8(
Vladimir Markod2bdb9b2016-07-08 17:23:22 +0100265 self, utf16_length, utf8_data));
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700266}
267
Mathieu Chartier9e868092016-10-31 14:58:04 -0700268ObjPtr<mirror::String> InternTable::InternStrong(const char* utf8_data) {
Mathieu Chartiered0fc1d2014-03-21 14:09:35 -0700269 DCHECK(utf8_data != nullptr);
270 return InternStrong(mirror::String::AllocFromModifiedUtf8(Thread::Current(), utf8_data));
Brian Carlstromc74255f2011-09-11 22:47:39 -0700271}
272
// Strongly interns a string originating from an image.
ObjPtr<mirror::String> InternTable::InternStrongImageString(ObjPtr<mirror::String> s) {
  // May be holding the heap bitmap lock.
  return Insert(s, /*is_strong=*/ true, /*holding_locks=*/ true);
}
277
// Strongly interns `s`, returning the canonical equal instance.
ObjPtr<mirror::String> InternTable::InternStrong(ObjPtr<mirror::String> s) {
  return Insert(s, /*is_strong=*/ true, /*holding_locks=*/ false);
}
281
// Weakly interns `s`, returning the canonical equal instance.
ObjPtr<mirror::String> InternTable::InternWeak(ObjPtr<mirror::String> s) {
  return Insert(s, /*is_strong=*/ false, /*holding_locks=*/ false);
}
285
// True only when `s` itself is the instance stored in the weak table (an
// equal-but-distinct string yields false).
bool InternTable::ContainsWeak(ObjPtr<mirror::String> s) {
  return LookupWeak(Thread::Current(), s) == s;
}
289
Mathieu Chartier97509952015-07-13 14:35:43 -0700290void InternTable::SweepInternTableWeaks(IsMarkedVisitor* visitor) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100291 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartier97509952015-07-13 14:35:43 -0700292 weak_interns_.SweepWeaks(visitor);
Brian Carlstroma663ea52011-08-19 23:33:41 -0700293}
294
Mathieu Chartierd39645e2015-06-09 17:50:29 -0700295size_t InternTable::WriteToMemory(uint8_t* ptr) {
296 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800297 return strong_interns_.WriteToMemory(ptr);
Mathieu Chartierd39645e2015-06-09 17:50:29 -0700298}
299
// Hash functor: uses the string's cached Java hashCode. Reads without a read
// barrier; requires the mutator lock (asserted in debug builds).
std::size_t InternTable::StringHashEquals::operator()(const GcRoot<mirror::String>& root) const {
  if (kIsDebugBuild) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  // An additional cast to prevent undesired sign extension.
  return static_cast<size_t>(
      static_cast<uint32_t>(root.Read<kWithoutReadBarrier>()->GetHashCode()));
}
308
// Equality functor for two interned roots: content equality via
// String::Equals, read without barriers (mutator lock asserted in debug).
bool InternTable::StringHashEquals::operator()(const GcRoot<mirror::String>& a,
                                               const GcRoot<mirror::String>& b) const {
  if (kIsDebugBuild) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  return a.Read<kWithoutReadBarrier>()->Equals(b.Read<kWithoutReadBarrier>());
}
316
// Heterogeneous equality: compares an interned string against a raw
// modified-UTF-8 key, so lookups need not allocate a mirror::String.
bool InternTable::StringHashEquals::operator()(const GcRoot<mirror::String>& a,
                                               const Utf8String& b) const {
  if (kIsDebugBuild) {
    Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
  }
  ObjPtr<mirror::String> a_string = a.Read<kWithoutReadBarrier>();
  uint32_t a_length = static_cast<uint32_t>(a_string->GetLength());
  // Lengths are in UTF-16 code units; a mismatch rules out equality cheaply.
  if (a_length != b.GetUtf16Length()) {
    return false;
  }
  if (a_string->IsCompressed()) {
    size_t b_byte_count = strlen(b.GetUtf8Data());
    size_t b_utf8_length = CountModifiedUtf8Chars(b.GetUtf8Data(), b_byte_count);
    // Modified UTF-8 single byte character range is 0x01 .. 0x7f
    // The string compression occurs on regular ASCII with same exact range,
    // not on extended ASCII which up to 0xff
    const bool is_b_regular_ascii = (b_byte_count == b_utf8_length);
    if (is_b_regular_ascii) {
      // Both sides are plain ASCII bytes, so a memcmp suffices.
      return memcmp(b.GetUtf8Data(),
                    a_string->GetValueCompressed(), a_length * sizeof(uint8_t)) == 0;
    } else {
      return false;
    }
  } else {
    // Uncompressed path: decode b's modified UTF-8 against a's UTF-16 data.
    const uint16_t* a_value = a_string->GetValue();
    return CompareModifiedUtf8ToUtf16AsCodePointValues(b.GetUtf8Data(), a_value, a_length) == 0;
  }
}
345
// Serializes this table to `ptr`. When multiple internal tables exist they
// are first merged into a single combined set, since the image format stores
// one table. Returns the byte count required/written by the set.
size_t InternTable::Table::WriteToMemory(uint8_t* ptr) {
  if (tables_.empty()) {
    return 0;
  }
  UnorderedSet* table_to_write;
  UnorderedSet combined;
  if (tables_.size() > 1) {
    // Merge all tables into `combined` and write that.
    table_to_write = &combined;
    for (InternalTable& table : tables_) {
      for (GcRoot<mirror::String>& string : table.set_) {
        combined.insert(string);
      }
    }
  } else {
    // Single table: write it directly, avoiding the copy.
    table_to_write = &tables_.back().set_;
  }
  return table_to_write->WriteToMemory(ptr);
}
364
Mathieu Chartier9e868092016-10-31 14:58:04 -0700365void InternTable::Table::Remove(ObjPtr<mirror::String> s) {
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700366 for (InternalTable& table : tables_) {
367 auto it = table.set_.find(GcRoot<mirror::String>(s));
368 if (it != table.set_.end()) {
369 table.set_.erase(it);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800370 return;
371 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700372 }
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800373 LOG(FATAL) << "Attempting to remove non-interned string " << s->ToModifiedUtf8();
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700374}
375
Mathieu Chartier9e868092016-10-31 14:58:04 -0700376ObjPtr<mirror::String> InternTable::Table::Find(ObjPtr<mirror::String> s) {
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700377 Locks::intern_table_lock_->AssertHeld(Thread::Current());
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700378 for (InternalTable& table : tables_) {
379 auto it = table.set_.find(GcRoot<mirror::String>(s));
380 if (it != table.set_.end()) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800381 return it->Read();
382 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700383 }
384 return nullptr;
385}
386
Mathieu Chartier9e868092016-10-31 14:58:04 -0700387ObjPtr<mirror::String> InternTable::Table::Find(const Utf8String& string) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000388 Locks::intern_table_lock_->AssertHeld(Thread::Current());
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700389 for (InternalTable& table : tables_) {
390 auto it = table.set_.find(string);
391 if (it != table.set_.end()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000392 return it->Read();
393 }
394 }
395 return nullptr;
396}
397
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800398void InternTable::Table::AddNewTable() {
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700399 tables_.push_back(InternalTable());
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700400}
401
Mathieu Chartier9e868092016-10-31 14:58:04 -0700402void InternTable::Table::Insert(ObjPtr<mirror::String> s) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800403 // Always insert the last table, the image tables are before and we avoid inserting into these
404 // to prevent dirty pages.
405 DCHECK(!tables_.empty());
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700406 tables_.back().set_.insert(GcRoot<mirror::String>(s));
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700407}
408
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700409void InternTable::Table::VisitRoots(RootVisitor* visitor) {
Mathieu Chartier4809d0a2015-04-07 10:39:04 -0700410 BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(
411 visitor, RootInfo(kRootInternedString));
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700412 for (InternalTable& table : tables_) {
413 for (auto& intern : table.set_) {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800414 buffered_visitor.VisitRoot(intern);
415 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700416 }
417}
418
Mathieu Chartier97509952015-07-13 14:35:43 -0700419void InternTable::Table::SweepWeaks(IsMarkedVisitor* visitor) {
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700420 for (InternalTable& table : tables_) {
421 SweepWeaks(&table.set_, visitor);
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800422 }
Mathieu Chartiereb175f72014-10-31 11:49:27 -0700423}
424
// Sweeps one set: unmarked entries are erased; moved entries are updated to
// their new addresses. Iteration and erasure are interleaved, so the iterator
// is always re-fetched from erase().
void InternTable::Table::SweepWeaks(UnorderedSet* set, IsMarkedVisitor* visitor) {
  for (auto it = set->begin(), end = set->end(); it != end;) {
    // This does not need a read barrier because this is called by GC.
    mirror::Object* object = it->Read<kWithoutReadBarrier>();
    mirror::Object* new_object = visitor->IsMarked(object);
    if (new_object == nullptr) {
      // Referent is dead: drop the weak intern.
      it = set->erase(it);
    } else {
      // Referent may have moved: store the updated reference.
      *it = GcRoot<mirror::String>(new_object->AsString());
      ++it;
    }
  }
}
438
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800439size_t InternTable::Table::Size() const {
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800440 return std::accumulate(tables_.begin(),
441 tables_.end(),
Mathieu Chartier205b7622016-01-06 15:47:09 -0800442 0U,
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700443 [](size_t sum, const InternalTable& table) {
444 return sum + table.Size();
Mathieu Chartierea0831f2015-12-29 13:17:37 -0800445 });
Mathieu Chartierc2e20622014-11-03 11:41:47 -0800446}
447
Mathieu Chartier14c3bf92015-07-13 14:35:43 -0700448void InternTable::ChangeWeakRootState(gc::WeakRootState new_state) {
449 MutexLock mu(Thread::Current(), *Locks::intern_table_lock_);
450 ChangeWeakRootStateLocked(new_state);
451}
452
// Updates the weak-root access state (caller holds intern_table_lock_).
// Only used by non-read-barrier (CMS-style) configurations. Re-enabling
// access wakes threads blocked in WaitUntilAccessible().
void InternTable::ChangeWeakRootStateLocked(gc::WeakRootState new_state) {
  CHECK(!kUseReadBarrier);
  weak_root_state_ = new_state;
  if (new_state != gc::kWeakRootStateNoReadsOrWrites) {
    weak_intern_condition_.Broadcast(Thread::Current());
  }
}
460
Mathieu Chartier32cc9ee2015-10-15 09:19:15 -0700461InternTable::Table::Table() {
462 Runtime* const runtime = Runtime::Current();
Mathieu Chartier8cc418e2018-10-31 10:54:30 -0700463 InternalTable initial_table;
464 initial_table.set_.SetLoadFactor(runtime->GetHashTableMinLoadFactor(),
465 runtime->GetHashTableMaxLoadFactor());
466 tables_.push_back(std::move(initial_table));
Mathieu Chartier32cc9ee2015-10-15 09:19:15 -0700467}
468
Brian Carlstrom7e93b502011-08-04 14:16:22 -0700469} // namespace art