/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_

#include "dex_cache.h"

#include <android-base/logging.h>

#include "art_field.h"
#include "art_method.h"
#include "base/casts.h"
#include "base/enums.h"
#include "class_linker.h"
#include "dex/dex_file.h"
#include "gc/heap-inl.h"
#include "gc_root-inl.h"
#include "mirror/call_site.h"
#include "mirror/class.h"
#include "mirror/method_type.h"
#include "obj_ptr.h"
#include "runtime.h"
#include "write_barrier-inl.h"

#include <atomic>

namespace art {
namespace mirror {

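// Slot 0 must be seeded explicitly: in zero-initialized memory a {nullptr, 0} pair in slot 0
// would be indistinguishable from a resolved entry for index 0, so Initialize() stores
// InvalidIndexForSlot(0) there.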
template <typename T>
inline void NativeDexCachePair<T>::Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache,
                                              PointerSize pointer_size) {
  NativeDexCachePair<T> first_elem;
  first_elem.object = nullptr;
  first_elem.index = InvalidIndexForSlot(0);
  DexCache::SetNativePairPtrSize(dex_cache, 0, first_elem, pointer_size);
}

inline uint32_t DexCache::ClassSize(PointerSize pointer_size) {
  const uint32_t vtable_entries = Object::kVTableLength;
  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}

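// The string, type, method-type, field and method caches below are direct-mapped and lossy:
// an entry for dex index i lives in slot (i % cache size), so a later resolution that maps to
// the same slot evicts the earlier one and the corresponding Get* returns null again. Callers
// must treat a null result as "not cached" and resolve again, typically via the ClassLinker.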
inline uint32_t DexCache::StringSlotIndex(dex::StringIndex string_idx) {
  DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
  const uint32_t slot_idx = string_idx.index_ % kDexCacheStringCacheSize;
  DCHECK_LT(slot_idx, NumStrings());
  return slot_idx;
}

inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
  return GetStrings()[StringSlotIndex(string_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(string_idx.index_);
}

inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
  DCHECK(resolved != nullptr);
  GetStrings()[StringSlotIndex(string_idx)].store(
      StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed);
  Runtime* const runtime = Runtime::Current();
  if (UNLIKELY(runtime->IsActiveTransaction())) {
    DCHECK(runtime->IsAotCompiler());
    runtime->RecordResolveString(this, string_idx);
  }
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

inline void DexCache::ClearString(dex::StringIndex string_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = StringSlotIndex(string_idx);
  StringDexCacheType* slot = &GetStrings()[slot_idx];
  // This is racy but should only be called from the transactional interpreter.
  if (slot->load(std::memory_order_relaxed).index == string_idx.index_) {
    StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}

inline uint32_t DexCache::TypeSlotIndex(dex::TypeIndex type_idx) {
  DCHECK_LT(type_idx.index_, GetDexFile()->NumTypeIds());
  const uint32_t slot_idx = type_idx.index_ % kDexCacheTypeCacheSize;
  DCHECK_LT(slot_idx, NumResolvedTypes());
  return slot_idx;
}

inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
  // It is theorized that a load acquire is not required since obtaining the resolved class will
  // always have an address dependency or a lock.
  return GetResolvedTypes()[TypeSlotIndex(type_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(type_idx.index_);
}

inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) {
  DCHECK(resolved != nullptr);
  // TODO default transaction support.
  // Use a release store for SetResolvedType. This is done to prevent other threads from seeing a
  // class but not necessarily seeing the loaded members like the static fields array.
  // See b/32075261.
  GetResolvedTypes()[TypeSlotIndex(type_idx)].store(
      TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = TypeSlotIndex(type_idx);
  TypeDexCacheType* slot = &GetResolvedTypes()[slot_idx];
  // This is racy but should only be called from the single-threaded ImageWriter and tests.
  if (slot->load(std::memory_order_relaxed).index == type_idx.index_) {
    TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}

inline uint32_t DexCache::MethodTypeSlotIndex(dex::ProtoIndex proto_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(proto_idx.index_, GetDexFile()->NumProtoIds());
  const uint32_t slot_idx = proto_idx.index_ % kDexCacheMethodTypeCacheSize;
  DCHECK_LT(slot_idx, NumResolvedMethodTypes());
  return slot_idx;
}

inline MethodType* DexCache::GetResolvedMethodType(dex::ProtoIndex proto_idx) {
  return GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(proto_idx.index_);
}

inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved) {
  DCHECK(resolved != nullptr);
  GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].store(
      MethodTypeDexCachePair(resolved, proto_idx.index_), std::memory_order_relaxed);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

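// Unlike the pair caches above, call sites get one GcRoot slot per call_site_idx (no modulo),
// and resolution is "first store wins": SetResolvedCallSite publishes a candidate with a
// sequentially consistent compare-and-set and returns whichever call site ends up in the slot.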
inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());
  GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];
  Atomic<GcRoot<mirror::CallSite>>& ref =
      reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
  return ref.load(std::memory_order_seq_cst).Read();
}

inline ObjPtr<CallSite> DexCache::SetResolvedCallSite(uint32_t call_site_idx,
                                                      ObjPtr<CallSite> call_site) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());

  GcRoot<mirror::CallSite> null_call_site(nullptr);
  GcRoot<mirror::CallSite> candidate(call_site);
  GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];

  // The first assignment for a given call site wins.
  Atomic<GcRoot<mirror::CallSite>>& ref =
      reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
  if (ref.CompareAndSetStrongSequentiallyConsistent(null_call_site, candidate)) {
    // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
    WriteBarrier::ForEveryFieldWrite(this);
    return call_site;
  } else {
    return target.Read();
  }
}

inline uint32_t DexCache::FieldSlotIndex(uint32_t field_idx) {
  DCHECK_LT(field_idx, GetDexFile()->NumFieldIds());
  const uint32_t slot_idx = field_idx % kDexCacheFieldCacheSize;
  DCHECK_LT(slot_idx, NumResolvedFields());
  return slot_idx;
}

inline ArtField* DexCache::GetResolvedField(uint32_t field_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  auto pair = GetNativePairPtrSize(GetResolvedFields(), FieldSlotIndex(field_idx), ptr_size);
  return pair.GetObjectForIndex(field_idx);
}

inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK(field != nullptr);
  FieldDexCachePair pair(field, field_idx);
  SetNativePairPtrSize(GetResolvedFields(), FieldSlotIndex(field_idx), pair, ptr_size);
}

inline void DexCache::ClearResolvedField(uint32_t field_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  uint32_t slot_idx = FieldSlotIndex(field_idx);
  auto* resolved_fields = GetResolvedFields();
  // This is racy but should only be called from the single-threaded ImageWriter.
  DCHECK(Runtime::Current()->IsAotCompiler());
  if (GetNativePairPtrSize(resolved_fields, slot_idx, ptr_size).index == field_idx) {
    FieldDexCachePair cleared(nullptr, FieldDexCachePair::InvalidIndexForSlot(slot_idx));
    SetNativePairPtrSize(resolved_fields, slot_idx, cleared, ptr_size);
  }
}

inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) {
  DCHECK_LT(method_idx, GetDexFile()->NumMethodIds());
  const uint32_t slot_idx = method_idx % kDexCacheMethodCacheSize;
  DCHECK_LT(slot_idx, NumResolvedMethods());
  return slot_idx;
}

inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  auto pair = GetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), ptr_size);
  return pair.GetObjectForIndex(method_idx);
}

inline void DexCache::SetResolvedMethod(uint32_t method_idx,
                                        ArtMethod* method,
                                        PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK(method != nullptr);
  MethodDexCachePair pair(method, method_idx);
  SetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), pair, ptr_size);
}

inline void DexCache::ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  uint32_t slot_idx = MethodSlotIndex(method_idx);
  auto* resolved_methods = GetResolvedMethods();
  // This is racy but should only be called from the single-threaded ImageWriter.
  DCHECK(Runtime::Current()->IsAotCompiler());
  if (GetNativePairPtrSize(resolved_methods, slot_idx, ptr_size).index == method_idx) {
    MethodDexCachePair cleared(nullptr, MethodDexCachePair::InvalidIndexForSlot(slot_idx));
    SetNativePairPtrSize(resolved_methods, slot_idx, cleared, ptr_size);
  }
}

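// Native pairs pack a {pointer, index} tuple. On 64-bit targets the tuple is 16 bytes and is
// read and written with the 16-byte atomic helpers so the pointer and its index cannot be torn
// apart; on 32-bit targets a plain 8-byte std::atomic load/store suffices. Stores use release
// ordering so the pointee's contents are published before the pointer becomes visible.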
template <typename T>
NativeDexCachePair<T> DexCache::GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                                     size_t idx,
                                                     PointerSize ptr_size) {
  if (ptr_size == PointerSize::k64) {
    auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
    ConversionPair64 value = AtomicLoadRelaxed16B(&array[idx]);
    return NativeDexCachePair<T>(reinterpret_cast64<T*>(value.first),
                                 dchecked_integral_cast<size_t>(value.second));
  } else {
    auto* array = reinterpret_cast<std::atomic<ConversionPair32>*>(pair_array);
    ConversionPair32 value = array[idx].load(std::memory_order_relaxed);
    return NativeDexCachePair<T>(reinterpret_cast32<T*>(value.first), value.second);
  }
}

template <typename T>
void DexCache::SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                    size_t idx,
                                    NativeDexCachePair<T> pair,
                                    PointerSize ptr_size) {
  if (ptr_size == PointerSize::k64) {
    auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
    ConversionPair64 v(reinterpret_cast64<uint64_t>(pair.object), pair.index);
    AtomicStoreRelease16B(&array[idx], v);
  } else {
    auto* array = reinterpret_cast<std::atomic<ConversionPair32>*>(pair_array);
    ConversionPair32 v(reinterpret_cast32<uint32_t>(pair.object),
                       dchecked_integral_cast<uint32_t>(pair.index));
    array[idx].store(v, std::memory_order_release);
  }
}

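// Visits the GcRoot in every pair and writes the pair back only if visiting changed the
// reference (e.g. the object was moved), which avoids redundant stores for unchanged slots.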
template <typename T,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void VisitDexCachePairs(std::atomic<DexCachePair<T>>* pairs,
                               size_t num_pairs,
                               const Visitor& visitor)
    REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
  for (size_t i = 0; i < num_pairs; ++i) {
    DexCachePair<T> source = pairs[i].load(std::memory_order_relaxed);
    // NOTE: We need the "template" keyword here to avoid a compilation
    // failure. GcRoot<T> is a template argument-dependent type and we need to
    // tell the compiler to treat "Read" as a template rather than a field or
    // function. Otherwise, on encountering the "<" token, the compiler would
    // treat "Read" as a field.
    T* const before = source.object.template Read<kReadBarrierOption>();
    visitor.VisitRootIfNonNull(source.object.AddressWithoutBarrier());
    if (source.object.template Read<kReadBarrierOption>() != before) {
      pairs[i].store(source, std::memory_order_relaxed);
    }
  }
}

template <bool kVisitNativeRoots,
          VerifyObjectFlags kVerifyFlags,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void DexCache::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) {
  // Visit instance fields first.
  VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
  // Visit arrays after.
  if (kVisitNativeRoots) {
    VisitDexCachePairs<String, kReadBarrierOption, Visitor>(
        GetStrings<kVerifyFlags>(), NumStrings<kVerifyFlags>(), visitor);

    VisitDexCachePairs<Class, kReadBarrierOption, Visitor>(
        GetResolvedTypes<kVerifyFlags>(), NumResolvedTypes<kVerifyFlags>(), visitor);

    VisitDexCachePairs<MethodType, kReadBarrierOption, Visitor>(
        GetResolvedMethodTypes<kVerifyFlags>(), NumResolvedMethodTypes<kVerifyFlags>(), visitor);

    GcRoot<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites<kVerifyFlags>();
    size_t num_call_sites = NumResolvedCallSites<kVerifyFlags>();
    for (size_t i = 0; i != num_call_sites; ++i) {
      visitor.VisitRootIfNonNull(resolved_call_sites[i].AddressWithoutBarrier());
    }
  }
}

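// The Fixup* helpers below copy every entry of the corresponding cache into |dest|, mapping each
// object reference through |visitor|. They are intended for wholesale rewrites of the arrays,
// e.g. when references are relocated for an image; the relaxed stores assume the caller provides
// whatever synchronization is needed.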
template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupStrings(StringDexCacheType* dest, const Visitor& visitor) {
  StringDexCacheType* src = GetStrings();
  for (size_t i = 0, count = NumStrings(); i < count; ++i) {
    StringDexCachePair source = src[i].load(std::memory_order_relaxed);
    String* ptr = source.object.Read<kReadBarrierOption>();
    String* new_source = visitor(ptr);
    source.object = GcRoot<String>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) {
  TypeDexCacheType* src = GetResolvedTypes();
  for (size_t i = 0, count = NumResolvedTypes(); i < count; ++i) {
    TypeDexCachePair source = src[i].load(std::memory_order_relaxed);
    Class* ptr = source.object.Read<kReadBarrierOption>();
    Class* new_source = visitor(ptr);
    source.object = GcRoot<Class>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedMethodTypes(MethodTypeDexCacheType* dest,
                                               const Visitor& visitor) {
  MethodTypeDexCacheType* src = GetResolvedMethodTypes();
  for (size_t i = 0, count = NumResolvedMethodTypes(); i < count; ++i) {
    MethodTypeDexCachePair source = src[i].load(std::memory_order_relaxed);
    MethodType* ptr = source.object.Read<kReadBarrierOption>();
    MethodType* new_source = visitor(ptr);
    source.object = GcRoot<MethodType>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest,
                                             const Visitor& visitor) {
  GcRoot<mirror::CallSite>* src = GetResolvedCallSites();
  for (size_t i = 0, count = NumResolvedCallSites(); i < count; ++i) {
    mirror::CallSite* source = src[i].Read<kReadBarrierOption>();
    mirror::CallSite* new_source = visitor(source);
    dest[i] = GcRoot<mirror::CallSite>(new_source);
  }
}

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_