/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_H_

#include "array.h"
#include "base/bit_utils.h"
#include "base/mutex.h"
#include "dex/dex_file_types.h"
#include "gc_root-inl.h"
#include "object.h"
#include "object_array.h"

namespace art {

namespace linker {
class ImageWriter;
}  // namespace linker

class ArtField;
class ArtMethod;
struct DexCacheOffsets;
class DexFile;
union JValue;
class LinearAlloc;
class Thread;

namespace mirror {

class CallSite;
class Class;
class MethodType;
class String;

template <typename T> struct PACKED(8) DexCachePair {
  GcRoot<T> object;
  uint32_t index;
  // The array is initially [ {0,0}, {0,0}, {0,0} ... ].
  // We maintain the invariant that once a dex cache entry is populated,
  // the pointer is always non-0.
  // Any given entry would thus be:
  //     {non-0, non-0} OR {0,0}
  //
  // It's generally sufficient then to check whether the lookup index matches
  // the stored index (for a > 0 lookup index), because if it matches, the
  // pointer is also non-null.
  //
  // For the 0th entry, which is a special case, the value is either
  // {0,0} (initial state) or {non-0, 0}, which indicates that a valid object
  // is stored at that index for a dex section id of 0.
  //
  // As an optimization, we want to avoid branching on the object pointer since
  // it's always non-null if the id branch succeeds (except for the 0th id).
  // Set the initial state for the 0th entry to be {0,1}, which is guaranteed to
  // fail the lookup id == stored id branch.
  DexCachePair(ObjPtr<T> object, uint32_t index)
      : object(object),
        index(index) {}
  DexCachePair() : index(0) {}
  DexCachePair(const DexCachePair<T>&) = default;
  DexCachePair& operator=(const DexCachePair<T>&) = default;

  static void Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
    DexCachePair<T> first_elem;
    first_elem.object = GcRoot<T>(nullptr);
    first_elem.index = InvalidIndexForSlot(0);
    dex_cache[0].store(first_elem, std::memory_order_relaxed);
  }

  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }

  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (idx != index) {
      return nullptr;
    }
    DCHECK(!object.IsNull());
    return object.Read();
  }
};
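
// Illustrative sketch only, not part of the runtime API: a cache lookup maps the
// dex index to a slot with a power-of-two modulo and then relies on the index
// comparison in GetObjectForIndex() above, roughly:
//
//   template <typename T, size_t kCacheSize>
//   T* LookupSketch(std::atomic<DexCachePair<T>>* cache, uint32_t dex_idx)
//       REQUIRES_SHARED(Locks::mutator_lock_) {
//     static_assert(IsPowerOfTwo(kCacheSize), "slot mapping assumes a power of two");
//     // Relaxed load is enough; the {object, index} pair is stored atomically.
//     DexCachePair<T> pair = cache[dex_idx % kCacheSize].load(std::memory_order_relaxed);
//     return pair.GetObjectForIndex(dex_idx);  // nullptr on a miss or slot collision.
//   }
//
// The real slot mapping lives in DexCache below (e.g. StringSlotIndex()) and in
// dex_cache-inl.h.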

template <typename T> struct PACKED(2 * __SIZEOF_POINTER__) NativeDexCachePair {
  T* object;
  size_t index;
  // This is similar to DexCachePair except that we're storing a native pointer
  // instead of a GC root. See DexCachePair for the details.
  NativeDexCachePair(T* object, uint32_t index)
      : object(object),
        index(index) {}
  NativeDexCachePair() : object(nullptr), index(0u) { }
  NativeDexCachePair(const NativeDexCachePair<T>&) = default;
  NativeDexCachePair& operator=(const NativeDexCachePair<T>&) = default;

  static void Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache, PointerSize pointer_size);

  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }

  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (idx != index) {
      return nullptr;
    }
    DCHECK(object != nullptr);
    return object;
  }
};

using TypeDexCachePair = DexCachePair<Class>;
using TypeDexCacheType = std::atomic<TypeDexCachePair>;

using StringDexCachePair = DexCachePair<String>;
using StringDexCacheType = std::atomic<StringDexCachePair>;

using FieldDexCachePair = NativeDexCachePair<ArtField>;
using FieldDexCacheType = std::atomic<FieldDexCachePair>;

using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
using MethodDexCacheType = std::atomic<MethodDexCachePair>;

using MethodTypeDexCachePair = DexCachePair<MethodType>;
using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;

// C++ mirror of java.lang.DexCache.
class MANAGED DexCache final : public Object {
 public:
  // Size of java.lang.DexCache.class.
  static uint32_t ClassSize(PointerSize pointer_size);

  // Size of type dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheTypeCacheSize),
                "Type dex cache size is not a power of 2.");

  // Size of string dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheStringCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheStringCacheSize),
                "String dex cache size is not a power of 2.");

  // Size of field dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheFieldCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheFieldCacheSize),
                "Field dex cache size is not a power of 2.");

  // Size of method dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheMethodCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheMethodCacheSize),
                "Method dex cache size is not a power of 2.");

  // Size of method type dex cache. Needs to be a power of 2 for entrypoint assumptions
  // to hold.
  static constexpr size_t kDexCacheMethodTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize),
                "MethodType dex cache size is not a power of 2.");

  static constexpr size_t StaticTypeSize() {
    return kDexCacheTypeCacheSize;
  }

  static constexpr size_t StaticStringSize() {
    return kDexCacheStringCacheSize;
  }

  static constexpr size_t StaticArtFieldSize() {
    return kDexCacheFieldCacheSize;
  }

  static constexpr size_t StaticMethodSize() {
    return kDexCacheMethodCacheSize;
  }

  static constexpr size_t StaticMethodTypeSize() {
    return kDexCacheMethodTypeCacheSize;
  }

  // Size of an instance of java.lang.DexCache not including referenced values.
  static constexpr uint32_t InstanceSize() {
    return sizeof(DexCache);
  }

  static void InitializeDexCache(Thread* self,
                                 ObjPtr<mirror::DexCache> dex_cache,
                                 ObjPtr<mirror::String> location,
                                 const DexFile* dex_file,
                                 LinearAlloc* linear_alloc,
                                 PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::dex_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  String* GetLocation() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldObject<String>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
  }

  static MemberOffset StringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, strings_);
  }

  static MemberOffset ResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_);
  }

  static MemberOffset ResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_fields_);
  }

  static MemberOffset ResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_methods_);
  }

  static MemberOffset ResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_method_types_);
  }

  static MemberOffset ResolvedCallSitesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_call_sites_);
  }

  static MemberOffset NumStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_strings_);
  }

  static MemberOffset NumResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_types_);
  }

  static MemberOffset NumResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_fields_);
  }

  static MemberOffset NumResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_methods_);
  }

  static MemberOffset NumResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_);
  }

  static MemberOffset NumResolvedCallSitesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_call_sites_);
  }

  String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear a string for a string_idx, used to undo string intern transactions to make sure
  // the string isn't kept live.
  void ClearString(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  Class* GetResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ClearResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx,
                                       ArtMethod* resolved,
                                       PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE void ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pointer sized variant, used for patching.
  ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pointer sized variant, used for patching.
  ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE void ClearResolvedField(uint32_t idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  MethodType* GetResolvedMethodType(dex::ProtoIndex proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  CallSite* GetResolvedCallSite(uint32_t call_site_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  // Attempts to bind |call_site_idx| to the call site |resolved|. The
  // caller must use the return value in place of |resolved|. This is
  // because multiple threads can invoke the bootstrap method, each
  // producing a call site, but the method handle invocation on the
  // call site must use a single, commonly agreed value.
  ObjPtr<CallSite> SetResolvedCallSite(uint32_t call_site_idx, ObjPtr<CallSite> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_) WARN_UNUSED;
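  //
  // Illustrative caller sketch only; RunBootstrapMethod is a hypothetical helper,
  // not an ART API:
  //
  //   ObjPtr<CallSite> site = dex_cache->GetResolvedCallSite(call_site_idx);
  //   if (site == nullptr) {
  //     ObjPtr<CallSite> candidate = RunBootstrapMethod(call_site_idx);
  //     // Another thread may have published its call site first, so always use
  //     // the return value rather than `candidate`.
  //     site = dex_cache->SetResolvedCallSite(call_site_idx, candidate);
  //   }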

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<StringDexCacheType*, kVerifyFlags>(StringsOffset());
  }

  void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(StringsOffset(), strings);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<TypeDexCacheType*, kVerifyFlags>(ResolvedTypesOffset());
  }

  void SetResolvedTypes(TypeDexCacheType* resolved_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
  }

  MethodDexCacheType* GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<MethodDexCacheType*>(ResolvedMethodsOffset());
  }

  void SetResolvedMethods(MethodDexCacheType* resolved_methods)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods);
  }

  FieldDexCacheType* GetResolvedFields() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<FieldDexCacheType*>(ResolvedFieldsOffset());
  }

  void SetResolvedFields(FieldDexCacheType* resolved_fields)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedFieldsOffset(), resolved_fields);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  MethodTypeDexCacheType* GetResolvedMethodTypes()
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<MethodTypeDexCacheType*, kVerifyFlags>(ResolvedMethodTypesOffset());
  }

  void SetResolvedMethodTypes(MethodTypeDexCacheType* resolved_method_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodTypesOffset(), resolved_method_types);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  GcRoot<CallSite>* GetResolvedCallSites()
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<GcRoot<CallSite>*, kVerifyFlags>(ResolvedCallSitesOffset());
  }

  void SetResolvedCallSites(GcRoot<CallSite>* resolved_call_sites)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedCallSitesOffset(), resolved_call_sites);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumStringsOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedTypesOffset());
  }

  size_t NumResolvedMethods() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedMethodsOffset());
  }

  size_t NumResolvedFields() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedFieldsOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedMethodTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedMethodTypesOffset());
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t NumResolvedCallSites() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags>(NumResolvedCallSitesOffset());
  }

  const DexFile* GetDexFile() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<const DexFile*>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_));
  }

  void SetDexFile(const DexFile* dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file);
  }

  void SetLocation(ObjPtr<String> location) REQUIRES_SHARED(Locks::mutator_lock_);

  template <typename T>
  static NativeDexCachePair<T> GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                                    size_t idx,
                                                    PointerSize ptr_size);

  template <typename T>
  static void SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                   size_t idx,
                                   NativeDexCachePair<T> pair,
                                   PointerSize ptr_size);

  uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t FieldSlotIndex(uint32_t field_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t MethodSlotIndex(uint32_t method_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t MethodTypeSlotIndex(dex::ProtoIndex proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  void Init(const DexFile* dex_file,
            ObjPtr<String> location,
            StringDexCacheType* strings,
            uint32_t num_strings,
            TypeDexCacheType* resolved_types,
            uint32_t num_resolved_types,
            MethodDexCacheType* resolved_methods,
            uint32_t num_resolved_methods,
            FieldDexCacheType* resolved_fields,
            uint32_t num_resolved_fields,
            MethodTypeDexCacheType* resolved_method_types,
            uint32_t num_resolved_method_types,
            GcRoot<CallSite>* resolved_call_sites,
            uint32_t num_resolved_call_sites)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations,
  // so we use a custom pair class for loading and storing the NativeDexCachePair<>.
  template <typename IntType>
  struct PACKED(2 * sizeof(IntType)) ConversionPair {
    ConversionPair(IntType f, IntType s) : first(f), second(s) { }
    ConversionPair(const ConversionPair&) = default;
    ConversionPair& operator=(const ConversionPair&) = default;
    IntType first;
    IntType second;
  };
  using ConversionPair32 = ConversionPair<uint32_t>;
  using ConversionPair64 = ConversionPair<uint64_t>;
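  // Sketch of the intended use (an assumption here; the real conversions live in
  // dex_cache-inl.h): on 64-bit targets a NativeDexCachePair<T> is packed as
  //   ConversionPair64(reinterpret_cast<uint64_t>(pair.object), pair.index)
  // so both words can be read or written with one 16-byte atomic operation below,
  // while 32-bit targets pack into a ConversionPair32 handled by a plain 8-byte
  // std::atomic.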

  // Visit instance fields of the dex cache as well as its associated arrays.
  template <bool kVisitNativeRoots,
            VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            typename Visitor>
  void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);

  // Due to lack of 16-byte atomics support on some architectures, we use hand-crafted routines.
#if defined(__aarch64__) || defined(__mips__)
  // 16-byte atomics are supported on aarch64, mips and mips64.
  ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B(
      std::atomic<ConversionPair64>* target) {
    return target->load(std::memory_order_relaxed);
  }

  ALWAYS_INLINE static void AtomicStoreRelease16B(
      std::atomic<ConversionPair64>* target, ConversionPair64 value) {
    target->store(value, std::memory_order_release);
  }
#elif defined(__x86_64__)
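  // x86-64 has no guaranteed-atomic plain 16-byte load/store instruction, so the
  // load below issues `lock cmpxchg16b` with an all-zero compare value: whether or
  // not the compare happens to succeed, RDX:RAX ends up holding the previous
  // 16-byte contents, which yields an atomic read. The store loops on
  // `lock cmpxchg16b` until the new value is installed.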
  ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B(
      std::atomic<ConversionPair64>* target) {
    uint64_t first, second;
    __asm__ __volatile__(
        "lock cmpxchg16b (%2)"
        : "=&a"(first), "=&d"(second)
        : "r"(target), "a"(0), "d"(0), "b"(0), "c"(0)
        : "cc");
    return ConversionPair64(first, second);
  }

  ALWAYS_INLINE static void AtomicStoreRelease16B(
      std::atomic<ConversionPair64>* target, ConversionPair64 value) {
    uint64_t first, second;
    __asm__ __volatile__ (
        "movq (%2), %%rax\n\t"
        "movq 8(%2), %%rdx\n\t"
        "1:\n\t"
        "lock cmpxchg16b (%2)\n\t"
        "jnz 1b"
        : "=&a"(first), "=&d"(second)
        : "r"(target), "b"(value.first), "c"(value.second)
        : "cc");
  }
#else
  static ConversionPair64 AtomicLoadRelaxed16B(std::atomic<ConversionPair64>* target);
  static void AtomicStoreRelease16B(std::atomic<ConversionPair64>* target, ConversionPair64 value);
#endif

  HeapReference<String> location_;
  // Number of elements in the resolved_call_sites_ array. Note that this appears
  // here because of our packing logic for 32-bit fields.
  uint32_t num_resolved_call_sites_;

  uint64_t dex_file_;                // const DexFile*
  uint64_t resolved_call_sites_;     // GcRoot<CallSite>* array with num_resolved_call_sites_
                                     // elements.
  uint64_t resolved_fields_;         // std::atomic<FieldDexCachePair>*, array with
                                     // num_resolved_fields_ elements.
  uint64_t resolved_method_types_;   // std::atomic<MethodTypeDexCachePair>* array with
                                     // num_resolved_method_types_ elements.
  uint64_t resolved_methods_;        // std::atomic<MethodDexCachePair>*, array with
                                     // num_resolved_methods_ elements.
  uint64_t resolved_types_;          // TypeDexCacheType*, array with num_resolved_types_ elements.
  uint64_t strings_;                 // std::atomic<StringDexCachePair>*, array with num_strings_
                                     // elements.

  uint32_t num_resolved_fields_;        // Number of elements in the resolved_fields_ array.
  uint32_t num_resolved_method_types_;  // Number of elements in the resolved_method_types_ array.
  uint32_t num_resolved_methods_;       // Number of elements in the resolved_methods_ array.
  uint32_t num_resolved_types_;         // Number of elements in the resolved_types_ array.
  uint32_t num_strings_;                // Number of elements in the strings_ array.

  friend struct art::DexCacheOffsets;  // for verifying offset information
  friend class linker::ImageWriter;
  friend class Object;  // For VisitReferences
  DISALLOW_IMPLICIT_CONSTRUCTORS(DexCache);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_DEX_CACHE_H_