blob: 78b2e15f5a42045116d0ed30a9728ef0daabeb96 [file] [log] [blame]
Elliott Hughes2faa5f12012-01-30 14:42:07 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Brian Carlstrom7e49dca2011-07-22 18:07:34 -070016
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_H_
18#define ART_RUNTIME_MIRROR_DEX_CACHE_H_
Brian Carlstrom7e49dca2011-07-22 18:07:34 -070019
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "array.h"
Vladimir Marko8d6768d2017-03-14 10:13:21 +000021#include "base/bit_utils.h"
Andreas Gampea5b09a62016-11-17 15:21:22 -080022#include "dex_file_types.h"
Brian Carlstrom7e49dca2011-07-22 18:07:34 -070023#include "object.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080024#include "object_array.h"
Brian Carlstrom7e49dca2011-07-22 18:07:34 -070025
26namespace art {
27
Vladimir Marko8d6768d2017-03-14 10:13:21 +000028class ArtField;
Alex Lightdba61482016-12-21 08:20:29 -080029class ArtMethod;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080030struct DexCacheOffsets;
31class DexFile;
32class ImageWriter;
33union JValue;
Andreas Gampecc1b5352016-12-01 16:58:38 -080034class LinearAlloc;
35class Thread;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080036
37namespace mirror {
38
Orion Hodsonc069a302017-01-18 09:23:12 +000039class CallSite;
Vladimir Marko8d6768d2017-03-14 10:13:21 +000040class Class;
Narayan Kamath25352fc2016-08-03 12:46:58 +010041class MethodType;
Mingyao Yang98d1cc82014-05-15 17:02:16 -070042class String;
Brian Carlstrom7e49dca2011-07-22 18:07:34 -070043
// A (GC root, dex index) pair used as one slot of a GC-visible dex cache array.
// PACKED(8) so that a slot can be loaded/stored as a single 64-bit atomic unit.
template <typename T> struct PACKED(8) DexCachePair {
  GcRoot<T> object;  // The cached object; null only in an empty/initial slot.
  uint32_t index;    // The dex file index this slot currently caches.
  // The array is initially [ {0,0}, {0,0}, {0,0} ... ]
  // We maintain the invariant that once a dex cache entry is populated,
  // the pointer is always non-0
  // Any given entry would thus be:
  // {non-0, non-0} OR {0,0}
  //
  // It is then generally sufficient to check whether the
  // lookup index matches the stored index (for a >0 lookup index)
  // because if it's true the pointer is also non-null.
  //
  // For the 0th entry which is a special case, the value is either
  // {0,0} (initial state) or {non-0, 0} which indicates
  // that a valid object is stored at that index for a dex section id of 0.
  //
  // As an optimization, we want to avoid branching on the object pointer since
  // it's always non-null if the id branch succeeds (except for the 0th id).
  // Set the initial state for the 0th entry to be {0,1} which is guaranteed to fail
  // the lookup id == stored id branch.
  DexCachePair(ObjPtr<T> object, uint32_t index)
      : object(object),
        index(index) {}
  DexCachePair() = default;
  DexCachePair(const DexCachePair<T>&) = default;
  DexCachePair& operator=(const DexCachePair<T>&) = default;

  // Seeds slot 0 with the {null, 1} sentinel described above so that an
  // un-populated slot 0 can never match a lookup for index 0.
  static void Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
    DexCachePair<T> first_elem;
    first_elem.object = GcRoot<T>(nullptr);
    first_elem.index = InvalidIndexForSlot(0);
    dex_cache[0].store(first_elem, std::memory_order_relaxed);
  }

  // Returns an index value that can never match a real lookup in `slot`.
  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }

  // Returns the cached object if this slot holds `idx`, null otherwise.
  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (idx != index) {
      return nullptr;
    }
    DCHECK(!object.IsNull());  // Invariant: a populated slot has a non-null object.
    return object.Read();
  }
};
Narayan Kamathc38a6f82016-09-29 17:07:20 +010093
// A (native pointer, dex index) pair slot; the native analogue of DexCachePair.
// PACKED to two pointer-widths so the pair can be handled by the hand-crafted
// 2-pointer-wide atomic routines (see DexCache::AtomicLoadRelaxed16B et al.).
template <typename T> struct PACKED(2 * __SIZEOF_POINTER__) NativeDexCachePair {
  T* object;     // The cached native object (e.g. ArtField); null in an empty slot.
  size_t index;  // Pointer-sized so the pair stays exactly 2 words on both 32/64-bit.
  // This is similar to DexCachePair except that we're storing a native pointer
  // instead of a GC root. See DexCachePair for the details.
  NativeDexCachePair(T* object, uint32_t index)
      : object(object),
        index(index) {}
  NativeDexCachePair() : object(nullptr), index(0u) { }
  NativeDexCachePair(const NativeDexCachePair<T>&) = default;
  NativeDexCachePair& operator=(const NativeDexCachePair<T>&) = default;

  // Seeds slot 0 with the sentinel; defined out-of-line since it needs the
  // pointer-size-dependent store helpers.
  static void Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache, PointerSize pointer_size);

  // Returns an index value that can never match a real lookup in `slot`.
  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }

  // Returns the cached object if this slot holds `idx`, null otherwise.
  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (idx != index) {
      return nullptr;
    }
    DCHECK(object != nullptr);  // Invariant: a populated slot has a non-null pointer.
    return object;
  }
};
122
// Canonical pair types and their atomic array-cell types for each kind of
// dex cache array. Fields use the native-pointer pair; the rest are GC roots.
using TypeDexCachePair = DexCachePair<Class>;
using TypeDexCacheType = std::atomic<TypeDexCachePair>;

using StringDexCachePair = DexCachePair<String>;
using StringDexCacheType = std::atomic<StringDexCachePair>;

using FieldDexCachePair = NativeDexCachePair<ArtField>;
using FieldDexCacheType = std::atomic<FieldDexCachePair>;

using MethodTypeDexCachePair = DexCachePair<MethodType>;
using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
134
// C++ mirror of java.lang.DexCache.
// Holds per-DexFile caches of resolved strings, types, fields, methods,
// method types and call sites. Field layout below must match the Java class
// (verified via art::DexCacheOffsets), so member order is load-bearing.
class MANAGED DexCache FINAL : public Object {
 public:
  // Size of java.lang.DexCache.class.
  static uint32_t ClassSize(PointerSize pointer_size);

  // Size of type dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheTypeCacheSize),
                "Type dex cache size is not a power of 2.");

  // Size of string dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheStringCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheStringCacheSize),
                "String dex cache size is not a power of 2.");

  // Size of field dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheFieldCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheFieldCacheSize),
                "Field dex cache size is not a power of 2.");

  // Size of method type dex cache. Needs to be a power of 2 for entrypoint assumptions
  // to hold.
  static constexpr size_t kDexCacheMethodTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize),
                "MethodType dex cache size is not a power of 2.");

  static constexpr size_t StaticTypeSize() {
    return kDexCacheTypeCacheSize;
  }

  static constexpr size_t StaticStringSize() {
    return kDexCacheStringCacheSize;
  }

  static constexpr size_t StaticArtFieldSize() {
    return kDexCacheFieldCacheSize;
  }

  static constexpr size_t StaticMethodTypeSize() {
    return kDexCacheMethodTypeCacheSize;
  }

  // Size of an instance of java.lang.DexCache not including referenced values.
  static constexpr uint32_t InstanceSize() {
    return sizeof(DexCache);
  }

  // Sets up `dex_cache` for `dex_file`; backing arrays come from `linear_alloc`
  // (see definition in dex_cache.cc for details).
  static void InitializeDexCache(Thread* self,
                                 ObjPtr<mirror::DexCache> dex_cache,
                                 ObjPtr<mirror::String> location,
                                 const DexFile* dex_file,
                                 LinearAlloc* linear_alloc,
                                 PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::dex_lock_);

  void Fixup(ArtMethod* trampoline, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Fixup* methods rewrite the cache arrays into `dest`, applying `visitor`
  // to each entry (used when relocating images).
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  String* GetLocation() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldObject<String>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
  }

  // Member offsets, used by compiled code and the image writer.
  static MemberOffset StringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, strings_);
  }

  static MemberOffset ResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_);
  }

  static MemberOffset ResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_fields_);
  }

  static MemberOffset ResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_methods_);
  }

  static MemberOffset ResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_method_types_);
  }

  static MemberOffset ResolvedCallSitesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_call_sites_);
  }

  static MemberOffset NumStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_strings_);
  }

  static MemberOffset NumResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_types_);
  }

  static MemberOffset NumResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_fields_);
  }

  static MemberOffset NumResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_methods_);
  }

  static MemberOffset NumResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_);
  }

  static MemberOffset NumResolvedCallSitesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_call_sites_);
  }

  // Typed accessors into the cache arrays, keyed by dex file indices.
  String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear a string for a string_idx, used to undo string intern transactions to make sure
  // the string isn't kept live.
  void ClearString(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  Class* GetResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ClearResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx,
                                       ArtMethod* resolved,
                                       PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pointer sized variant, used for patching.
  ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pointer sized variant, used for patching.
  ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE void ClearResolvedField(uint32_t idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  MethodType* GetResolvedMethodType(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedMethodType(uint32_t proto_idx, MethodType* resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  CallSite* GetResolvedCallSite(uint32_t call_site_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  // Attempts to bind |call_site_idx| to the call site |resolved|. The
  // caller must use the return value in place of |resolved|. This is
  // because multiple threads can invoke the bootstrap method each
  // producing a call site, but the method handle invocation on the
  // call site must be on a common agreed value.
  CallSite* SetResolvedCallSite(uint32_t call_site_idx, CallSite* resolved) WARN_UNUSED
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Raw array accessors; the arrays live in native memory, not the GC heap.
  StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<StringDexCacheType*>(StringsOffset());
  }

  void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(StringsOffset(), strings);
  }

  TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<TypeDexCacheType*>(ResolvedTypesOffset());
  }

  void SetResolvedTypes(TypeDexCacheType* resolved_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
  }

  ArtMethod** GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<ArtMethod**>(ResolvedMethodsOffset());
  }

  void SetResolvedMethods(ArtMethod** resolved_methods)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods);
  }

  FieldDexCacheType* GetResolvedFields() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<FieldDexCacheType*>(ResolvedFieldsOffset());
  }

  void SetResolvedFields(FieldDexCacheType* resolved_fields)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedFieldsOffset(), resolved_fields);
  }

  MethodTypeDexCacheType* GetResolvedMethodTypes()
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<MethodTypeDexCacheType*>(ResolvedMethodTypesOffset());
  }

  void SetResolvedMethodTypes(MethodTypeDexCacheType* resolved_method_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodTypesOffset(), resolved_method_types);
  }

  GcRoot<CallSite>* GetResolvedCallSites()
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<GcRoot<CallSite>*>(ResolvedCallSitesOffset());
  }

  void SetResolvedCallSites(GcRoot<CallSite>* resolved_call_sites)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedCallSitesOffset(), resolved_call_sites);
  }

  // Array length accessors.
  size_t NumStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumStringsOffset());
  }

  size_t NumResolvedTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedTypesOffset());
  }

  size_t NumResolvedMethods() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedMethodsOffset());
  }

  size_t NumResolvedFields() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedFieldsOffset());
  }

  size_t NumResolvedMethodTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedMethodTypesOffset());
  }

  size_t NumResolvedCallSites() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedCallSitesOffset());
  }

  const DexFile* GetDexFile() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<const DexFile*>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_));
  }

  void SetDexFile(const DexFile* dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file);
  }

  void SetLocation(ObjPtr<String> location) REQUIRES_SHARED(Locks::mutator_lock_);

  // NOTE: Get/SetElementPtrSize() are intended for working with ArtMethod** and ArtField**
  // provided by GetResolvedMethods/Fields() and ArtMethod::GetDexCacheResolvedMethods(),
  // so they need to be public.

  template <typename PtrType>
  static PtrType GetElementPtrSize(PtrType* ptr_array, size_t idx, PointerSize ptr_size);

  template <typename PtrType>
  static void SetElementPtrSize(PtrType* ptr_array, size_t idx, PtrType ptr, PointerSize ptr_size);

  // Pointer-size-aware load/store of a NativeDexCachePair<> array element.
  template <typename T>
  static NativeDexCachePair<T> GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                                    size_t idx,
                                                    PointerSize ptr_size);

  template <typename T>
  static void SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                   size_t idx,
                                   NativeDexCachePair<T> pair,
                                   PointerSize ptr_size);

  // Map a dex file index to its slot in the corresponding fixed-size cache array.
  uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t FieldSlotIndex(uint32_t field_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t MethodTypeSlotIndex(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Installs the given pre-allocated arrays and counts into this instance.
  void Init(const DexFile* dex_file,
            ObjPtr<String> location,
            StringDexCacheType* strings,
            uint32_t num_strings,
            TypeDexCacheType* resolved_types,
            uint32_t num_resolved_types,
            ArtMethod** resolved_methods,
            uint32_t num_resolved_methods,
            FieldDexCacheType* resolved_fields,
            uint32_t num_resolved_fields,
            MethodTypeDexCacheType* resolved_method_types,
            uint32_t num_resolved_method_types,
            GcRoot<CallSite>* resolved_call_sites,
            uint32_t num_resolved_call_sites,
            PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations,
  // so we use a custom pair class for loading and storing the NativeDexCachePair<>.
  template <typename IntType>
  struct PACKED(2 * sizeof(IntType)) ConversionPair {
    ConversionPair(IntType f, IntType s) : first(f), second(s) { }
    ConversionPair(const ConversionPair&) = default;
    ConversionPair& operator=(const ConversionPair&) = default;
    IntType first;
    IntType second;
  };
  using ConversionPair32 = ConversionPair<uint32_t>;
  using ConversionPair64 = ConversionPair<uint64_t>;

  // Visit instance fields of the dex cache as well as its associated arrays.
  template <bool kVisitNativeRoots,
            VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            typename Visitor>
  void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);

  // Due to lack of 16-byte atomics support, we use hand-crafted routines.
#if defined(__aarch64__)
  // 16-byte atomics are supported on aarch64.
  ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B(
      std::atomic<ConversionPair64>* target) {
    return target->load(std::memory_order_relaxed);
  }

  ALWAYS_INLINE static void AtomicStoreRelease16B(
      std::atomic<ConversionPair64>* target, ConversionPair64 value) {
    target->store(value, std::memory_order_release);
  }
#elif defined(__x86_64__)
  // Emulate a 16-byte load with a cmpxchg16b against {0,0}: if *target is
  // {0,0} it is (harmlessly) rewritten to {0,0}, otherwise rax:rdx receive
  // the current value.
  ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B(
      std::atomic<ConversionPair64>* target) {
    uint64_t first, second;
    __asm__ __volatile__(
        "lock cmpxchg16b (%2)"
        : "=&a"(first), "=&d"(second)
        : "r"(target), "a"(0), "d"(0), "b"(0), "c"(0)
        : "cc");
    return ConversionPair64(first, second);
  }

  // Emulate a 16-byte store with a cmpxchg16b loop: reload the current value
  // into rax:rdx and retry until the exchange with `value` succeeds.
  ALWAYS_INLINE static void AtomicStoreRelease16B(
      std::atomic<ConversionPair64>* target, ConversionPair64 value) {
    uint64_t first, second;
    __asm__ __volatile__ (
        "movq (%2), %%rax\n\t"
        "movq 8(%2), %%rdx\n\t"
        "1:\n\t"
        "lock cmpxchg16b (%2)\n\t"
        "jnz 1b"
        : "=&a"(first), "=&d"(second)
        : "r"(target), "b"(value.first), "c"(value.second)
        : "cc");
  }
#else
  static ConversionPair64 AtomicLoadRelaxed16B(std::atomic<ConversionPair64>* target);
  static void AtomicStoreRelease16B(std::atomic<ConversionPair64>* target, ConversionPair64 value);
#endif

  // Instance fields. Order/packing must stay in sync with java.lang.DexCache
  // (checked via art::DexCacheOffsets); do not reorder.
  HeapReference<String> location_;
  // Number of elements in the call_sites_ array. Note that this appears here
  // because of our packing logic for 32 bit fields.
  uint32_t num_resolved_call_sites_;

  uint64_t dex_file_;               // const DexFile*
  uint64_t resolved_call_sites_;    // GcRoot<CallSite>* array with num_resolved_call_sites_
                                    // elements.
  uint64_t resolved_fields_;        // std::atomic<FieldDexCachePair>*, array with
                                    // num_resolved_fields_ elements.
  uint64_t resolved_method_types_;  // std::atomic<MethodTypeDexCachePair>* array with
                                    // num_resolved_method_types_ elements.
  uint64_t resolved_methods_;       // ArtMethod*, array with num_resolved_methods_ elements.
  uint64_t resolved_types_;         // TypeDexCacheType*, array with num_resolved_types_ elements.
  uint64_t strings_;                // std::atomic<StringDexCachePair>*, array with num_strings_
                                    // elements.

  uint32_t num_resolved_fields_;        // Number of elements in the resolved_fields_ array.
  uint32_t num_resolved_method_types_;  // Number of elements in the resolved_method_types_ array.
  uint32_t num_resolved_methods_;       // Number of elements in the resolved_methods_ array.
  uint32_t num_resolved_types_;         // Number of elements in the resolved_types_ array.
  uint32_t num_strings_;                // Number of elements in the strings_ array.

  friend struct art::DexCacheOffsets;  // for verifying offset information
  friend class Object;  // For VisitReferences
  DISALLOW_IMPLICIT_CONSTRUCTORS(DexCache);
};
542
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800543} // namespace mirror
Brian Carlstrom7e49dca2011-07-22 18:07:34 -0700544} // namespace art
545
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700546#endif // ART_RUNTIME_MIRROR_DEX_CACHE_H_