Elliott Hughes | 2faa5f1 | 2012-01-30 14:42:07 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2011 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 16 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 17 | #ifndef ART_RUNTIME_MIRROR_DEX_CACHE_H_ |
| 18 | #define ART_RUNTIME_MIRROR_DEX_CACHE_H_ |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 19 | |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 20 | #include "array.h" |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 21 | #include "base/bit_utils.h" |
Andreas Gampe | a5b09a6 | 2016-11-17 15:21:22 -0800 | [diff] [blame] | 22 | #include "dex_file_types.h" |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 23 | #include "object.h" |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 24 | #include "object_array.h" |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 25 | |
| 26 | namespace art { |
| 27 | |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 28 | class ArtField; |
Alex Light | dba6148 | 2016-12-21 08:20:29 -0800 | [diff] [blame] | 29 | class ArtMethod; |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 30 | struct DexCacheOffsets; |
| 31 | class DexFile; |
| 32 | class ImageWriter; |
| 33 | union JValue; |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 34 | class LinearAlloc; |
| 35 | class Thread; |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 36 | |
| 37 | namespace mirror { |
| 38 | |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 39 | class CallSite; |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 40 | class Class; |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 41 | class MethodType; |
Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 42 | class String; |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 43 | |
// A pair of a GC root and the dex-file index it was resolved from.
// PACKED(8) fixes the layout at 8 bytes so a pair can be loaded/stored as a
// single unit through std::atomic<DexCachePair<T>>.
template <typename T> struct PACKED(8) DexCachePair {
  GcRoot<T> object;  // The cached object; null only in an unpopulated entry.
  uint32_t index;    // Dex-file id that `object` was resolved from.
  // The array is initially [ {0,0}, {0,0}, {0,0} ... ]
  // We maintain the invariant that once a dex cache entry is populated,
  // the pointer is always non-0
  // Any given entry would thus be:
  // {non-0, non-0} OR {0,0}
  //
  // It's generally sufficient then to check if the
  // lookup index matches the stored index (for a >0 lookup index)
  // because if it's true the pointer is also non-null.
  //
  // For the 0th entry which is a special case, the value is either
  // {0,0} (initial state) or {non-0, 0} which indicates
  // that a valid object is stored at that index for a dex section id of 0.
  //
  // As an optimization, we want to avoid branching on the object pointer since
  // it's always non-null if the id branch succeeds (except for the 0th id).
  // Set the initial state for the 0th entry to be {0,1} which is guaranteed to fail
  // the lookup id == stored id branch.
  DexCachePair(ObjPtr<T> object, uint32_t index)
      : object(object),
        index(index) {}
  DexCachePair() = default;
  DexCachePair(const DexCachePair<T>&) = default;
  DexCachePair& operator=(const DexCachePair<T>&) = default;

  // Store the poisoned {null, 1} sentinel into element 0 so that a lookup for
  // id 0 cannot spuriously match a default-initialized (zeroed) entry.
  static void Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
    DexCachePair<T> first_elem;
    first_elem.object = GcRoot<T>(nullptr);
    first_elem.index = InvalidIndexForSlot(0);
    dex_cache[0].store(first_elem, std::memory_order_relaxed);
  }

  // Returns an index value that can never equal a real lookup id mapping to `slot`.
  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }

  // Returns the cached object when `idx` matches the stored index, else null.
  // Relies on the invariant above: a matching index implies a non-null object.
  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (idx != index) {
      return nullptr;
    }
    DCHECK(!object.IsNull());
    return object.Read();
  }
};
Narayan Kamath | c38a6f8 | 2016-09-29 17:07:20 +0100 | [diff] [blame] | 93 | |
// Variant of DexCachePair that holds a raw native pointer (e.g. ArtField*)
// instead of a GC root. PACKED(2 * __SIZEOF_POINTER__) makes the pair exactly
// two pointer-sized words so it can be accessed as one atomic unit.
template <typename T> struct PACKED(2 * __SIZEOF_POINTER__) NativeDexCachePair {
  T* object;     // The cached native pointer; null only in an unpopulated entry.
  size_t index;  // Pointer-sized so the pair stays exactly two words.
  // This is similar to DexCachePair except that we're storing a native pointer
  // instead of a GC root. See DexCachePair for the details.
  // NOTE(review): the constructor takes uint32_t while the member is size_t —
  // presumably fine since dex indices fit in 32 bits, but confirm callers.
  NativeDexCachePair(T* object, uint32_t index)
      : object(object),
        index(index) {}
  NativeDexCachePair() : object(nullptr), index(0u) { }
  NativeDexCachePair(const NativeDexCachePair<T>&) = default;
  NativeDexCachePair& operator=(const NativeDexCachePair<T>&) = default;

  // Stores the poisoned slot-0 sentinel; defined out of line because the
  // implementation depends on the runtime pointer size.
  static void Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache, PointerSize pointer_size);

  // Returns an index value that can never equal a real lookup id mapping to `slot`.
  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }

  // Returns the cached pointer when `idx` matches the stored index, else null.
  T* GetObjectForIndex(uint32_t idx) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (idx != index) {
      return nullptr;
    }
    DCHECK(object != nullptr);
    return object;
  }
};
| 122 | |
// Atomic cache-entry aliases for each kind of resolved item. GC-visible kinds
// (types, strings, method types) use DexCachePair; fields use the raw-pointer
// NativeDexCachePair.
using TypeDexCachePair = DexCachePair<Class>;
using TypeDexCacheType = std::atomic<TypeDexCachePair>;

using StringDexCachePair = DexCachePair<String>;
using StringDexCacheType = std::atomic<StringDexCachePair>;

using FieldDexCachePair = NativeDexCachePair<ArtField>;
using FieldDexCacheType = std::atomic<FieldDexCachePair>;

using MethodTypeDexCachePair = DexCachePair<MethodType>;
using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;
| 134 | |
// C++ mirror of java.lang.DexCache.
class MANAGED DexCache FINAL : public Object {
 public:
  // Size of java.lang.DexCache.class.
  static uint32_t ClassSize(PointerSize pointer_size);

  // Size of type dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheTypeCacheSize),
                "Type dex cache size is not a power of 2.");

  // Size of string dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheStringCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheStringCacheSize),
                "String dex cache size is not a power of 2.");

  // Size of field dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheFieldCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheFieldCacheSize),
                "Field dex cache size is not a power of 2.");

  // Size of method type dex cache. Needs to be a power of 2 for entrypoint assumptions
  // to hold.
  static constexpr size_t kDexCacheMethodTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize),
                "MethodType dex cache size is not a power of 2.");

  // Number of elements in the resolved-types cache array.
  static constexpr size_t StaticTypeSize() {
    return kDexCacheTypeCacheSize;
  }

  // Number of elements in the string cache array.
  static constexpr size_t StaticStringSize() {
    return kDexCacheStringCacheSize;
  }

  // Number of elements in the resolved-fields cache array.
  static constexpr size_t StaticArtFieldSize() {
    return kDexCacheFieldCacheSize;
  }

  // Number of elements in the method-type cache array.
  static constexpr size_t StaticMethodTypeSize() {
    return kDexCacheMethodTypeCacheSize;
  }

  // Size of an instance of java.lang.DexCache not including referenced values.
  static constexpr uint32_t InstanceSize() {
    return sizeof(DexCache);
  }
| 182 | |
  // Initializes `dex_cache` for `dex_file`: allocates the native backing
  // arrays from `linear_alloc` and wires them (plus `location`) into the
  // object. Must hold dex_lock_; defined in dex_cache.cc.
  static void InitializeDexCache(Thread* self,
                                 ObjPtr<mirror::DexCache> dex_cache,
                                 ObjPtr<mirror::String> location,
                                 const DexFile* dex_file,
                                 LinearAlloc* linear_alloc,
                                 PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::dex_lock_);

  // NOTE(review): presumably rewrites resolved-method entries to `trampoline`
  // for image writing — confirm against the definition in dex_cache.cc.
  void Fixup(ArtMethod* trampoline, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // The Fixup* helpers below take a destination array and a visitor;
  // NOTE(review): they appear to relocate entries into `dest` mapping each
  // object through `visitor` (image writing) — confirm in the definitions.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex file location string stored in the managed `location_` field.
  String* GetLocation() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldObject<String>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
  }
| 214 | |
  // Offsets of the managed fields, for use by compiled code and the image
  // writer (computed via OFFSET_OF_OBJECT_MEMBER, so they track the C++
  // mirror layout).

  static MemberOffset StringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, strings_);
  }

  static MemberOffset ResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_);
  }

  static MemberOffset ResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_fields_);
  }

  static MemberOffset ResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_methods_);
  }

  static MemberOffset ResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_method_types_);
  }

  static MemberOffset ResolvedCallSitesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_call_sites_);
  }

  static MemberOffset NumStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_strings_);
  }

  static MemberOffset NumResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_types_);
  }

  static MemberOffset NumResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_fields_);
  }

  static MemberOffset NumResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_methods_);
  }

  static MemberOffset NumResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_);
  }

  static MemberOffset NumResolvedCallSitesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_call_sites_);
  }
| 262 | |
  // Resolved-entry accessors; definitions are out of line (see the -inl.h /
  // .cc files for this header's class).

  String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear a string for a string_idx, used to undo string intern transactions to make sure
  // the string isn't kept live.
  void ClearString(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  Class* GetResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ClearResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx,
                                       ArtMethod* resolved,
                                       PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pointer sized variant, used for patching.
  ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pointer sized variant, used for patching.
  ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE void ClearResolvedField(uint32_t idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  MethodType* GetResolvedMethodType(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedMethodType(uint32_t proto_idx, MethodType* resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  CallSite* GetResolvedCallSite(uint32_t call_site_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  // Attempts to bind |call_site_idx| to the call site |resolved|. The
  // caller must use the return value in place of |resolved|. This is
  // because multiple threads can invoke the bootstrap method each
  // producing a call site, but the method handle invocation on the
  // call site must be on a common agreed value.
  CallSite* SetResolvedCallSite(uint32_t call_site_idx, CallSite* resolved) WARN_UNUSED
      REQUIRES_SHARED(Locks::mutator_lock_);
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 312 | |
  // Raw accessors for the native backing arrays. The arrays themselves live
  // in native (linear-alloc) memory; only the pointers are stored in the
  // managed object.
  // NOTE(review): strings and method types are read with GetFieldPtr64 while
  // the other arrays use GetFieldPtr — verify this asymmetry is intentional.

  StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<StringDexCacheType*>(StringsOffset());
  }

  void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(StringsOffset(), strings);
  }

  TypeDexCacheType* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<TypeDexCacheType*>(ResolvedTypesOffset());
  }

  void SetResolvedTypes(TypeDexCacheType* resolved_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
  }

  ArtMethod** GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<ArtMethod**>(ResolvedMethodsOffset());
  }

  void SetResolvedMethods(ArtMethod** resolved_methods)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods);
  }

  FieldDexCacheType* GetResolvedFields() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<FieldDexCacheType*>(ResolvedFieldsOffset());
  }

  void SetResolvedFields(FieldDexCacheType* resolved_fields)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedFieldsOffset(), resolved_fields);
  }

  MethodTypeDexCacheType* GetResolvedMethodTypes()
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<MethodTypeDexCacheType*>(ResolvedMethodTypesOffset());
  }

  void SetResolvedMethodTypes(MethodTypeDexCacheType* resolved_method_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodTypesOffset(), resolved_method_types);
  }

  GcRoot<CallSite>* GetResolvedCallSites()
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<GcRoot<CallSite>*>(ResolvedCallSitesOffset());
  }

  void SetResolvedCallSites(GcRoot<CallSite>* resolved_call_sites)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedCallSitesOffset(), resolved_call_sites);
  }
| 373 | |
  // Element counts of the backing arrays, read from 32-bit managed fields.

  size_t NumStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumStringsOffset());
  }

  size_t NumResolvedTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedTypesOffset());
  }

  size_t NumResolvedMethods() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedMethodsOffset());
  }

  size_t NumResolvedFields() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedFieldsOffset());
  }

  size_t NumResolvedMethodTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedMethodTypesOffset());
  }

  size_t NumResolvedCallSites() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedCallSitesOffset());
  }

  // Returns the native DexFile this cache mirrors (stored as a raw pointer field).
  const DexFile* GetDexFile() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<const DexFile*>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_));
  }

  void SetDexFile(const DexFile* dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file);
  }
Brian Carlstrom | c4fa2c0 | 2011-08-21 03:00:12 -0700 | [diff] [blame] | 405 | |
  // Stores the dex file location string into the managed `location_` field.
  void SetLocation(ObjPtr<String> location) REQUIRES_SHARED(Locks::mutator_lock_);

  // NOTE: Get/SetElementPtrSize() are intended for working with ArtMethod** and ArtField**
  // provided by GetResolvedMethods/Fields() and ArtMethod::GetDexCacheResolvedMethods(),
  // so they need to be public.

  // Reads element `idx` of `ptr_array`, honoring the runtime pointer size
  // (arrays may be 32-bit in a 64-bit host process, e.g. during compilation).
  template <typename PtrType>
  static PtrType GetElementPtrSize(PtrType* ptr_array, size_t idx, PointerSize ptr_size);

  // Writes element `idx` of `ptr_array`, honoring the runtime pointer size.
  template <typename PtrType>
  static void SetElementPtrSize(PtrType* ptr_array, size_t idx, PtrType ptr, PointerSize ptr_size);

  // Atomic load of a NativeDexCachePair element, honoring the pointer size.
  template <typename T>
  static NativeDexCachePair<T> GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                                    size_t idx,
                                                    PointerSize ptr_size);

  // Atomic store of a NativeDexCachePair element, honoring the pointer size.
  template <typename T>
  static void SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                   size_t idx,
                                   NativeDexCachePair<T> pair,
                                   PointerSize ptr_size);

  // Map a dex-file id to its slot in the corresponding fixed-size cache array.
  uint32_t StringSlotIndex(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t TypeSlotIndex(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t FieldSlotIndex(uint32_t field_idx) REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t MethodTypeSlotIndex(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);
| 433 | |
Brian Carlstrom | c4fa2c0 | 2011-08-21 03:00:12 -0700 | [diff] [blame] | 434 | private: |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 435 | void Init(const DexFile* dex_file, |
| 436 | ObjPtr<String> location, |
| 437 | StringDexCacheType* strings, |
| 438 | uint32_t num_strings, |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 439 | TypeDexCacheType* resolved_types, |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 440 | uint32_t num_resolved_types, |
| 441 | ArtMethod** resolved_methods, |
| 442 | uint32_t num_resolved_methods, |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 443 | FieldDexCacheType* resolved_fields, |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 444 | uint32_t num_resolved_fields, |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 445 | MethodTypeDexCacheType* resolved_method_types, |
| 446 | uint32_t num_resolved_method_types, |
| 447 | GcRoot<CallSite>* resolved_call_sites, |
| 448 | uint32_t num_resolved_call_sites, |
Andreas Gampe | cc1b535 | 2016-12-01 16:58:38 -0800 | [diff] [blame] | 449 | PointerSize pointer_size) |
| 450 | REQUIRES_SHARED(Locks::mutator_lock_); |
| 451 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 452 | // std::pair<> is not trivially copyable and as such it is unsuitable for atomic operations, |
| 453 | // so we use a custom pair class for loading and storing the NativeDexCachePair<>. |
| 454 | template <typename IntType> |
| 455 | struct PACKED(2 * sizeof(IntType)) ConversionPair { |
| 456 | ConversionPair(IntType f, IntType s) : first(f), second(s) { } |
| 457 | ConversionPair(const ConversionPair&) = default; |
| 458 | ConversionPair& operator=(const ConversionPair&) = default; |
| 459 | IntType first; |
| 460 | IntType second; |
| 461 | }; |
| 462 | using ConversionPair32 = ConversionPair<uint32_t>; |
| 463 | using ConversionPair64 = ConversionPair<uint64_t>; |
| 464 | |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 465 | // Visit instance fields of the dex cache as well as its associated arrays. |
Mathieu Chartier | fbc3108 | 2016-01-24 11:59:56 -0800 | [diff] [blame] | 466 | template <bool kVisitNativeRoots, |
| 467 | VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, |
| 468 | ReadBarrierOption kReadBarrierOption = kWithReadBarrier, |
| 469 | typename Visitor> |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 470 | void VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 471 | REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_); |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 472 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 473 | // Due to lack of 16-byte atomics support, we use hand-crafted routines. |
| 474 | #if defined(__aarch64__) |
| 475 | // 16-byte atomics are supported on aarch64. |
| 476 | ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B( |
| 477 | std::atomic<ConversionPair64>* target) { |
| 478 | return target->load(std::memory_order_relaxed); |
| 479 | } |
| 480 | |
| 481 | ALWAYS_INLINE static void AtomicStoreRelease16B( |
| 482 | std::atomic<ConversionPair64>* target, ConversionPair64 value) { |
| 483 | target->store(value, std::memory_order_release); |
| 484 | } |
| 485 | #elif defined(__x86_64__) |
| 486 | ALWAYS_INLINE static ConversionPair64 AtomicLoadRelaxed16B( |
| 487 | std::atomic<ConversionPair64>* target) { |
| 488 | uint64_t first, second; |
| 489 | __asm__ __volatile__( |
| 490 | "lock cmpxchg16b (%2)" |
| 491 | : "=&a"(first), "=&d"(second) |
| 492 | : "r"(target), "a"(0), "d"(0), "b"(0), "c"(0) |
| 493 | : "cc"); |
| 494 | return ConversionPair64(first, second); |
| 495 | } |
| 496 | |
| 497 | ALWAYS_INLINE static void AtomicStoreRelease16B( |
| 498 | std::atomic<ConversionPair64>* target, ConversionPair64 value) { |
| 499 | uint64_t first, second; |
| 500 | __asm__ __volatile__ ( |
| 501 | "movq (%2), %%rax\n\t" |
| 502 | "movq 8(%2), %%rdx\n\t" |
| 503 | "1:\n\t" |
| 504 | "lock cmpxchg16b (%2)\n\t" |
| 505 | "jnz 1b" |
| 506 | : "=&a"(first), "=&d"(second) |
| 507 | : "r"(target), "b"(value.first), "c"(value.second) |
| 508 | : "cc"); |
| 509 | } |
| 510 | #else |
| 511 | static ConversionPair64 AtomicLoadRelaxed16B(std::atomic<ConversionPair64>* target); |
| 512 | static void AtomicStoreRelease16B(std::atomic<ConversionPair64>* target, ConversionPair64 value); |
| 513 | #endif |
| 514 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 515 | HeapReference<String> location_; |
Narayan Kamath | 6b2dc31 | 2017-03-14 13:26:12 +0000 | [diff] [blame] | 516 | // Number of elements in the call_sites_ array. Note that this appears here |
| 517 | // because of our packing logic for 32 bit fields. |
| 518 | uint32_t num_resolved_call_sites_; |
| 519 | |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 520 | uint64_t dex_file_; // const DexFile* |
Orion Hodson | c069a30 | 2017-01-18 09:23:12 +0000 | [diff] [blame] | 521 | uint64_t resolved_call_sites_; // GcRoot<CallSite>* array with num_resolved_call_sites_ |
| 522 | // elements. |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 523 | uint64_t resolved_fields_; // std::atomic<FieldDexCachePair>*, array with |
| 524 | // num_resolved_fields_ elements. |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 525 | uint64_t resolved_method_types_; // std::atomic<MethodTypeDexCachePair>* array with |
| 526 | // num_resolved_method_types_ elements. |
| 527 | uint64_t resolved_methods_; // ArtMethod*, array with num_resolved_methods_ elements. |
Vladimir Marko | 8d6768d | 2017-03-14 10:13:21 +0000 | [diff] [blame] | 528 | uint64_t resolved_types_; // TypeDexCacheType*, array with num_resolved_types_ elements. |
Narayan Kamath | 25352fc | 2016-08-03 12:46:58 +0100 | [diff] [blame] | 529 | uint64_t strings_; // std::atomic<StringDexCachePair>*, array with num_strings_ |
| 530 | // elements. |
| 531 | |
| 532 | uint32_t num_resolved_fields_; // Number of elements in the resolved_fields_ array. |
| 533 | uint32_t num_resolved_method_types_; // Number of elements in the resolved_method_types_ array. |
| 534 | uint32_t num_resolved_methods_; // Number of elements in the resolved_methods_ array. |
| 535 | uint32_t num_resolved_types_; // Number of elements in the resolved_types_ array. |
| 536 | uint32_t num_strings_; // Number of elements in the strings_ array. |
Brian Carlstrom | 83db772 | 2011-08-26 17:32:56 -0700 | [diff] [blame] | 537 | |
Brian Carlstrom | 7934ac2 | 2013-07-26 10:54:15 -0700 | [diff] [blame] | 538 | friend struct art::DexCacheOffsets; // for verifying offset information |
Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 539 | friend class Object; // For VisitReferences |
Brian Carlstrom | c4fa2c0 | 2011-08-21 03:00:12 -0700 | [diff] [blame] | 540 | DISALLOW_IMPLICIT_CONSTRUCTORS(DexCache); |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 541 | }; |
| 542 | |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 543 | } // namespace mirror |
Brian Carlstrom | 7e49dca | 2011-07-22 18:07:34 -0700 | [diff] [blame] | 544 | } // namespace art |
| 545 | |
Brian Carlstrom | fc0e321 | 2013-07-17 14:40:12 -0700 | [diff] [blame] | 546 | #endif // ART_RUNTIME_MIRROR_DEX_CACHE_H_ |