/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ASM_SUPPORT_H_
#define ART_RUNTIME_ASM_SUPPORT_H_

#if defined(__cplusplus)
#include "art_method.h"
#include "gc/allocator/rosalloc.h"
#include "lock_word.h"
#include "mirror/class.h"
#include "mirror/string.h"
#include "runtime.h"
#include "thread.h"
#endif

#include "read_barrier_c.h"

#if defined(__arm__) || defined(__mips__)
// In quick code for ARM and MIPS we make poor use of registers and perform frequent suspend
// checks in the event of loop back edges. The SUSPEND_CHECK_INTERVAL constant is loaded into a
// register at the point of an up-call or after handling a suspend check. It reduces the number of
// loads of the TLS suspend check value by the given amount (turning them into a decrement and
// compare of a register). This increases the time for a thread to respond to requests from GC and
// the debugger, damaging GC performance and creating other unwanted artifacts. For example, this
// count has the effect of making loops in Java code look cold in profilers, since where the count
// is reset affects where samples will occur. Reducing the count as much as possible improves
// profiler accuracy in tools like traceview.
// TODO: get a compiler that can do a proper job of loop optimization and remove this.
#define SUSPEND_CHECK_INTERVAL 96
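// Illustrative sketch only (not the actual generated code): with this scheme a loop back edge
// effectively compiles down to a register decrement-and-compare instead of a TLS load on every
// iteration:
//   counter = SUSPEND_CHECK_INTERVAL;   // reloaded at up-calls and after suspend checks
//   loop:
//     ... loop body ...
//     if (--counter == 0) { /* do the suspend check, then reload counter */ }
//     goto loop;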
#endif

#if defined(__cplusplus)

#ifndef ADD_TEST_EQ  // Allow the includer to replace this with its own definition.
#define ADD_TEST_EQ(x, y) CHECK_EQ(x, y);
#endif

static inline void CheckAsmSupportOffsetsAndSizes() {
#else
#define ADD_TEST_EQ(x, y)
#endif
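// Note on the mechanism: when this header is included from C++, each ADD_TEST_EQ(x, y) below
// expands to CHECK_EQ(x, y); inside CheckAsmSupportOffsetsAndSizes(), so C++ code can verify
// every hand-maintained constant against the real offset or size. When included from assembly,
// ADD_TEST_EQ expands to nothing and only the #defines remain.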

#if defined(__LP64__)
#define POINTER_SIZE_SHIFT 3
#else
#define POINTER_SIZE_SHIFT 2
#endif
ADD_TEST_EQ(static_cast<size_t>(1U << POINTER_SIZE_SHIFT),
            static_cast<size_t>(__SIZEOF_POINTER__))
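// For illustration: POINTER_SIZE_SHIFT is log2(sizeof(void*)), so assembly can scale an index
// into a table of pointers with a shift, e.g. byte_offset = index << POINTER_SIZE_SHIFT.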

// Size of references to the heap on the stack.
#define STACK_REFERENCE_SIZE 4
ADD_TEST_EQ(static_cast<size_t>(STACK_REFERENCE_SIZE), sizeof(art::StackReference<art::mirror::Object>))

// Size of heap references.
#define COMPRESSED_REFERENCE_SIZE 4
ADD_TEST_EQ(static_cast<size_t>(COMPRESSED_REFERENCE_SIZE),
            sizeof(art::mirror::CompressedReference<art::mirror::Object>))

#define COMPRESSED_REFERENCE_SIZE_SHIFT 2
ADD_TEST_EQ(static_cast<size_t>(1U << COMPRESSED_REFERENCE_SIZE_SHIFT),
            static_cast<size_t>(COMPRESSED_REFERENCE_SIZE))

// Note: loads of these callee-save methods require read barriers.
// Offset of field Runtime::callee_save_methods_[kSaveAll].
#define RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET 0
ADD_TEST_EQ(static_cast<size_t>(RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET),
            art::Runtime::GetCalleeSaveMethodOffset(art::Runtime::kSaveAll))

// Offset of field Runtime::callee_save_methods_[kRefsOnly].
#define RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET 8
ADD_TEST_EQ(static_cast<size_t>(RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET),
            art::Runtime::GetCalleeSaveMethodOffset(art::Runtime::kRefsOnly))

// Offset of field Runtime::callee_save_methods_[kRefsAndArgs].
#define RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET (2 * 8)
ADD_TEST_EQ(static_cast<size_t>(RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET),
            art::Runtime::GetCalleeSaveMethodOffset(art::Runtime::kRefsAndArgs))
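// Note: the offsets above (0, 8, 2 * 8) imply that callee_save_methods_ sits at the start of
// Runtime and that each entry is 8 bytes wide, even on 32-bit targets.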

// Offset of field Thread::tls32_.state_and_flags.
#define THREAD_FLAGS_OFFSET 0
ADD_TEST_EQ(THREAD_FLAGS_OFFSET,
            art::Thread::ThreadFlagsOffset<__SIZEOF_POINTER__>().Int32Value())

// Offset of field Thread::tls32_.thin_lock_thread_id.
#define THREAD_ID_OFFSET 12
ADD_TEST_EQ(THREAD_ID_OFFSET,
            art::Thread::ThinLockIdOffset<__SIZEOF_POINTER__>().Int32Value())

// Offset of field Thread::tlsPtr_.card_table.
#define THREAD_CARD_TABLE_OFFSET 128
ADD_TEST_EQ(THREAD_CARD_TABLE_OFFSET,
            art::Thread::CardTableOffset<__SIZEOF_POINTER__>().Int32Value())

// Offset of field Thread::tlsPtr_.exception.
#define THREAD_EXCEPTION_OFFSET (THREAD_CARD_TABLE_OFFSET + __SIZEOF_POINTER__)
ADD_TEST_EQ(THREAD_EXCEPTION_OFFSET,
            art::Thread::ExceptionOffset<__SIZEOF_POINTER__>().Int32Value())

// Offset of field Thread::tlsPtr_.managed_stack.top_quick_frame_.
#define THREAD_TOP_QUICK_FRAME_OFFSET (THREAD_CARD_TABLE_OFFSET + (3 * __SIZEOF_POINTER__))
ADD_TEST_EQ(THREAD_TOP_QUICK_FRAME_OFFSET,
            art::Thread::TopOfManagedStackOffset<__SIZEOF_POINTER__>().Int32Value())

// Offset of field Thread::tlsPtr_.self.
#define THREAD_SELF_OFFSET (THREAD_CARD_TABLE_OFFSET + (9 * __SIZEOF_POINTER__))
ADD_TEST_EQ(THREAD_SELF_OFFSET,
            art::Thread::SelfOffset<__SIZEOF_POINTER__>().Int32Value())
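// For illustration: hand-written stubs address these fields relative to the dedicated thread
// register, e.g. on ARM (where r9 holds the Thread*) something like
//   ldr r0, [r9, #THREAD_SELF_OFFSET]
// loads the Thread* stored in tlsPtr_.self.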

// Offset of field Thread::tlsPtr_.thread_local_pos.
#define THREAD_LOCAL_POS_OFFSET (THREAD_CARD_TABLE_OFFSET + 151 * __SIZEOF_POINTER__)
ADD_TEST_EQ(THREAD_LOCAL_POS_OFFSET,
            art::Thread::ThreadLocalPosOffset<__SIZEOF_POINTER__>().Int32Value())
// Offset of field Thread::tlsPtr_.thread_local_end.
#define THREAD_LOCAL_END_OFFSET (THREAD_LOCAL_POS_OFFSET + __SIZEOF_POINTER__)
ADD_TEST_EQ(THREAD_LOCAL_END_OFFSET,
            art::Thread::ThreadLocalEndOffset<__SIZEOF_POINTER__>().Int32Value())
// Offset of field Thread::tlsPtr_.thread_local_objects.
#define THREAD_LOCAL_OBJECTS_OFFSET (THREAD_LOCAL_POS_OFFSET + 2 * __SIZEOF_POINTER__)
ADD_TEST_EQ(THREAD_LOCAL_OBJECTS_OFFSET,
            art::Thread::ThreadLocalObjectsOffset<__SIZEOF_POINTER__>().Int32Value())
// Offset of field Thread::tlsPtr_.rosalloc_runs.
#define THREAD_ROSALLOC_RUNS_OFFSET (THREAD_LOCAL_POS_OFFSET + 3 * __SIZEOF_POINTER__)
ADD_TEST_EQ(THREAD_ROSALLOC_RUNS_OFFSET,
            art::Thread::RosAllocRunsOffset<__SIZEOF_POINTER__>().Int32Value())
// Offset of field Thread::tlsPtr_.thread_local_alloc_stack_top.
#define THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET (THREAD_ROSALLOC_RUNS_OFFSET + 34 * __SIZEOF_POINTER__)
ADD_TEST_EQ(THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET,
            art::Thread::ThreadLocalAllocStackTopOffset<__SIZEOF_POINTER__>().Int32Value())
// Offset of field Thread::tlsPtr_.thread_local_alloc_stack_end.
#define THREAD_LOCAL_ALLOC_STACK_END_OFFSET (THREAD_ROSALLOC_RUNS_OFFSET + 35 * __SIZEOF_POINTER__)
ADD_TEST_EQ(THREAD_LOCAL_ALLOC_STACK_END_OFFSET,
            art::Thread::ThreadLocalAllocStackEndOffset<__SIZEOF_POINTER__>().Int32Value())
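// Illustrative TLAB fast path, as a sketch of how allocation stubs use the thread-local
// offsets above (the real stubs are hand-written assembly):
//   pos = self->tlsPtr_.thread_local_pos;                      // THREAD_LOCAL_POS_OFFSET
//   if (pos + byte_count <= self->tlsPtr_.thread_local_end) {  // THREAD_LOCAL_END_OFFSET
//     self->tlsPtr_.thread_local_pos = pos + byte_count;       // bump-pointer allocation
//     self->tlsPtr_.thread_local_objects++;                    // THREAD_LOCAL_OBJECTS_OFFSET
//     return pos;
//   }  // else take the slow path.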

// Offsets within java.lang.Object.
#define MIRROR_OBJECT_CLASS_OFFSET 0
ADD_TEST_EQ(MIRROR_OBJECT_CLASS_OFFSET, art::mirror::Object::ClassOffset().Int32Value())
#define MIRROR_OBJECT_LOCK_WORD_OFFSET 4
ADD_TEST_EQ(MIRROR_OBJECT_LOCK_WORD_OFFSET, art::mirror::Object::MonitorOffset().Int32Value())

#if defined(USE_BROOKS_READ_BARRIER)
#define MIRROR_OBJECT_HEADER_SIZE 16
#else
#define MIRROR_OBJECT_HEADER_SIZE 8
#endif
ADD_TEST_EQ(size_t(MIRROR_OBJECT_HEADER_SIZE), sizeof(art::mirror::Object))
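// Note: per the offsets above, the header is a 4-byte class pointer followed by a 4-byte lock
// word; with USE_BROOKS_READ_BARRIER the object presumably carries an extra forwarding-pointer
// field plus padding, doubling the header to 16 bytes.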

// Offsets within java.lang.Class.
#define MIRROR_CLASS_COMPONENT_TYPE_OFFSET (4 + MIRROR_OBJECT_HEADER_SIZE)
ADD_TEST_EQ(MIRROR_CLASS_COMPONENT_TYPE_OFFSET,
            art::mirror::Class::ComponentTypeOffset().Int32Value())
#define MIRROR_CLASS_ACCESS_FLAGS_OFFSET (72 + MIRROR_OBJECT_HEADER_SIZE)
ADD_TEST_EQ(MIRROR_CLASS_ACCESS_FLAGS_OFFSET,
            art::mirror::Class::AccessFlagsOffset().Int32Value())
#define MIRROR_CLASS_OBJECT_SIZE_OFFSET (104 + MIRROR_OBJECT_HEADER_SIZE)
ADD_TEST_EQ(MIRROR_CLASS_OBJECT_SIZE_OFFSET,
            art::mirror::Class::ObjectSizeOffset().Int32Value())
#define MIRROR_CLASS_STATUS_OFFSET (116 + MIRROR_OBJECT_HEADER_SIZE)
ADD_TEST_EQ(MIRROR_CLASS_STATUS_OFFSET,
            art::mirror::Class::StatusOffset().Int32Value())

#define MIRROR_CLASS_STATUS_INITIALIZED 10
ADD_TEST_EQ(static_cast<uint32_t>(MIRROR_CLASS_STATUS_INITIALIZED),
            static_cast<uint32_t>(art::mirror::Class::kStatusInitialized))
#define ACCESS_FLAGS_CLASS_IS_FINALIZABLE 0x80000000
ADD_TEST_EQ(static_cast<uint32_t>(ACCESS_FLAGS_CLASS_IS_FINALIZABLE),
            static_cast<uint32_t>(art::kAccClassIsFinalizable))

// Array offsets.
#define MIRROR_ARRAY_LENGTH_OFFSET MIRROR_OBJECT_HEADER_SIZE
ADD_TEST_EQ(MIRROR_ARRAY_LENGTH_OFFSET, art::mirror::Array::LengthOffset().Int32Value())

#define MIRROR_CHAR_ARRAY_DATA_OFFSET (4 + MIRROR_OBJECT_HEADER_SIZE)
ADD_TEST_EQ(MIRROR_CHAR_ARRAY_DATA_OFFSET,
            art::mirror::Array::DataOffset(sizeof(uint16_t)).Int32Value())

#define MIRROR_OBJECT_ARRAY_DATA_OFFSET (4 + MIRROR_OBJECT_HEADER_SIZE)
ADD_TEST_EQ(MIRROR_OBJECT_ARRAY_DATA_OFFSET,
            art::mirror::Array::DataOffset(
                sizeof(art::mirror::HeapReference<art::mirror::Object>)).Int32Value())

#define MIRROR_OBJECT_ARRAY_COMPONENT_SIZE 4
ADD_TEST_EQ(static_cast<size_t>(MIRROR_OBJECT_ARRAY_COMPONENT_SIZE),
            sizeof(art::mirror::HeapReference<art::mirror::Object>))

#define MIRROR_LONG_ARRAY_DATA_OFFSET (8 + MIRROR_OBJECT_HEADER_SIZE)
ADD_TEST_EQ(MIRROR_LONG_ARRAY_DATA_OFFSET,
            art::mirror::Array::DataOffset(sizeof(uint64_t)).Int32Value())
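// Note: Array::DataOffset aligns the data start to the element size, which is why 8-byte
// elements begin at (8 + MIRROR_OBJECT_HEADER_SIZE) while 2- and 4-byte elements begin right
// after the 4-byte length field at (4 + MIRROR_OBJECT_HEADER_SIZE).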

// Offsets within java.lang.String.
#define MIRROR_STRING_COUNT_OFFSET MIRROR_OBJECT_HEADER_SIZE
ADD_TEST_EQ(MIRROR_STRING_COUNT_OFFSET, art::mirror::String::CountOffset().Int32Value())

#define MIRROR_STRING_VALUE_OFFSET (8 + MIRROR_OBJECT_HEADER_SIZE)
ADD_TEST_EQ(MIRROR_STRING_VALUE_OFFSET, art::mirror::String::ValueOffset().Int32Value())
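// Note: the 8-byte gap between count and value presumably covers the 4-byte count plus a
// 4-byte cached hash code, with the character data stored inline after them.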

// Offsets within art::ArtMethod.
#define ART_METHOD_DEX_CACHE_METHODS_OFFSET_32 20
ADD_TEST_EQ(ART_METHOD_DEX_CACHE_METHODS_OFFSET_32,
            art::ArtMethod::DexCacheResolvedMethodsOffset(4).Int32Value())

#define ART_METHOD_DEX_CACHE_METHODS_OFFSET_64 24
ADD_TEST_EQ(ART_METHOD_DEX_CACHE_METHODS_OFFSET_64,
            art::ArtMethod::DexCacheResolvedMethodsOffset(8).Int32Value())

#define ART_METHOD_DEX_CACHE_TYPES_OFFSET_32 24
ADD_TEST_EQ(ART_METHOD_DEX_CACHE_TYPES_OFFSET_32,
            art::ArtMethod::DexCacheResolvedTypesOffset(4).Int32Value())

#define ART_METHOD_DEX_CACHE_TYPES_OFFSET_64 32
ADD_TEST_EQ(ART_METHOD_DEX_CACHE_TYPES_OFFSET_64,
            art::ArtMethod::DexCacheResolvedTypesOffset(8).Int32Value())

#define ART_METHOD_QUICK_CODE_OFFSET_32 32
ADD_TEST_EQ(ART_METHOD_QUICK_CODE_OFFSET_32,
            art::ArtMethod::EntryPointFromQuickCompiledCodeOffset(4).Int32Value())

#define ART_METHOD_QUICK_CODE_OFFSET_64 48
ADD_TEST_EQ(ART_METHOD_QUICK_CODE_OFFSET_64,
            art::ArtMethod::EntryPointFromQuickCompiledCodeOffset(8).Int32Value())

#define LOCK_WORD_STATE_SHIFT 30
ADD_TEST_EQ(LOCK_WORD_STATE_SHIFT, static_cast<int32_t>(art::LockWord::kStateShift))

#define LOCK_WORD_STATE_MASK 0xC0000000
ADD_TEST_EQ(LOCK_WORD_STATE_MASK, static_cast<uint32_t>(art::LockWord::kStateMaskShifted))

#define LOCK_WORD_READ_BARRIER_STATE_SHIFT 28
ADD_TEST_EQ(LOCK_WORD_READ_BARRIER_STATE_SHIFT,
            static_cast<int32_t>(art::LockWord::kReadBarrierStateShift))

#define LOCK_WORD_READ_BARRIER_STATE_MASK 0x30000000
ADD_TEST_EQ(LOCK_WORD_READ_BARRIER_STATE_MASK,
            static_cast<int32_t>(art::LockWord::kReadBarrierStateMaskShifted))

#define LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED 0xCFFFFFFF
ADD_TEST_EQ(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED,
            static_cast<uint32_t>(art::LockWord::kReadBarrierStateMaskShiftedToggled))

#define LOCK_WORD_THIN_LOCK_COUNT_ONE 65536
ADD_TEST_EQ(LOCK_WORD_THIN_LOCK_COUNT_ONE, static_cast<int32_t>(art::LockWord::kThinLockCountOne))
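// For illustration, the lock word bit layout implied by the constants above: bits 31-30 hold
// the state, bits 29-28 the read barrier state, and for thin locks the recursion count starts
// at bit 16 (LOCK_WORD_THIN_LOCK_COUNT_ONE == 1 << 16), leaving the low bits for the owning
// thread id.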

#define OBJECT_ALIGNMENT_MASK 7
ADD_TEST_EQ(static_cast<size_t>(OBJECT_ALIGNMENT_MASK), art::kObjectAlignment - 1)

#define OBJECT_ALIGNMENT_MASK_TOGGLED 0xFFFFFFF8
ADD_TEST_EQ(static_cast<uint32_t>(OBJECT_ALIGNMENT_MASK_TOGGLED),
            ~static_cast<uint32_t>(art::kObjectAlignment - 1))
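// For illustration: objects are 8-byte aligned, so a size is rounded up to the alignment with
//   aligned_size = (size + OBJECT_ALIGNMENT_MASK) & OBJECT_ALIGNMENT_MASK_TOGGLED;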

#define ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE 128
ADD_TEST_EQ(ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE,
            static_cast<int32_t>(art::gc::allocator::RosAlloc::kMaxThreadLocalBracketSize))

#define ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT 4
ADD_TEST_EQ(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT,
            static_cast<int32_t>(art::gc::allocator::RosAlloc::kBracketQuantumSizeShift))

#define ROSALLOC_BRACKET_QUANTUM_SIZE_MASK 15
ADD_TEST_EQ(ROSALLOC_BRACKET_QUANTUM_SIZE_MASK,
            static_cast<int32_t>(art::gc::allocator::RosAlloc::kBracketQuantumSize - 1))

#define ROSALLOC_BRACKET_QUANTUM_SIZE_MASK_TOGGLED32 0xfffffff0
ADD_TEST_EQ(static_cast<uint32_t>(ROSALLOC_BRACKET_QUANTUM_SIZE_MASK_TOGGLED32),
            ~static_cast<uint32_t>(art::gc::allocator::RosAlloc::kBracketQuantumSize - 1))

#define ROSALLOC_BRACKET_QUANTUM_SIZE_MASK_TOGGLED64 0xfffffffffffffff0
ADD_TEST_EQ(static_cast<uint64_t>(ROSALLOC_BRACKET_QUANTUM_SIZE_MASK_TOGGLED64),
            ~static_cast<uint64_t>(art::gc::allocator::RosAlloc::kBracketQuantumSize - 1))
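// Illustrative sketch of how a rosalloc fast path can use these constants: a request no larger
// than ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE is rounded up to the 16-byte bracket quantum and
// mapped to a per-thread run, e.g.
//   bracket_size = (size + ROSALLOC_BRACKET_QUANTUM_SIZE_MASK)
//                      & ROSALLOC_BRACKET_QUANTUM_SIZE_MASK_TOGGLED32;
//   run = self->tlsPtr_.rosalloc_runs[(bracket_size >> ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT) - 1];
// The exact run indexing is an assumption here; the authoritative mapping lives in rosalloc.h.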

#define ROSALLOC_RUN_FREE_LIST_OFFSET 8
ADD_TEST_EQ(ROSALLOC_RUN_FREE_LIST_OFFSET,
            static_cast<int32_t>(art::gc::allocator::RosAlloc::RunFreeListOffset()))

#define ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET 0
ADD_TEST_EQ(ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET,
            static_cast<int32_t>(art::gc::allocator::RosAlloc::RunFreeListHeadOffset()))

#define ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET 16
ADD_TEST_EQ(ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET,
            static_cast<int32_t>(art::gc::allocator::RosAlloc::RunFreeListSizeOffset()))

#define ROSALLOC_SLOT_NEXT_OFFSET 0
ADD_TEST_EQ(ROSALLOC_SLOT_NEXT_OFFSET,
            static_cast<int32_t>(art::gc::allocator::RosAlloc::RunSlotNextOffset()))
// Assert this so that we can avoid zeroing the next field by installing the class pointer.
ADD_TEST_EQ(ROSALLOC_SLOT_NEXT_OFFSET, MIRROR_OBJECT_CLASS_OFFSET)

#if defined(__cplusplus)
}  // End of CheckAsmSupportOffsetsAndSizes.
#endif

#endif  // ART_RUNTIME_ASM_SUPPORT_H_