Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2012 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 17 | #include "entrypoints/quick/quick_alloc_entrypoints.h" |
| 18 | |
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 19 | #include "callee_save_frame.h" |
Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 20 | #include "entrypoints/entrypoint_utils-inl.h" |
Brian Carlstrom | ea46f95 | 2013-07-30 01:26:50 -0700 | [diff] [blame] | 21 | #include "mirror/art_method-inl.h" |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 22 | #include "mirror/class-inl.h" |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 23 | #include "mirror/object_array-inl.h" |
Ian Rogers | 4f6ad8a | 2013-03-18 15:27:28 -0700 | [diff] [blame] | 24 | #include "mirror/object-inl.h" |
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 25 | |
| 26 | namespace art { |
| 27 | |
// Enables the inline thread-local (TLAB) bump-pointer fast path in the
// object-allocation entrypoints generated below; when false every
// allocation goes through the generic AllocObjectFromCode* helpers.
static constexpr bool kUseTlabFastPath = true;
| 29 | |
// Generates one full family of "quick" allocation entrypoints
// (artAllocObjectFromCode*, artAllocArrayFromCode*,
// artCheckAndAllocArrayFromCode*, artAllocStringFrom*FromCode) for a single
// allocator. Generated symbol names are composed by token pasting as
// <base>##suffix##suffix2, e.g. artAllocObjectFromCodeTLABInstrumented.
//
//   suffix            - allocator name fragment (DlMalloc, RosAlloc, TLAB, ...).
//   suffix2           - "Instrumented" or empty.
//   instrumented_bool - compile-time bool selecting the instrumented
//                       allocation helpers.
//   allocator_type    - the gc::AllocatorType passed to the generic helpers.
//
// When kUseTlabFastPath is set and this instantiation is the
// non-instrumented TLAB allocator, the object entrypoints first attempt an
// inline thread-local bump-pointer allocation (Thread::AllocTlab) and only
// fall back to the generic helpers when the class is not resolved, not
// initialized, finalizable, or the TLAB lacks space. Note the fast path
// publishes the object with QuasiAtomic::ThreadFenceForConstructor() after
// installing the class pointer and (if enabled) the read-barrier pointer.
// NOTE: this is a backslash-continued macro; comments cannot be embedded in
// its body because line splicing happens before comment removal.
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \
extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx); \
    if (LIKELY(klass != nullptr && klass->IsInitialized() && !klass->IsFinalizable())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerOrBrooksReadBarrier) { \
          if (kUseBrooksReadBarrier) { \
            obj->SetReadBarrierPointer(obj); \
          } \
          obj->AssertReadBarrierPointer(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  return AllocObjectFromCode<false, instrumented_bool>(type_idx, method, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  UNUSED(method); \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    if (LIKELY(klass->IsInitialized())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerOrBrooksReadBarrier) { \
          if (kUseBrooksReadBarrier) { \
            obj->SetReadBarrierPointer(obj); \
          } \
          obj->AssertReadBarrierPointer(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  return AllocObjectFromCodeResolved<instrumented_bool>(klass, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \
    mirror::Class* klass, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  UNUSED(method); \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    size_t byte_count = klass->GetObjectSize(); \
    byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
    mirror::Object* obj; \
    if (LIKELY(byte_count < self->TlabSize())) { \
      obj = self->AllocTlab(byte_count); \
      DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
      obj->SetClass(klass); \
      if (kUseBakerOrBrooksReadBarrier) { \
        if (kUseBrooksReadBarrier) { \
          obj->SetReadBarrierPointer(obj); \
        } \
        obj->AssertReadBarrierPointer(); \
      } \
      QuasiAtomic::ThreadFenceForConstructor(); \
      return obj; \
    } \
  } \
  return AllocObjectFromCodeInitialized<instrumented_bool>(klass, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocObjectFromCode<true, instrumented_bool>(type_idx, method, self, allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCode<false, instrumented_bool>(type_idx, component_count, method, self, \
                                                      allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, int32_t component_count, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCodeResolved<false, instrumented_bool>(klass, component_count, method, self, \
                                                              allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCode<true, instrumented_bool>(type_idx, component_count, method, self, \
                                                     allocator_type); \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (!instrumented_bool) { \
    return CheckAndAllocArrayFromCode(type_idx, component_count, method, self, false, allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(type_idx, component_count, method, self, false, allocator_type); \
  } \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, mirror::ArtMethod* method, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (!instrumented_bool) { \
    return CheckAndAllocArrayFromCode(type_idx, component_count, method, self, true, allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(type_idx, component_count, method, self, true, allocator_type); \
  } \
} \
extern "C" mirror::String* artAllocStringFromBytesFromCode##suffix##suffix2( \
    mirror::ByteArray* byte_array, int32_t high, int32_t offset, int32_t byte_count, \
    Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  StackHandleScope<1> hs(self); \
  Handle<mirror::ByteArray> handle_array(hs.NewHandle(byte_array)); \
  return mirror::String::AllocFromByteArray<instrumented_bool>(self, byte_count, handle_array, \
                                                               offset, high, allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromCharsFromCode##suffix##suffix2( \
    int32_t offset, int32_t char_count, mirror::CharArray* char_array, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::CharArray> handle_array(hs.NewHandle(char_array)); \
  return mirror::String::AllocFromCharArray<instrumented_bool>(self, char_count, handle_array, \
                                                               offset, allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromStringFromCode##suffix##suffix2( \
    mirror::String* string, Thread* self) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::String> handle_string(hs.NewHandle(string)); \
  return mirror::String::AllocFromString<instrumented_bool>(self, handle_string->GetLength(), \
                                                            handle_string, 0, allocator_type); \
}
| 183 | |
// Expands to both flavors of the entrypoint family for one allocator:
// the instrumented variant (suffix2 = "Instrumented") and the plain one
// (suffix2 empty).
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(suffix, allocator_type) \
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, Instrumented, true, allocator_type) \
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, , false, allocator_type)
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 187 | |
// Instantiate the C++ allocation entrypoints for every supported allocator.
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(DlMalloc, gc::kAllocatorTypeDlMalloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RosAlloc, gc::kAllocatorTypeRosAlloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(BumpPointer, gc::kAllocatorTypeBumpPointer)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(TLAB, gc::kAllocatorTypeTLAB)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(Region, gc::kAllocatorTypeRegion)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RegionTLAB, gc::kAllocatorTypeRegionTLAB)
Hiroshi Yamauchi | 3b4c189 | 2013-09-12 21:33:12 -0700 | [diff] [blame] | 194 | |
// Declares the externally-defined assembly trampolines
// (art_quick_alloc_*##suffix and their ##_instrumented twins) and defines
// SetQuickAllocEntryPoints##suffix(), which points every allocation slot of
// the given QuickEntryPoints table at the instrumented or plain trampoline
// set depending on `instrumented`.
// NOTE: backslash-continued macro; comments cannot be embedded in its body.
#define GENERATE_ENTRYPOINTS(suffix) \
extern "C" void* art_quick_alloc_array##suffix(uint32_t, int32_t, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_resolved##suffix(mirror::Class* klass, int32_t, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix(uint32_t, int32_t, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_object##suffix(uint32_t type_idx, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_resolved##suffix(mirror::Class* klass, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_initialized##suffix(mirror::Class* klass, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix(uint32_t type_idx, mirror::ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array##suffix(uint32_t, int32_t, mirror::ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix(uint32_t, int32_t, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix(void*); \
extern "C" void* art_quick_alloc_array##suffix##_instrumented(uint32_t, int32_t, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(mirror::Class* klass, int32_t, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_object##suffix##_instrumented(uint32_t type_idx, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(mirror::Class* klass, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(mirror::Class* klass, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix##_instrumented(uint32_t type_idx, mirror::ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array##suffix##_instrumented(uint32_t, int32_t, mirror::ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, mirror::ArtMethod* ref); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix##_instrumented(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix##_instrumented(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix##_instrumented(void*); \
void SetQuickAllocEntryPoints##suffix(QuickEntryPoints* qpoints, bool instrumented) { \
  if (instrumented) { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix##_instrumented; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix##_instrumented; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix##_instrumented; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix##_instrumented; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix##_instrumented; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix##_instrumented; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix##_instrumented; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix##_instrumented; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix##_instrumented; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix##_instrumented; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix##_instrumented; \
  } else { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix; \
  } \
}
| 249 | |
// Generate the entrypoint functions.
// NOTE(review): the guard suggests the assembly trampolines are not built
// for 64-bit Apple hosts; on that configuration ResetQuickAllocEntryPoints()
// aborts with UNIMPLEMENTED(FATAL) instead — confirm against the build files.
#if !defined(__APPLE__) || !defined(__LP64__)
GENERATE_ENTRYPOINTS(_dlmalloc)
GENERATE_ENTRYPOINTS(_rosalloc)
GENERATE_ENTRYPOINTS(_bump_pointer)
GENERATE_ENTRYPOINTS(_tlab)
GENERATE_ENTRYPOINTS(_region)
GENERATE_ENTRYPOINTS(_region_tlab)
#endif
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 259 | |
// Currently selected instrumentation flag and allocator; read by
// ResetQuickAllocEntryPoints() when (re)installing the entrypoint table.
static bool entry_points_instrumented = false;
static gc::AllocatorType entry_points_allocator = gc::kAllocatorTypeDlMalloc;
| 262 | |
// Records which allocator the next ResetQuickAllocEntryPoints() call should
// install entrypoints for. Does not itself touch any entrypoint table.
void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) {
  entry_points_allocator = allocator;
}
| 266 | |
// Records whether the next ResetQuickAllocEntryPoints() call should install
// the instrumented trampolines. Does not itself touch any entrypoint table.
void SetQuickAllocEntryPointsInstrumented(bool instrumented) {
  entry_points_instrumented = instrumented;
}
| 270 | |
| 271 | void ResetQuickAllocEntryPoints(QuickEntryPoints* qpoints) { |
Andreas Gampe | 48cc32c | 2015-04-07 02:53:04 +0000 | [diff] [blame] | 272 | #if !defined(__APPLE__) || !defined(__LP64__) |
Ian Rogers | de2db52 | 2014-11-04 14:43:18 -0800 | [diff] [blame] | 273 | switch (entry_points_allocator) { |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 274 | case gc::kAllocatorTypeDlMalloc: { |
| 275 | SetQuickAllocEntryPoints_dlmalloc(qpoints, entry_points_instrumented); |
Ian Rogers | 7dc9c81 | 2014-11-04 15:10:55 -0800 | [diff] [blame] | 276 | return; |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 277 | } |
| 278 | case gc::kAllocatorTypeRosAlloc: { |
| 279 | SetQuickAllocEntryPoints_rosalloc(qpoints, entry_points_instrumented); |
Ian Rogers | 7dc9c81 | 2014-11-04 15:10:55 -0800 | [diff] [blame] | 280 | return; |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 281 | } |
| 282 | case gc::kAllocatorTypeBumpPointer: { |
| 283 | CHECK(kMovingCollector); |
| 284 | SetQuickAllocEntryPoints_bump_pointer(qpoints, entry_points_instrumented); |
Ian Rogers | 7dc9c81 | 2014-11-04 15:10:55 -0800 | [diff] [blame] | 285 | return; |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 286 | } |
| 287 | case gc::kAllocatorTypeTLAB: { |
| 288 | CHECK(kMovingCollector); |
| 289 | SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented); |
Ian Rogers | 7dc9c81 | 2014-11-04 15:10:55 -0800 | [diff] [blame] | 290 | return; |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 291 | } |
Hiroshi Yamauchi | 2cd334a | 2015-01-09 14:03:35 -0800 | [diff] [blame] | 292 | case gc::kAllocatorTypeRegion: { |
| 293 | CHECK(kMovingCollector); |
| 294 | SetQuickAllocEntryPoints_region(qpoints, entry_points_instrumented); |
| 295 | return; |
| 296 | } |
| 297 | case gc::kAllocatorTypeRegionTLAB: { |
| 298 | CHECK(kMovingCollector); |
| 299 | SetQuickAllocEntryPoints_region_tlab(qpoints, entry_points_instrumented); |
| 300 | return; |
| 301 | } |
Andreas Gampe | 48cc32c | 2015-04-07 02:53:04 +0000 | [diff] [blame] | 302 | default: |
| 303 | break; |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 304 | } |
Andreas Gampe | 48cc32c | 2015-04-07 02:53:04 +0000 | [diff] [blame] | 305 | #else |
| 306 | UNUSED(qpoints); |
| 307 | #endif |
| 308 | UNIMPLEMENTED(FATAL); |
Ian Rogers | de2db52 | 2014-11-04 14:43:18 -0800 | [diff] [blame] | 309 | UNREACHABLE(); |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 310 | } |
| 311 | |
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 312 | } // namespace art |