/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include <unordered_set>
#include <vector>

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "jvmti_weak_table-inl.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "primitive.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace openjdkjvmti {

namespace {

struct IndexCache {
  // The number of interface fields implemented by the class. This is a prefix to all assigned
  // field indices.
  size_t interface_fields;

  // It would be nice to also cache the following, but it is complicated to wire up into the
  // generic visit:
  // The number of fields in interfaces and superclasses. This is the first index assigned to
  // fields of the class.
  // size_t superclass_fields;
};
using IndexCachingTable = JvmtiWeakTable<IndexCache>;

static IndexCachingTable gIndexCachingTable;

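// Illustration (not part of the original sources): field indices reported through the
// JVMTI heap callbacks form one contiguous sequence — first the static fields of all
// implemented interfaces, then the fields of the superclass chain starting at
// java.lang.Object, then the fields of the class itself. For a hypothetical hierarchy
//
//   interface I { int SF = 0; }       // one interface static field
//   class A implements I { int a; }   // one instance field
//   class B extends A { int b; }      // one instance field
//
// and ignoring any fields contributed by java.lang.Object itself, B's indices would be
// I.SF -> 0, A.a -> 1, B.b -> 2. IndexCache::interface_fields caches the length of that
// interface prefix (1 in this example) per class.
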
// Report the contents of a string, if a callback is set.
jint ReportString(art::ObjPtr<art::mirror::Object> obj,
                  jvmtiEnv* env,
                  ObjectTagTable* tag_table,
                  const jvmtiHeapCallbacks* cb,
                  const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->string_primitive_value_callback != nullptr) && obj->IsString()) {
    art::ObjPtr<art::mirror::String> str = obj->AsString();
    int32_t string_length = str->GetLength();
    JvmtiUniquePtr<uint16_t[]> data;

    if (string_length > 0) {
      jvmtiError alloc_error;
      data = AllocJvmtiUniquePtr<uint16_t[]>(env, string_length, &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        //       back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for string reporting! Silently dropping value."
                     << " >" << str->ToModifiedUtf8() << "<";
        return 0;
      }

      if (str->IsCompressed()) {
        uint8_t* compressed_data = str->GetValueCompressed();
        for (int32_t i = 0; i != string_length; ++i) {
          data[i] = compressed_data[i];
        }
      } else {
        // Can copy directly.
        memcpy(data.get(), str->GetValue(), string_length * sizeof(uint16_t));
      }
    }

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong string_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_string_tag = string_tag;

    jint result = cb->string_primitive_value_callback(class_tag,
                                                      obj->SizeOf(),
                                                      &string_tag,
                                                      data.get(),
                                                      string_length,
                                                      const_cast<void*>(user_data));
    if (string_tag != saved_string_tag) {
      tag_table->Set(obj.Ptr(), string_tag);
    }

    return result;
  }
  return 0;
}

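// Example (illustrative agent-side code, not part of this file): a minimal
// jvmtiStringPrimitiveValueCallback matching what ReportString invokes above. The name
// OnStringValue and the size_t user-data slot are hypothetical; jvmti.h is assumed.
//
//   static jint JNICALL OnStringValue(jlong class_tag, jlong size, jlong* tag_ptr,
//                                     const jchar* value, jint value_length,
//                                     void* user_data) {
//     // Sum the lengths (in UTF-16 code units) of all reported strings.
//     *static_cast<size_t*>(user_data) += value_length;
//     return 0;  // Returning JVMTI_VISIT_ABORT instead would stop the iteration.
//   }
//
//   jvmtiHeapCallbacks callbacks = {};
//   callbacks.string_primitive_value_callback = OnStringValue;
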
// Report the contents of a primitive array, if a callback is set.
jint ReportPrimitiveArray(art::ObjPtr<art::mirror::Object> obj,
                          jvmtiEnv* env,
                          ObjectTagTable* tag_table,
                          const jvmtiHeapCallbacks* cb,
                          const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->array_primitive_value_callback != nullptr) &&
      obj->IsArrayInstance() &&
      !obj->IsObjectArray()) {
    art::ObjPtr<art::mirror::Array> array = obj->AsArray();
    int32_t array_length = array->GetLength();
    size_t component_size = array->GetClass()->GetComponentSize();
    art::Primitive::Type art_prim_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong array_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_array_tag = array_tag;

    jint result;
    if (array_length == 0) {
      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  0,
                                                  prim_type,
                                                  nullptr,
                                                  const_cast<void*>(user_data));
    } else {
      jvmtiError alloc_error;
      JvmtiUniquePtr<char[]> data = AllocJvmtiUniquePtr<char[]>(env,
                                                                array_length * component_size,
                                                                &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        //       back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for array reporting! Silently dropping value.";
        return 0;
      }

      memcpy(data.get(), array->GetRawData(component_size, 0), array_length * component_size);

      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  array_length,
                                                  prim_type,
                                                  data.get(),
                                                  const_cast<void*>(user_data));
    }

    if (array_tag != saved_array_tag) {
      tag_table->Set(obj.Ptr(), array_tag);
    }

    return result;
  }
  return 0;
}

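// Example (illustrative agent-side code): a jvmtiArrayPrimitiveValueCallback, as invoked
// by ReportPrimitiveArray above, that tallies the payload bytes of all primitive arrays.
// Names are hypothetical.
//
//   static jint JNICALL OnPrimitiveArray(jlong class_tag, jlong size, jlong* tag_ptr,
//                                        jint element_count,
//                                        jvmtiPrimitiveType element_type,
//                                        const void* elements, void* user_data) {
//     size_t element_size;
//     switch (element_type) {
//       case JVMTI_PRIMITIVE_TYPE_BOOLEAN:
//       case JVMTI_PRIMITIVE_TYPE_BYTE:  element_size = 1; break;
//       case JVMTI_PRIMITIVE_TYPE_CHAR:
//       case JVMTI_PRIMITIVE_TYPE_SHORT: element_size = 2; break;
//       case JVMTI_PRIMITIVE_TYPE_INT:
//       case JVMTI_PRIMITIVE_TYPE_FLOAT: element_size = 4; break;
//       default:                         element_size = 8; break;  // LONG, DOUBLE.
//     }
//     *static_cast<size_t*>(user_data) += element_count * element_size;
//     return 0;
//   }
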
template <typename UserData>
bool VisitorFalse(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
                  art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
                  art::ArtField& field ATTRIBUTE_UNUSED,
                  size_t field_index ATTRIBUTE_UNUSED,
                  UserData* user_data ATTRIBUTE_UNUSED) {
  return false;
}

template <typename UserData, bool kCallVisitorOnRecursion>
class FieldVisitor {
 public:
  // Report the contents of the fields of the given object, dispatching to the given visitors.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  static bool ReportFields(art::ObjPtr<art::mirror::Object> obj,
                           UserData* user_data,
                           StaticPrimitiveVisitor& static_prim_visitor,
                           StaticReferenceVisitor& static_ref_visitor,
                           InstancePrimitiveVisitor& instance_prim_visitor,
                           InstanceReferenceVisitor& instance_ref_visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    FieldVisitor fv(user_data);

    if (obj->IsClass()) {
      // When visiting a class, we only visit the static fields of the given class. No field of
      // superclasses is visited.
      art::ObjPtr<art::mirror::Class> klass = obj->AsClass();
      // Only report fields on resolved classes. We need valid field data.
      if (!klass->IsResolved()) {
        return false;
      }
      return fv.ReportFieldsImpl(nullptr,
                                 obj->AsClass(),
                                 obj->AsClass()->IsInterface(),
                                 static_prim_visitor,
                                 static_ref_visitor,
                                 instance_prim_visitor,
                                 instance_ref_visitor);
    } else {
      // See comment above. Just double-checking here, but an instance *should* mean the class was
      // resolved.
      DCHECK(obj->GetClass()->IsResolved() || obj->GetClass()->IsErroneousResolved());
      return fv.ReportFieldsImpl(obj,
                                 obj->GetClass(),
                                 false,
                                 static_prim_visitor,
                                 static_ref_visitor,
                                 instance_prim_visitor,
                                 instance_ref_visitor);
    }
  }

 private:
  explicit FieldVisitor(UserData* user_data) : user_data_(user_data) {}

  // Report the contents of the fields of the given object. If obj is null, report the static
  // fields, otherwise the instance fields.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  bool ReportFieldsImpl(art::ObjPtr<art::mirror::Object> obj,
                        art::ObjPtr<art::mirror::Class> klass,
                        bool skip_java_lang_object,
                        StaticPrimitiveVisitor& static_prim_visitor,
                        StaticReferenceVisitor& static_ref_visitor,
                        InstancePrimitiveVisitor& instance_prim_visitor,
                        InstanceReferenceVisitor& instance_ref_visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Compute the offset of field indices.
    size_t interface_field_count = CountInterfaceFields(klass);

    size_t tmp;
    bool aborted = ReportFieldsRecursive(obj,
                                         klass,
                                         interface_field_count,
                                         skip_java_lang_object,
                                         static_prim_visitor,
                                         static_ref_visitor,
                                         instance_prim_visitor,
                                         instance_ref_visitor,
                                         &tmp);
    return aborted;
  }

  // Visit the fields of the given class, recursing into superclasses first so that field
  // indices are assigned consistently. Returns true if the visit was aborted.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  bool ReportFieldsRecursive(art::ObjPtr<art::mirror::Object> obj,
                             art::ObjPtr<art::mirror::Class> klass,
                             size_t interface_fields,
                             bool skip_java_lang_object,
                             StaticPrimitiveVisitor& static_prim_visitor,
                             StaticReferenceVisitor& static_ref_visitor,
                             InstancePrimitiveVisitor& instance_prim_visitor,
                             InstanceReferenceVisitor& instance_ref_visitor,
                             size_t* field_index_out)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK(klass != nullptr);
    size_t field_index;
    if (klass->GetSuperClass() == nullptr) {
      // j.l.Object. Start with the fields from interfaces.
      field_index = interface_fields;
      if (skip_java_lang_object) {
        *field_index_out = field_index;
        return false;
      }
    } else {
      // Report superclass fields.
      if (kCallVisitorOnRecursion) {
        if (ReportFieldsRecursive(obj,
                                  klass->GetSuperClass(),
                                  interface_fields,
                                  skip_java_lang_object,
                                  static_prim_visitor,
                                  static_ref_visitor,
                                  instance_prim_visitor,
                                  instance_ref_visitor,
                                  &field_index)) {
          return true;
        }
      } else {
        // Still call, but with empty visitor. This is required for correct counting.
        ReportFieldsRecursive(obj,
                              klass->GetSuperClass(),
                              interface_fields,
                              skip_java_lang_object,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              &field_index);
      }
    }

    // Now visit fields for the current klass.

    for (auto& static_field : klass->GetSFields()) {
      if (static_field.IsPrimitiveType()) {
        if (static_prim_visitor(obj,
                                klass,
                                static_field,
                                field_index,
                                user_data_)) {
          return true;
        }
      } else {
        if (static_ref_visitor(obj,
                               klass,
                               static_field,
                               field_index,
                               user_data_)) {
          return true;
        }
      }
      field_index++;
    }

    for (auto& instance_field : klass->GetIFields()) {
      if (instance_field.IsPrimitiveType()) {
        if (instance_prim_visitor(obj,
                                  klass,
                                  instance_field,
                                  field_index,
                                  user_data_)) {
          return true;
        }
      } else {
        if (instance_ref_visitor(obj,
                                 klass,
                                 instance_field,
                                 field_index,
                                 user_data_)) {
          return true;
        }
      }
      field_index++;
    }

    *field_index_out = field_index;
    return false;
  }

  // Implements a visit of the implemented interfaces of a given class.
  template <typename T>
  struct RecursiveInterfaceVisit {
    static void VisitStatic(art::Thread* self, art::ObjPtr<art::mirror::Class> klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      RecursiveInterfaceVisit rv;
      rv.Visit(self, klass, visitor);
    }

    void Visit(art::Thread* self, art::ObjPtr<art::mirror::Class> klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // First visit the parent, to get the order right.
      // (We do this in preparation for actual visiting of interface fields.)
      if (klass->GetSuperClass() != nullptr) {
        Visit(self, klass->GetSuperClass(), visitor);
      }
      for (uint32_t i = 0; i != klass->NumDirectInterfaces(); ++i) {
        art::ObjPtr<art::mirror::Class> inf_klass =
            art::mirror::Class::GetDirectInterface(self, klass, i);
        DCHECK(inf_klass != nullptr);
        VisitInterface(self, inf_klass, visitor);
      }
    }

    void VisitInterface(art::Thread* self, art::ObjPtr<art::mirror::Class> inf_klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      auto it = visited_interfaces.find(inf_klass.Ptr());
      if (it != visited_interfaces.end()) {
        return;
      }
      visited_interfaces.insert(inf_klass.Ptr());

      // Let the visitor know about this one. Note that this order is acceptable, as the ordering
      // of these fields never matters for known visitors.
      visitor(inf_klass);

      // Now visit the superinterfaces.
      for (uint32_t i = 0; i != inf_klass->NumDirectInterfaces(); ++i) {
        art::ObjPtr<art::mirror::Class> super_inf_klass =
            art::mirror::Class::GetDirectInterface(self, inf_klass, i);
        DCHECK(super_inf_klass != nullptr);
        VisitInterface(self, super_inf_klass, visitor);
      }
    }

    std::unordered_set<art::mirror::Class*> visited_interfaces;
  };

  // Count interface fields. Note that we cannot use the interface table, as that only contains
  // "non-marker" interfaces (= interfaces with methods).
  static size_t CountInterfaceFields(art::ObjPtr<art::mirror::Class> klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Do we have a cached value?
    IndexCache tmp;
    if (gIndexCachingTable.GetTag(klass.Ptr(), &tmp)) {
      return tmp.interface_fields;
    }

    size_t count = 0;
    auto visitor = [&count](art::ObjPtr<art::mirror::Class> inf_klass)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      DCHECK(inf_klass->IsInterface());
      DCHECK_EQ(0u, inf_klass->NumInstanceFields());
      count += inf_klass->NumStaticFields();
    };
    RecursiveInterfaceVisit<decltype(visitor)>::VisitStatic(art::Thread::Current(), klass, visitor);

    // Store this into the cache.
    tmp.interface_fields = count;
    gIndexCachingTable.Set(klass.Ptr(), tmp);

    return count;
  }

  UserData* user_data_;
};

// Debug helper. Prints the structure of an object.
template <bool kStatic, bool kRef>
struct DumpVisitor {
  static bool Callback(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
                       art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
                       art::ArtField& field,
                       size_t field_index,
                       void* user_data ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    LOG(ERROR) << (kStatic ? "static " : "instance ")
               << (kRef ? "ref " : "primitive ")
               << field.PrettyField()
               << " @ "
               << field_index;
    return false;
  }
};
ATTRIBUTE_UNUSED
void DumpObjectFields(art::ObjPtr<art::mirror::Object> obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (obj->IsClass()) {
    FieldVisitor<void, false>::ReportFields(obj,
                                            nullptr,
                                            DumpVisitor<true, false>::Callback,
                                            DumpVisitor<true, true>::Callback,
                                            DumpVisitor<false, false>::Callback,
                                            DumpVisitor<false, true>::Callback);
  } else {
    FieldVisitor<void, true>::ReportFields(obj,
                                           nullptr,
                                           DumpVisitor<true, false>::Callback,
                                           DumpVisitor<true, true>::Callback,
                                           DumpVisitor<false, false>::Callback,
                                           DumpVisitor<false, true>::Callback);
  }
}

class ReportPrimitiveField {
 public:
  static bool Report(art::ObjPtr<art::mirror::Object> obj,
                     ObjectTagTable* tag_table,
                     const jvmtiHeapCallbacks* cb,
                     const void* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (UNLIKELY(cb->primitive_field_callback != nullptr)) {
      jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
      ReportPrimitiveField rpf(tag_table, class_tag, cb, user_data);
      if (obj->IsClass()) {
        return FieldVisitor<ReportPrimitiveField, false>::ReportFields(
            obj,
            &rpf,
            ReportPrimitiveFieldCallback<true>,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>);
      } else {
        return FieldVisitor<ReportPrimitiveField, true>::ReportFields(
            obj,
            &rpf,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>,
            ReportPrimitiveFieldCallback<false>,
            VisitorFalse<ReportPrimitiveField>);
      }
    }
    return false;
  }

 private:
  ReportPrimitiveField(ObjectTagTable* tag_table,
                       jlong class_tag,
                       const jvmtiHeapCallbacks* cb,
                       const void* user_data)
      : tag_table_(tag_table), class_tag_(class_tag), cb_(cb), user_data_(user_data) {}

  template <bool kReportStatic>
  static bool ReportPrimitiveFieldCallback(art::ObjPtr<art::mirror::Object> obj,
                                           art::ObjPtr<art::mirror::Class> klass,
                                           art::ArtField& field,
                                           size_t field_index,
                                           ReportPrimitiveField* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    art::Primitive::Type art_prim_type = field.GetTypeAsPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);
    jvmtiHeapReferenceInfo info;
    info.field.index = field_index;

    jvalue value;
    memset(&value, 0, sizeof(jvalue));
    art::ObjPtr<art::mirror::Object> src = kReportStatic ? klass : obj;
    switch (art_prim_type) {
      case art::Primitive::Type::kPrimBoolean:
        value.z = field.GetBoolean(src) == 0 ? JNI_FALSE : JNI_TRUE;
        break;
      case art::Primitive::Type::kPrimByte:
        value.b = field.GetByte(src);
        break;
      case art::Primitive::Type::kPrimChar:
        value.c = field.GetChar(src);
        break;
      case art::Primitive::Type::kPrimShort:
        value.s = field.GetShort(src);
        break;
      case art::Primitive::Type::kPrimInt:
        value.i = field.GetInt(src);
        break;
      case art::Primitive::Type::kPrimLong:
        value.j = field.GetLong(src);
        break;
      case art::Primitive::Type::kPrimFloat:
        value.f = field.GetFloat(src);
        break;
      case art::Primitive::Type::kPrimDouble:
        value.d = field.GetDouble(src);
        break;
      case art::Primitive::Type::kPrimVoid:
      case art::Primitive::Type::kPrimNot: {
        LOG(FATAL) << "Should not reach here";
        UNREACHABLE();
      }
    }

    jlong obj_tag = user_data->tag_table_->GetTagOrZero(src.Ptr());
    const jlong saved_obj_tag = obj_tag;

    jint ret = user_data->cb_->primitive_field_callback(kReportStatic
                                                            ? JVMTI_HEAP_REFERENCE_STATIC_FIELD
                                                            : JVMTI_HEAP_REFERENCE_FIELD,
                                                        &info,
                                                        user_data->class_tag_,
                                                        &obj_tag,
                                                        value,
                                                        prim_type,
                                                        const_cast<void*>(user_data->user_data_));

    if (saved_obj_tag != obj_tag) {
      user_data->tag_table_->Set(src.Ptr(), obj_tag);
    }

    if ((ret & JVMTI_VISIT_ABORT) != 0) {
      return true;
    }

    return false;
  }

  ObjectTagTable* tag_table_;
  jlong class_tag_;
  const jvmtiHeapCallbacks* cb_;
  const void* user_data_;
};

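// Example (illustrative agent-side code): a jvmtiPrimitiveFieldCallback as invoked by
// ReportPrimitiveField above. The value arrives as a jvalue plus its jvmtiPrimitiveType
// tag; info->field.index uses the interface/superclass-prefixed indexing scheme.
// Names are hypothetical.
//
//   static jint JNICALL OnPrimitiveField(jvmtiHeapReferenceKind kind,
//                                        const jvmtiHeapReferenceInfo* info,
//                                        jlong object_class_tag, jlong* object_tag_ptr,
//                                        jvalue value, jvmtiPrimitiveType value_type,
//                                        void* user_data) {
//     if (value_type == JVMTI_PRIMITIVE_TYPE_INT) {
//       *static_cast<jlong*>(user_data) += value.i;  // Sum all int fields seen.
//     }
//     return 0;
//   }
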
struct HeapFilter {
  explicit HeapFilter(jint heap_filter)
      : filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) const {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;
};
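
// Example (illustration): with JVMTI_HEAP_FILTER_UNTAGGED | JVMTI_HEAP_FILTER_CLASS_TAGGED,
// only objects that are themselves tagged and whose class is untagged are reported:
//
//   HeapFilter filter(JVMTI_HEAP_FILTER_UNTAGGED | JVMTI_HEAP_FILTER_CLASS_TAGGED);
//   filter.ShouldReportByHeapFilter(/* tag */ 1, /* class_tag */ 0);  // true
//   filter.ShouldReportByHeapFilter(/* tag */ 0, /* class_tag */ 0);  // false: object untagged
//   filter.ShouldReportByHeapFilter(/* tag */ 1, /* class_tag */ 5);  // false: class tagged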

}  // namespace

void HeapUtil::Register() {
  art::Runtime::Current()->AddSystemWeakHolder(&gIndexCachingTable);
}

void HeapUtil::Unregister() {
  art::Runtime::Current()->RemoveSystemWeakHolder(&gIndexCachingTable);
}

template <typename Callback>
struct IterateThroughHeapData {
  IterateThroughHeapData(Callback _cb,
                         ObjectTagTable* _tag_table,
                         jvmtiEnv* _env,
                         art::ObjPtr<art::mirror::Class> klass,
                         jint _heap_filter,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : cb(_cb),
        tag_table(_tag_table),
        heap_filter(_heap_filter),
        filter_klass(klass),
        env(_env),
        callbacks(_callbacks),
        user_data(_user_data),
        stop_reports(false) {
  }

  static void ObjectCallback(art::mirror::Object* obj, void* arg)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
    ithd->ObjectCallback(obj);
  }

  void ObjectCallback(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Early return, as we can't really stop visiting.
    if (stop_reports) {
      return;
    }

    art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

    jlong tag = 0;
    tag_table->GetTag(obj, &tag);

    jlong class_tag = 0;
    art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
    tag_table->GetTag(klass.Ptr(), &class_tag);
    // For simplicity, even if we find a tag = 0, assume 0 = not tagged.

    if (!heap_filter.ShouldReportByHeapFilter(tag, class_tag)) {
      return;
    }

    if (filter_klass != nullptr) {
      if (filter_klass != klass) {
        return;
      }
    }

    jlong size = obj->SizeOf();

    jint length = -1;
    if (obj->IsArrayInstance()) {
      length = obj->AsArray()->GetLength();
    }

    jlong saved_tag = tag;
    jint ret = cb(obj, callbacks, class_tag, size, &tag, length, const_cast<void*>(user_data));

    if (tag != saved_tag) {
      tag_table->Set(obj, tag);
    }

    stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

    if (!stop_reports) {
      jint string_ret = ReportString(obj, env, tag_table, callbacks, user_data);
      stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
    }

    if (!stop_reports) {
      jint array_ret = ReportPrimitiveArray(obj, env, tag_table, callbacks, user_data);
      stop_reports = (array_ret & JVMTI_VISIT_ABORT) != 0;
    }

    if (!stop_reports) {
      stop_reports = ReportPrimitiveField::Report(obj, tag_table, callbacks, user_data);
    }
  }

  Callback cb;
  ObjectTagTable* tag_table;
  const HeapFilter heap_filter;
  art::ObjPtr<art::mirror::Class> filter_klass;
  jvmtiEnv* env;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;

  bool stop_reports;
};

template <typename T>
static jvmtiError DoIterateThroughHeap(T fn,
                                       jvmtiEnv* env,
                                       ObjectTagTable* tag_table,
                                       jint heap_filter,
                                       jclass klass,
                                       const jvmtiHeapCallbacks* callbacks,
                                       const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.

  using Iterator = IterateThroughHeapData<T>;
  Iterator ithd(fn,
                tag_table,
                env,
                soa.Decode<art::mirror::Class>(klass),
                heap_filter,
                callbacks,
                user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(Iterator::ObjectCallback, &ithd);

  return ERR(NONE);
}

jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  auto JvmtiIterateHeap = [](art::mirror::Object* obj ATTRIBUTE_UNUSED,
                             const jvmtiHeapCallbacks* cb_callbacks,
                             jlong class_tag,
                             jlong size,
                             jlong* tag,
                             jint length,
                             void* cb_user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    return cb_callbacks->heap_iteration_callback(class_tag,
                                                 size,
                                                 tag,
                                                 length,
                                                 cb_user_data);
  };
  return DoIterateThroughHeap(JvmtiIterateHeap,
                              env,
                              ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get(),
                              heap_filter,
                              klass,
                              callbacks,
                              user_data);
}

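// Example (illustrative agent-side usage of IterateThroughHeap above, assuming a
// jvmtiEnv* `jvmti` whose environment holds the can_tag_objects capability):
//
//   static jint JNICALL OnHeapObject(jlong class_tag, jlong size, jlong* tag_ptr,
//                                    jint length, void* user_data) {
//     *static_cast<jlong*>(user_data) += size;  // Sum the sizes of all visited objects.
//     return JVMTI_VISIT_OBJECTS;
//   }
//
//   jlong total_bytes = 0;
//   jvmtiHeapCallbacks callbacks = {};
//   callbacks.heap_iteration_callback = OnHeapObject;
//   jvmti->IterateThroughHeap(0 /* heap_filter */, nullptr /* klass */, &callbacks, &total_bytes);
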
class FollowReferencesHelper FINAL {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         jvmtiEnv* jvmti_env,
                         art::ObjPtr<art::mirror::Object> initial_object,
                         const jvmtiHeapCallbacks* callbacks,
                         art::ObjPtr<art::mirror::Class> class_filter,
                         jint heap_filter,
                         const void* user_data)
      : env(jvmti_env),
        tag_table_(h->GetTags()),
        initial_object_(initial_object),
        callbacks_(callbacks),
        class_filter_(class_filter),
        heap_filter_(heap_filter),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (initial_object_.IsNull()) {
      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);

      // We need precise info (e.g., vregs).
      constexpr art::VisitRootFlags kRootFlags = static_cast<art::VisitRootFlags>(
          art::VisitRootFlags::kVisitRootFlagAllRoots | art::VisitRootFlags::kVisitRootFlagPrecise);
      art::Runtime::Current()->VisitRoots(&carrv, kRootFlags);

      art::Runtime::Current()->VisitImageRoots(&carrv);
      stop_reports_ = carrv.IsStopReports();

      if (stop_reports_) {
        worklist_.clear();
      }
    } else {
      visited_.insert(initial_object_.Ptr());
      worklist_.push_back(initial_object_.Ptr());
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we can implement a DFS and use the work list as a stack.
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        OVERRIDE
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      if (stop_reports_) {
        return;
      }
      bool add_to_worklist = ReportRoot(root_obj, info);
      // We use visited_ to mark roots already so we do not need another set.
      if (visited_->find(root_obj) == visited_->end()) {
        if (add_to_worklist) {
          visited_->insert(root_obj);
          worklist_->push_back(root_obj);
        }
      }
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have this info.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->stack_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->stack_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          auto& java_info = static_cast<const art::JavaFrameRootInfo&>(info);
          ref_info->stack_local.slot = static_cast<jint>(java_info.GetVReg());
          const art::StackVisitor* visitor = java_info.GetVisitor();
          ref_info->stack_local.location =
              static_cast<jlocation>(visitor->GetDexPc(false /* abort_on_failure */));
          ref_info->stack_local.depth = static_cast<jint>(visitor->GetFrameDepth());
          art::ArtMethod* method = visitor->GetMethod();
          if (method != nullptr) {
            ref_info->stack_local.method = art::jni::EncodeArtMethod(method);
          }

          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;
        }

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    bool ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
      return (result & JVMTI_VISIT_OBJECTS) != 0;
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // All instance fields.
    auto report_instance_field = [&](art::ObjPtr<art::mirror::Object> src,
                                     art::ObjPtr<art::mirror::Class> obj_klass ATTRIBUTE_UNUSED,
                                     art::ArtField& field,
                                     size_t field_index,
                                     void* user_data ATTRIBUTE_UNUSED)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      art::ObjPtr<art::mirror::Object> field_value = field.GetObject(src);
      if (field_value != nullptr) {
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        reference_info.field.index = field_index;

        jvmtiHeapReferenceKind kind =
            field.GetOffset().Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                ? JVMTI_HEAP_REFERENCE_CLASS
                : JVMTI_HEAP_REFERENCE_FIELD;
        const jvmtiHeapReferenceInfo* reference_info_ptr =
            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

        return !ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src.Ptr(), field_value.Ptr());
      }
      return false;
    };
    stop_reports_ = FieldVisitor<void, true>::ReportFields(obj,
                                                           nullptr,
                                                           VisitorFalse<void>,
                                                           VisitorFalse<void>,
                                                           VisitorFalse<void>,
                                                           report_instance_field);
    if (stop_reports_) {
      return;
    }

    jint string_ret = ReportString(obj, env, tag_table_, callbacks_, user_data_);
    stop_reports_ = (string_ret & JVMTI_VISIT_ABORT) != 0;
    if (stop_reports_) {
      return;
    }

    stop_reports_ = ReportPrimitiveField::Report(obj, tag_table_, callbacks_, user_data_);
  }

  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
          array->AsObjectArray<art::mirror::Object>();
      int32_t length = obj_array->GetLength();
      for (int32_t i = 0; i != length; ++i) {
        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
        if (elem != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          reference_info.array.index = i;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem);
          if (stop_reports_) {
            break;
          }
        }
      }
    } else {
      if (!stop_reports_) {
        jint array_ret = ReportPrimitiveArray(array, env, tag_table_, callbacks_, user_data_);
        stop_reports_ = (array_ret & JVMTI_VISIT_ABORT) != 0;
      }
    }
  }

  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    //       fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    auto report_static_field = [&](art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
                                   art::ObjPtr<art::mirror::Class> obj_klass,
                                   art::ArtField& field,
                                   size_t field_index,
                                   void* user_data ATTRIBUTE_UNUSED)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      art::ObjPtr<art::mirror::Object> field_value = field.GetObject(obj_klass);
      if (field_value != nullptr) {
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        reference_info.field.index = static_cast<jint>(field_index);

        return !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                            &reference_info,
                                            obj_klass.Ptr(),
                                            field_value.Ptr());
      }
      return false;
    };
    stop_reports_ = FieldVisitor<void, false>::ReportFields(klass,
                                                            nullptr,
                                                            VisitorFalse<void>,
                                                            report_static_field,
                                                            VisitorFalse<void>,
                                                            VisitorFalse<void>);
    if (stop_reports_) {
      return;
    }

    stop_reports_ = ReportPrimitiveField::Report(klass, tag_table_, callbacks_, user_data_);
  }

  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referree,
                                   art::mirror::Object* referrer)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referree, referrer);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referrer);
      }
      return true;
    } else {
      return false;
    }
  }

  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    if (UNLIKELY(class_filter_ != nullptr) && class_filter_ != referree->GetClass()) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    jlong tag = tag_table_->GetTagOrZero(referree);

    if (!heap_filter_.ShouldReportByHeapFilter(tag, class_tag)) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }

    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  jvmtiEnv* env;
  ObjectTagTable* tag_table_;
  art::ObjPtr<art::mirror::Object> initial_object_;
  const jvmtiHeapCallbacks* callbacks_;
  art::ObjPtr<art::mirror::Class> class_filter_;
  const HeapFilter heap_filter_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env,
                                      jint heap_filter,
                                      jclass klass,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    art::ObjPtr<art::mirror::Class> class_filter = klass == nullptr
        ? nullptr
        : art::ObjPtr<art::mirror::Class>::DownCast(self->DecodeJObject(klass));
    FollowReferencesHelper frh(this,
                               env,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               class_filter,
                               heap_filter,
                               user_data);
    frh.Init();
    frh.Work();
  }
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}

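// Example (illustrative agent-side usage): FollowReferences walks the object graph from
// the roots (or from initial_object) and reports each edge through the
// heap_reference_callback. Names are hypothetical.
//
//   static jint JNICALL OnReference(jvmtiHeapReferenceKind kind,
//                                   const jvmtiHeapReferenceInfo* info,
//                                   jlong class_tag, jlong referrer_class_tag, jlong size,
//                                   jlong* tag_ptr, jlong* referrer_tag_ptr, jint length,
//                                   void* user_data) {
//     if (kind == JVMTI_HEAP_REFERENCE_FIELD && info != nullptr) {
//       // info->field.index uses the interface/superclass-prefixed indexing scheme above.
//     }
//     return JVMTI_VISIT_OBJECTS;  // Keep following references out of this object.
//   }
//
//   jvmtiHeapCallbacks callbacks = {};
//   callbacks.heap_reference_callback = OnReference;
//   jvmti->FollowReferences(0, nullptr, nullptr /* start at the roots */, &callbacks, nullptr);
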
jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}

jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}

static constexpr jint kHeapIdDefault = 0;
static constexpr jint kHeapIdImage = 1;
static constexpr jint kHeapIdZygote = 2;
static constexpr jint kHeapIdApp = 3;

static jint GetHeapId(art::ObjPtr<art::mirror::Object> obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (obj == nullptr) {
    return -1;
  }

  art::gc::Heap* const heap = art::Runtime::Current()->GetHeap();
  const art::gc::space::ContinuousSpace* const space =
      heap->FindContinuousSpaceFromObject(obj, true);
  jint heap_type = kHeapIdApp;
  if (space != nullptr) {
    if (space->IsZygoteSpace()) {
      heap_type = kHeapIdZygote;
    } else if (space->IsImageSpace() && heap->ObjectIsInBootImageSpace(obj)) {
      // Only count objects in the boot image as HPROF_HEAP_IMAGE; this leaves app image objects
      // as HPROF_HEAP_APP. b/35762934
      heap_type = kHeapIdImage;
    }
  } else {
    const auto* los = heap->GetLargeObjectsSpace();
    if (los->Contains(obj.Ptr()) && los->IsZygoteLargeObject(art::Thread::Current(), obj.Ptr())) {
      heap_type = kHeapIdZygote;
    }
  }
  return heap_type;
}

jvmtiError HeapExtensions::GetObjectHeapId(jvmtiEnv* env, jlong tag, jint* heap_id, ...) {
  if (heap_id == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  auto work = [&]() REQUIRES_SHARED(art::Locks::mutator_lock_) {
    ObjectTagTable* tag_table = ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get();
    art::ObjPtr<art::mirror::Object> obj = tag_table->Find(tag);
    jint heap_type = GetHeapId(obj);
    if (heap_type == -1) {
      return ERR(NOT_FOUND);
    }
    *heap_id = heap_type;
    return ERR(NONE);
  };

  if (!art::Locks::mutator_lock_->IsSharedHeld(self)) {
    if (!self->IsThreadSuspensionAllowable()) {
      return ERR(INTERNAL);
    }
    art::ScopedObjectAccess soa(self);
    return work();
  } else {
    // We cannot use SOA in this case. We might be holding the lock, but may not be in the
    // runnable state (e.g., during GC).
    art::Locks::mutator_lock_->AssertSharedHeld(self);
    // TODO: Investigate why ASSERT_SHARED_CAPABILITY doesn't work.
    auto annotalysis_workaround = [&]() NO_THREAD_SAFETY_ANALYSIS {
      return work();
    };
    return annotalysis_workaround();
  }
}

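// Example (illustrative agent-side usage): GetObjectHeapId is not part of the standard
// JVMTI function table; it is published as an ART extension function (registered in
// ti_extension.cc), so an agent retrieves it via GetExtensionFunctions. The lookup below
// matches on a substring of the extension id and is a sketch, not the exact id string.
//
//   using GetObjectHeapIdFn = jvmtiError (*)(jvmtiEnv*, jlong, jint*, ...);
//   jint count = 0;
//   jvmtiExtensionFunctionInfo* extensions = nullptr;
//   jvmti->GetExtensionFunctions(&count, &extensions);
//   for (jint i = 0; i != count; ++i) {
//     if (strstr(extensions[i].id, "get_object_heap_id") != nullptr) {
//       auto get_heap_id = reinterpret_cast<GetObjectHeapIdFn>(extensions[i].func);
//       jint heap_id = -1;
//       get_heap_id(jvmti, some_tag, &heap_id);  // some_tag: a tag set earlier via SetTag.
//     }
//   }
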
static jvmtiError CopyStringAndReturn(jvmtiEnv* env, const char* in, char** out) {
  jvmtiError error;
  JvmtiUniquePtr<char[]> param_name = CopyString(env, in, &error);
  if (param_name == nullptr) {
    return error;
  }
  *out = param_name.release();
  return ERR(NONE);
}

static constexpr const char* kHeapIdDefaultName = "default";
static constexpr const char* kHeapIdImageName = "image";
static constexpr const char* kHeapIdZygoteName = "zygote";
static constexpr const char* kHeapIdAppName = "app";

jvmtiError HeapExtensions::GetHeapName(jvmtiEnv* env, jint heap_id, char** heap_name, ...) {
  switch (heap_id) {
    case kHeapIdDefault:
      return CopyStringAndReturn(env, kHeapIdDefaultName, heap_name);
    case kHeapIdImage:
      return CopyStringAndReturn(env, kHeapIdImageName, heap_name);
    case kHeapIdZygote:
      return CopyStringAndReturn(env, kHeapIdZygoteName, heap_name);
    case kHeapIdApp:
      return CopyStringAndReturn(env, kHeapIdAppName, heap_name);

    default:
      return ERR(ILLEGAL_ARGUMENT);
  }
}

jvmtiError HeapExtensions::IterateThroughHeapExt(jvmtiEnv* env,
                                                 jint heap_filter,
                                                 jclass klass,
                                                 const jvmtiHeapCallbacks* callbacks,
                                                 const void* user_data) {
  if (ArtJvmTiEnv::AsArtJvmTiEnv(env)->capabilities.can_tag_objects != 1) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  // ART extension API: Also pass the heap id.
  auto ArtIterateHeap = [](art::mirror::Object* obj,
                           const jvmtiHeapCallbacks* cb_callbacks,
                           jlong class_tag,
                           jlong size,
                           jlong* tag,
                           jint length,
                           void* cb_user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    jint heap_id = GetHeapId(obj);
    using ArtExtensionAPI = jint (*)(jlong, jlong, jlong*, jint length, void*, jint);
    return reinterpret_cast<ArtExtensionAPI>(cb_callbacks->heap_iteration_callback)(
        class_tag, size, tag, length, cb_user_data, heap_id);
  };
  return DoIterateThroughHeap(ArtIterateHeap,
                              env,
                              ArtJvmTiEnv::AsArtJvmTiEnv(env)->object_tag_table.get(),
                              heap_filter,
                              klass,
                              callbacks,
                              user_data);
}

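// Example (illustrative agent-side callback for the extension above): the ART variant
// reuses the heap_iteration_callback slot but appends the heap id as a trailing argument,
// so the agent installs a callback with the extended signature. Names are hypothetical.
//
//   static jint JNICALL OnHeapObjectWithHeapId(jlong class_tag, jlong size, jlong* tag_ptr,
//                                              jint length, void* user_data, jint heap_id) {
//     if (heap_id == 2) {  // kHeapIdZygote above.
//       ++*static_cast<size_t*>(user_data);  // Count objects living in the zygote heap.
//     }
//     return 0;
//   }
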
}  // namespace openjdkjvmti