/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "primitive.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace openjdkjvmti {

namespace {

// Report the contents of a string, if a callback is set.
jint ReportString(art::ObjPtr<art::mirror::Object> obj,
                  jvmtiEnv* env,
                  ObjectTagTable* tag_table,
                  const jvmtiHeapCallbacks* cb,
                  const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->string_primitive_value_callback != nullptr) && obj->IsString()) {
    art::ObjPtr<art::mirror::String> str = obj->AsString();
    int32_t string_length = str->GetLength();
    JvmtiUniquePtr<uint16_t[]> data;

    if (string_length > 0) {
      jvmtiError alloc_error;
      data = AllocJvmtiUniquePtr<uint16_t[]>(env, string_length, &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        //       back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for string reporting! Silently dropping value."
                     << " >" << str->ToModifiedUtf8() << "<";
        return 0;
      }

      if (str->IsCompressed()) {
        uint8_t* compressed_data = str->GetValueCompressed();
        for (int32_t i = 0; i != string_length; ++i) {
          data[i] = compressed_data[i];
        }
      } else {
        // Can copy directly.
        memcpy(data.get(), str->GetValue(), string_length * sizeof(uint16_t));
      }
    }

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong string_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_string_tag = string_tag;

    jint result = cb->string_primitive_value_callback(class_tag,
                                                      obj->SizeOf(),
                                                      &string_tag,
                                                      data.get(),
                                                      string_length,
                                                      const_cast<void*>(user_data));
    if (string_tag != saved_string_tag) {
      tag_table->Set(obj.Ptr(), string_tag);
    }

    return result;
  }
  return 0;
}

// Report the contents of a primitive array, if a callback is set.
jint ReportPrimitiveArray(art::ObjPtr<art::mirror::Object> obj,
                          jvmtiEnv* env,
                          ObjectTagTable* tag_table,
                          const jvmtiHeapCallbacks* cb,
                          const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->array_primitive_value_callback != nullptr) &&
      obj->IsArrayInstance() &&
      !obj->IsObjectArray()) {
    art::ObjPtr<art::mirror::Array> array = obj->AsArray();
    int32_t array_length = array->GetLength();
    size_t component_size = array->GetClass()->GetComponentSize();
    art::Primitive::Type art_prim_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong array_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_array_tag = array_tag;

    jint result;
    if (array_length == 0) {
      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  0,
                                                  prim_type,
                                                  nullptr,
                                                  const_cast<void*>(user_data));
    } else {
      jvmtiError alloc_error;
      JvmtiUniquePtr<char[]> data = AllocJvmtiUniquePtr<char[]>(env,
                                                                array_length * component_size,
                                                                &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        //       back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for array reporting! Silently dropping value.";
        return 0;
      }

      memcpy(data.get(), array->GetRawData(component_size, 0), array_length * component_size);

      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  array_length,
                                                  prim_type,
                                                  data.get(),
                                                  const_cast<void*>(user_data));
    }

    if (array_tag != saved_array_tag) {
      tag_table->Set(obj.Ptr(), array_tag);
    }

    return result;
  }
  return 0;
}

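// A no-op field visitor that never aborts the visit. Used as a placeholder when a particular
// category of fields (static/instance, primitive/reference) should be skipped while field
// indices still need to be counted.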
template <typename UserData>
bool VisitorFalse(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
                  art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
                  art::ArtField& field ATTRIBUTE_UNUSED,
                  size_t field_index ATTRIBUTE_UNUSED,
                  UserData* user_data ATTRIBUTE_UNUSED) {
  return false;
}

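// Walks the static and instance fields of an object or class and dispatches each field to one of
// four visitors (static/instance x primitive/reference). Field indices count the fields of
// implemented interfaces first, then superclass fields starting from java.lang.Object, then the
// fields declared by the class itself. When kCallVisitorOnRecursion is false, superclass fields
// are only counted, not reported.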
template <typename UserData, bool kCallVisitorOnRecursion>
class FieldVisitor {
 public:
  // Report the fields of the given object (or the static fields of the given class) through the
  // provided visitors.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  static bool ReportFields(art::ObjPtr<art::mirror::Object> obj,
                           UserData* user_data,
                           StaticPrimitiveVisitor& static_prim_visitor,
                           StaticReferenceVisitor& static_ref_visitor,
                           InstancePrimitiveVisitor& instance_prim_visitor,
                           InstanceReferenceVisitor& instance_ref_visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    FieldVisitor fv(user_data);

    if (obj->IsClass()) {
      // When visiting a class, we only visit the static fields of the given class. No field of
      // superclasses is visited.
      art::ObjPtr<art::mirror::Class> klass = obj->AsClass();
      // Only report fields on resolved classes. We need valid field data.
      if (!klass->IsResolved()) {
        return false;
      }
      return fv.ReportFieldsImpl(nullptr,
                                 obj->AsClass(),
                                 obj->AsClass()->IsInterface(),
                                 static_prim_visitor,
                                 static_ref_visitor,
                                 instance_prim_visitor,
                                 instance_ref_visitor);
    } else {
      // See comment above. Just double-checking here, but an instance *should* mean the class was
      // resolved.
      DCHECK(obj->GetClass()->IsResolved() || obj->GetClass()->IsErroneousResolved());
      return fv.ReportFieldsImpl(obj,
                                 obj->GetClass(),
                                 false,
                                 static_prim_visitor,
                                 static_ref_visitor,
                                 instance_prim_visitor,
                                 instance_ref_visitor);
    }
  }

 private:
  explicit FieldVisitor(UserData* user_data) : user_data_(user_data) {}

  // Report the contents of fields of the given object. If obj is null, report the static fields,
  // otherwise the instance fields.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  bool ReportFieldsImpl(art::ObjPtr<art::mirror::Object> obj,
                        art::ObjPtr<art::mirror::Class> klass,
                        bool skip_java_lang_object,
                        StaticPrimitiveVisitor& static_prim_visitor,
                        StaticReferenceVisitor& static_ref_visitor,
                        InstancePrimitiveVisitor& instance_prim_visitor,
                        InstanceReferenceVisitor& instance_ref_visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Compute the offset of field indices.
    size_t interface_field_count = CountInterfaceFields(klass);

    size_t tmp;
    bool aborted = ReportFieldsRecursive(obj,
                                         klass,
                                         interface_field_count,
                                         skip_java_lang_object,
                                         static_prim_visitor,
                                         static_ref_visitor,
                                         instance_prim_visitor,
                                         instance_ref_visitor,
                                         &tmp);
    return aborted;
  }

  // Visit the static and instance fields of the given class and, recursively, its superclasses,
  // dispatching each field to the matching visitor. Returns true if the visit was aborted.
  template <typename StaticPrimitiveVisitor,
            typename StaticReferenceVisitor,
            typename InstancePrimitiveVisitor,
            typename InstanceReferenceVisitor>
  bool ReportFieldsRecursive(art::ObjPtr<art::mirror::Object> obj,
                             art::ObjPtr<art::mirror::Class> klass,
                             size_t interface_fields,
                             bool skip_java_lang_object,
                             StaticPrimitiveVisitor& static_prim_visitor,
                             StaticReferenceVisitor& static_ref_visitor,
                             InstancePrimitiveVisitor& instance_prim_visitor,
                             InstanceReferenceVisitor& instance_ref_visitor,
                             size_t* field_index_out)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK(klass != nullptr);
    size_t field_index;
    if (klass->GetSuperClass() == nullptr) {
      // j.l.Object. Start with the fields from interfaces.
      field_index = interface_fields;
      if (skip_java_lang_object) {
        *field_index_out = field_index;
        return false;
      }
    } else {
      // Report superclass fields.
      if (kCallVisitorOnRecursion) {
        if (ReportFieldsRecursive(obj,
                                  klass->GetSuperClass(),
                                  interface_fields,
                                  skip_java_lang_object,
                                  static_prim_visitor,
                                  static_ref_visitor,
                                  instance_prim_visitor,
                                  instance_ref_visitor,
                                  &field_index)) {
          return true;
        }
      } else {
        // Still call, but with empty visitor. This is required for correct counting.
        ReportFieldsRecursive(obj,
                              klass->GetSuperClass(),
                              interface_fields,
                              skip_java_lang_object,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              &field_index);
      }
    }

    // Now visit fields for the current klass.

    for (auto& static_field : klass->GetSFields()) {
      if (static_field.IsPrimitiveType()) {
        if (static_prim_visitor(obj,
                                klass,
                                static_field,
                                field_index,
                                user_data_)) {
          return true;
        }
      } else {
        if (static_ref_visitor(obj,
                               klass,
                               static_field,
                               field_index,
                               user_data_)) {
          return true;
        }
      }
      field_index++;
    }

    for (auto& instance_field : klass->GetIFields()) {
      if (instance_field.IsPrimitiveType()) {
        if (instance_prim_visitor(obj,
                                  klass,
                                  instance_field,
                                  field_index,
                                  user_data_)) {
          return true;
        }
      } else {
        if (instance_ref_visitor(obj,
                                 klass,
                                 instance_field,
                                 field_index,
                                 user_data_)) {
          return true;
        }
      }
      field_index++;
    }

    *field_index_out = field_index;
    return false;
  }

  // Implements a visit of the implemented interfaces of a given class.
  template <typename T>
  struct RecursiveInterfaceVisit {
    static void VisitStatic(art::Thread* self, art::ObjPtr<art::mirror::Class> klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      RecursiveInterfaceVisit rv;
      rv.Visit(self, klass, visitor);
    }

    void Visit(art::Thread* self, art::ObjPtr<art::mirror::Class> klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // First visit the parent, to get the order right.
      // (We do this in preparation for actual visiting of interface fields.)
      if (klass->GetSuperClass() != nullptr) {
        Visit(self, klass->GetSuperClass(), visitor);
      }
      for (uint32_t i = 0; i != klass->NumDirectInterfaces(); ++i) {
        art::ObjPtr<art::mirror::Class> inf_klass =
            art::mirror::Class::GetDirectInterface(self, klass, i);
        DCHECK(inf_klass != nullptr);
        VisitInterface(self, inf_klass, visitor);
      }
    }

    void VisitInterface(art::Thread* self, art::ObjPtr<art::mirror::Class> inf_klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      auto it = visited_interfaces.find(inf_klass.Ptr());
      if (it != visited_interfaces.end()) {
        return;
      }
      visited_interfaces.insert(inf_klass.Ptr());

      // Let the visitor know about this one. Note that this order is acceptable, as the ordering
      // of these fields never matters for known visitors.
      visitor(inf_klass);

      // Now visit the superinterfaces.
      for (uint32_t i = 0; i != inf_klass->NumDirectInterfaces(); ++i) {
        art::ObjPtr<art::mirror::Class> super_inf_klass =
            art::mirror::Class::GetDirectInterface(self, inf_klass, i);
        DCHECK(super_inf_klass != nullptr);
        VisitInterface(self, super_inf_klass, visitor);
      }
    }

    std::unordered_set<art::mirror::Class*> visited_interfaces;
  };

  // Counting interface fields. Note that we cannot use the interface table, as that only contains
  // "non-marker" interfaces (= interfaces with methods).
  static size_t CountInterfaceFields(art::ObjPtr<art::mirror::Class> klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    size_t count = 0;
    auto visitor = [&count](art::ObjPtr<art::mirror::Class> inf_klass)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      DCHECK(inf_klass->IsInterface());
      DCHECK_EQ(0u, inf_klass->NumInstanceFields());
      count += inf_klass->NumStaticFields();
    };
    RecursiveInterfaceVisit<decltype(visitor)>::VisitStatic(art::Thread::Current(), klass, visitor);
    return count;

    // TODO: Implement caching.
  }

  UserData* user_data_;
};

// Debug helper. Prints the structure of an object.
template <bool kStatic, bool kRef>
struct DumpVisitor {
  static bool Callback(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
                       art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
                       art::ArtField& field,
                       size_t field_index,
                       void* user_data ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    LOG(ERROR) << (kStatic ? "static " : "instance ")
               << (kRef ? "ref " : "primitive ")
               << field.PrettyField()
               << " @ "
               << field_index;
    return false;
  }
};
ATTRIBUTE_UNUSED
void DumpObjectFields(art::ObjPtr<art::mirror::Object> obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (obj->IsClass()) {
    FieldVisitor<void, false>::ReportFields(obj,
                                            nullptr,
                                            DumpVisitor<true, false>::Callback,
                                            DumpVisitor<true, true>::Callback,
                                            DumpVisitor<false, false>::Callback,
                                            DumpVisitor<false, true>::Callback);
  } else {
    FieldVisitor<void, true>::ReportFields(obj,
                                           nullptr,
                                           DumpVisitor<true, false>::Callback,
                                           DumpVisitor<true, true>::Callback,
                                           DumpVisitor<false, false>::Callback,
                                           DumpVisitor<false, true>::Callback);
  }
}

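// Reports the values of primitive fields through the JVMTI primitive_field_callback: static
// fields when the visited object is a class, instance fields otherwise. Returns true if the
// callback requested that the iteration be aborted.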
class ReportPrimitiveField {
 public:
  static bool Report(art::ObjPtr<art::mirror::Object> obj,
                     ObjectTagTable* tag_table,
                     const jvmtiHeapCallbacks* cb,
                     const void* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (UNLIKELY(cb->primitive_field_callback != nullptr)) {
      jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
      ReportPrimitiveField rpf(tag_table, class_tag, cb, user_data);
      if (obj->IsClass()) {
        return FieldVisitor<ReportPrimitiveField, false>::ReportFields(
            obj,
            &rpf,
            ReportPrimitiveFieldCallback<true>,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>);
      } else {
        return FieldVisitor<ReportPrimitiveField, true>::ReportFields(
            obj,
            &rpf,
            VisitorFalse<ReportPrimitiveField>,
            VisitorFalse<ReportPrimitiveField>,
            ReportPrimitiveFieldCallback<false>,
            VisitorFalse<ReportPrimitiveField>);
      }
    }
    return false;
  }

 private:
  ReportPrimitiveField(ObjectTagTable* tag_table,
                       jlong class_tag,
                       const jvmtiHeapCallbacks* cb,
                       const void* user_data)
      : tag_table_(tag_table), class_tag_(class_tag), cb_(cb), user_data_(user_data) {}

  template <bool kReportStatic>
  static bool ReportPrimitiveFieldCallback(art::ObjPtr<art::mirror::Object> obj,
                                           art::ObjPtr<art::mirror::Class> klass,
                                           art::ArtField& field,
                                           size_t field_index,
                                           ReportPrimitiveField* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    art::Primitive::Type art_prim_type = field.GetTypeAsPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);
    jvmtiHeapReferenceInfo info;
    info.field.index = field_index;

    jvalue value;
    memset(&value, 0, sizeof(jvalue));
    art::ObjPtr<art::mirror::Object> src = kReportStatic ? klass : obj;
    switch (art_prim_type) {
      case art::Primitive::Type::kPrimBoolean:
        value.z = field.GetBoolean(src) == 0 ? JNI_FALSE : JNI_TRUE;
        break;
      case art::Primitive::Type::kPrimByte:
        value.b = field.GetByte(src);
        break;
      case art::Primitive::Type::kPrimChar:
        value.c = field.GetChar(src);
        break;
      case art::Primitive::Type::kPrimShort:
        value.s = field.GetShort(src);
        break;
      case art::Primitive::Type::kPrimInt:
        value.i = field.GetInt(src);
        break;
      case art::Primitive::Type::kPrimLong:
        value.j = field.GetLong(src);
        break;
      case art::Primitive::Type::kPrimFloat:
        value.f = field.GetFloat(src);
        break;
      case art::Primitive::Type::kPrimDouble:
        value.d = field.GetDouble(src);
        break;
      case art::Primitive::Type::kPrimVoid:
      case art::Primitive::Type::kPrimNot: {
        LOG(FATAL) << "Should not reach here";
        UNREACHABLE();
      }
    }

    jlong obj_tag = user_data->tag_table_->GetTagOrZero(src.Ptr());
    const jlong saved_obj_tag = obj_tag;

    jint ret = user_data->cb_->primitive_field_callback(kReportStatic
                                                            ? JVMTI_HEAP_REFERENCE_STATIC_FIELD
                                                            : JVMTI_HEAP_REFERENCE_FIELD,
                                                        &info,
                                                        user_data->class_tag_,
                                                        &obj_tag,
                                                        value,
                                                        prim_type,
                                                        const_cast<void*>(user_data->user_data_));

    if (saved_obj_tag != obj_tag) {
      user_data->tag_table_->Set(src.Ptr(), obj_tag);
    }

    if ((ret & JVMTI_VISIT_ABORT) != 0) {
      return true;
    }

    return false;
  }

  ObjectTagTable* tag_table_;
  jlong class_tag_;
  const jvmtiHeapCallbacks* cb_;
  const void* user_data_;
};

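// Encapsulates the JVMTI_HEAP_FILTER_* bits and decides whether an object should be reported
// based on its own tag and the tag of its class.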
struct HeapFilter {
  explicit HeapFilter(jint heap_filter)
      : filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) const {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;
};

}  // namespace

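// State shared between HeapUtil::IterateThroughHeap and the per-object callback below: the tag
// table (via HeapUtil), the heap and class filters, the user callbacks, and the abort flag.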
struct IterateThroughHeapData {
  IterateThroughHeapData(HeapUtil* _heap_util,
                         jvmtiEnv* _env,
                         art::ObjPtr<art::mirror::Class> klass,
                         jint _heap_filter,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : heap_util(_heap_util),
        heap_filter(_heap_filter),
        filter_klass(klass),
        env(_env),
        callbacks(_callbacks),
        user_data(_user_data),
        stop_reports(false) {
  }

  HeapUtil* heap_util;
  const HeapFilter heap_filter;
  art::ObjPtr<art::mirror::Class> filter_klass;
  jvmtiEnv* env;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;

  bool stop_reports;
};

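// Per-object callback for IterateThroughHeap: applies the heap and class filters, invokes the
// heap_iteration_callback, and then reports string contents, primitive array contents, and
// primitive field values if the corresponding callbacks are set.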
static void IterateThroughHeapObjectCallback(art::mirror::Object* obj, void* arg)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
  // Early return, as we can't really stop visiting.
  if (ithd->stop_reports) {
    return;
  }

  art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

  jlong tag = 0;
  ithd->heap_util->GetTags()->GetTag(obj, &tag);

  jlong class_tag = 0;
  art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
  ithd->heap_util->GetTags()->GetTag(klass.Ptr(), &class_tag);
  // For simplicity, even if we find a tag = 0, assume 0 = not tagged.

  if (!ithd->heap_filter.ShouldReportByHeapFilter(tag, class_tag)) {
    return;
  }

  if (ithd->filter_klass != nullptr) {
    if (ithd->filter_klass != klass) {
      return;
    }
  }

  jlong size = obj->SizeOf();

  jint length = -1;
  if (obj->IsArrayInstance()) {
    length = obj->AsArray()->GetLength();
  }

  jlong saved_tag = tag;
  jint ret = ithd->callbacks->heap_iteration_callback(class_tag,
                                                      size,
                                                      &tag,
                                                      length,
                                                      const_cast<void*>(ithd->user_data));

  if (tag != saved_tag) {
    ithd->heap_util->GetTags()->Set(obj, tag);
  }

  ithd->stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

  if (!ithd->stop_reports) {
    jint string_ret = ReportString(obj,
                                   ithd->env,
                                   ithd->heap_util->GetTags(),
                                   ithd->callbacks,
                                   ithd->user_data);
    ithd->stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
  }

  if (!ithd->stop_reports) {
    jint array_ret = ReportPrimitiveArray(obj,
                                          ithd->env,
                                          ithd->heap_util->GetTags(),
                                          ithd->callbacks,
                                          ithd->user_data);
    ithd->stop_reports = (array_ret & JVMTI_VISIT_ABORT) != 0;
  }

  if (!ithd->stop_reports) {
    ithd->stop_reports = ReportPrimitiveField::Report(obj,
                                                      ithd->heap_util->GetTags(),
                                                      ithd->callbacks,
                                                      ithd->user_data);
  }
}

jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.

  IterateThroughHeapData ithd(this,
                              env,
                              soa.Decode<art::mirror::Class>(klass),
                              heap_filter,
                              callbacks,
                              user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(IterateThroughHeapObjectCallback, &ithd);

  return ERR(NONE);
}

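// Implements FollowReferences as a breadth-first traversal of the object graph, starting either
// from the heap roots or from a single initial object. References are reported through the JVMTI
// heap callbacks; objects whose references should be followed are enqueued on a work list.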
class FollowReferencesHelper FINAL {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         jvmtiEnv* jvmti_env,
                         art::ObjPtr<art::mirror::Object> initial_object,
                         const jvmtiHeapCallbacks* callbacks,
                         art::ObjPtr<art::mirror::Class> class_filter,
                         jint heap_filter,
                         const void* user_data)
      : env(jvmti_env),
        tag_table_(h->GetTags()),
        initial_object_(initial_object),
        callbacks_(callbacks),
        class_filter_(class_filter),
        heap_filter_(heap_filter),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (initial_object_.IsNull()) {
      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);

      // We need precise info (e.g., vregs).
      constexpr art::VisitRootFlags kRootFlags = static_cast<art::VisitRootFlags>(
          art::VisitRootFlags::kVisitRootFlagAllRoots | art::VisitRootFlags::kVisitRootFlagPrecise);
      art::Runtime::Current()->VisitRoots(&carrv, kRootFlags);

      art::Runtime::Current()->VisitImageRoots(&carrv);
      stop_reports_ = carrv.IsStopReports();

      if (stop_reports_) {
        worklist_.clear();
      }
    } else {
      visited_.insert(initial_object_.Ptr());
      worklist_.push_back(initial_object_.Ptr());
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we can implement a DFS and use the work list as a stack.
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        OVERRIDE
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      // We use visited_ to mark roots already so we do not need another set.
      if (visited_->find(root_obj) == visited_->end()) {
        visited_->insert(root_obj);
        worklist_->push_back(root_obj);
      }
      ReportRoot(root_obj, info);
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have this info.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->stack_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->stack_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          auto& java_info = static_cast<const art::JavaFrameRootInfo&>(info);
          ref_info->stack_local.slot = static_cast<jint>(java_info.GetVReg());
          const art::StackVisitor* visitor = java_info.GetVisitor();
          ref_info->stack_local.location =
              static_cast<jlocation>(visitor->GetDexPc(false /* abort_on_failure */));
          ref_info->stack_local.depth = static_cast<jint>(visitor->GetFrameDepth());
          art::ArtMethod* method = visitor->GetMethod();
          if (method != nullptr) {
            ref_info->stack_local.method = art::jni::EncodeArtMethod(method);
          }

          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;
        }

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    void ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // All instance fields.
    auto report_instance_field = [&](art::ObjPtr<art::mirror::Object> src,
                                     art::ObjPtr<art::mirror::Class> obj_klass ATTRIBUTE_UNUSED,
                                     art::ArtField& field,
                                     size_t field_index,
                                     void* user_data ATTRIBUTE_UNUSED)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      art::ObjPtr<art::mirror::Object> field_value = field.GetObject(src);
      if (field_value != nullptr) {
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        // TODO: Implement spec-compliant numbering.
        reference_info.field.index = field_index;

        jvmtiHeapReferenceKind kind =
            field.GetOffset().Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                ? JVMTI_HEAP_REFERENCE_CLASS
                : JVMTI_HEAP_REFERENCE_FIELD;
        const jvmtiHeapReferenceInfo* reference_info_ptr =
            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

        return !ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src.Ptr(), field_value.Ptr());
      }
      return false;
    };
    stop_reports_ = FieldVisitor<void, true>::ReportFields(obj,
                                                           nullptr,
                                                           VisitorFalse<void>,
                                                           VisitorFalse<void>,
                                                           VisitorFalse<void>,
                                                           report_instance_field);
    if (stop_reports_) {
      return;
    }

    jint string_ret = ReportString(obj, env, tag_table_, callbacks_, user_data_);
    stop_reports_ = (string_ret & JVMTI_VISIT_ABORT) != 0;
    if (stop_reports_) {
      return;
    }

    stop_reports_ = ReportPrimitiveField::Report(obj, tag_table_, callbacks_, user_data_);
  }

  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
          array->AsObjectArray<art::mirror::Object>();
      int32_t length = obj_array->GetLength();
      for (int32_t i = 0; i != length; ++i) {
        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
        if (elem != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          reference_info.array.index = i;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem);
          if (stop_reports_) {
            break;
          }
        }
      }
    } else {
      if (!stop_reports_) {
        jint array_ret = ReportPrimitiveArray(array, env, tag_table_, callbacks_, user_data_);
        stop_reports_ = (array_ret & JVMTI_VISIT_ABORT) != 0;
      }
    }
  }

  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    //       fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    auto report_static_field = [&](art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
                                   art::ObjPtr<art::mirror::Class> obj_klass,
                                   art::ArtField& field,
                                   size_t field_index,
                                   void* user_data ATTRIBUTE_UNUSED)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      art::ObjPtr<art::mirror::Object> field_value = field.GetObject(obj_klass);
      if (field_value != nullptr) {
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        reference_info.field.index = static_cast<jint>(field_index);

        return !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                            &reference_info,
                                            obj_klass.Ptr(),
                                            field_value.Ptr());
      }
      return false;
    };
    stop_reports_ = FieldVisitor<void, false>::ReportFields(klass,
                                                            nullptr,
                                                            VisitorFalse<void>,
                                                            report_static_field,
                                                            VisitorFalse<void>,
                                                            VisitorFalse<void>);
    if (stop_reports_) {
      return;
    }

    stop_reports_ = ReportPrimitiveField::Report(klass, tag_table_, callbacks_, user_data_);
  }

  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referree,
                                   art::mirror::Object* referrer)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referree, referrer);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referrer);
      }
      return true;
    } else {
      return false;
    }
  }

  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    if (UNLIKELY(class_filter_ != nullptr) && class_filter_ != referree->GetClass()) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    jlong tag = tag_table_->GetTagOrZero(referree);

    if (!heap_filter_.ShouldReportByHeapFilter(tag, class_tag)) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }

    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  jvmtiEnv* env;
  ObjectTagTable* tag_table_;
  art::ObjPtr<art::mirror::Object> initial_object_;
  const jvmtiHeapCallbacks* callbacks_;
  art::ObjPtr<art::mirror::Class> class_filter_;
  const HeapFilter heap_filter_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env,
                                      jint heap_filter,
                                      jclass klass,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    art::ObjPtr<art::mirror::Class> class_filter = klass == nullptr
        ? nullptr
        : art::ObjPtr<art::mirror::Class>::DownCast(self->DecodeJObject(klass));
    FollowReferencesHelper frh(this,
                               env,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               class_filter,
                               heap_filter,
                               user_data);
    frh.Init();
    frh.Work();
  }
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}

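// Returns all classes currently known to the class linker, as new local references in a
// JVMTI-allocated array.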
jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}

jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}
}  // namespace openjdkjvmti