/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include <cstring>
#include <unordered_set>
#include <vector>

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "primitive.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace openjdkjvmti {

namespace {

// Report the contents of a string, if a callback is set.
jint ReportString(art::ObjPtr<art::mirror::Object> obj,
                  jvmtiEnv* env,
                  ObjectTagTable* tag_table,
                  const jvmtiHeapCallbacks* cb,
                  const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->string_primitive_value_callback != nullptr) && obj->IsString()) {
    art::ObjPtr<art::mirror::String> str = obj->AsString();
    int32_t string_length = str->GetLength();
    JvmtiUniquePtr<uint16_t[]> data;

    if (string_length > 0) {
      jvmtiError alloc_error;
      data = AllocJvmtiUniquePtr<uint16_t[]>(env, string_length, &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        //       back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for string reporting! Silently dropping value."
                     << " >" << str->ToModifiedUtf8() << "<";
        return 0;
      }

      if (str->IsCompressed()) {
        uint8_t* compressed_data = str->GetValueCompressed();
        for (int32_t i = 0; i != string_length; ++i) {
          data[i] = compressed_data[i];
        }
      } else {
        // Can copy directly.
        memcpy(data.get(), str->GetValue(), string_length * sizeof(uint16_t));
      }
    }

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong string_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_string_tag = string_tag;

    jint result = cb->string_primitive_value_callback(class_tag,
                                                      obj->SizeOf(),
                                                      &string_tag,
                                                      data.get(),
                                                      string_length,
                                                      const_cast<void*>(user_data));
    if (string_tag != saved_string_tag) {
      tag_table->Set(obj.Ptr(), string_tag);
    }

    return result;
  }
  return 0;
}
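
// For reference, the agent-side jvmtiStringPrimitiveValueCallback that ReportString() feeds has
// the shape below (a minimal sketch; PrintString is a hypothetical agent function, not part of
// this file):
//
//   jint JNICALL PrintString(jlong class_tag, jlong size, jlong* tag_ptr,
//                            const jchar* value, jint value_length, void* user_data) {
//     // value/value_length receive the UTF-16 data copied above; writing through tag_ptr
//     // re-tags the string, which the code above commits after the call returns.
//     return 0;  // No JVMTI_VISIT_ABORT bit set: iteration continues.
//   }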

// Report the contents of a primitive array, if a callback is set.
jint ReportPrimitiveArray(art::ObjPtr<art::mirror::Object> obj,
                          jvmtiEnv* env,
                          ObjectTagTable* tag_table,
                          const jvmtiHeapCallbacks* cb,
                          const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->array_primitive_value_callback != nullptr) &&
      obj->IsArrayInstance() &&
      !obj->IsObjectArray()) {
    art::ObjPtr<art::mirror::Array> array = obj->AsArray();
    int32_t array_length = array->GetLength();
    size_t component_size = array->GetClass()->GetComponentSize();
    art::Primitive::Type art_prim_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong array_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_array_tag = array_tag;

    jint result;
    if (array_length == 0) {
      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  0,
                                                  prim_type,
                                                  nullptr,
                                                  const_cast<void*>(user_data));
    } else {
      jvmtiError alloc_error;
      JvmtiUniquePtr<char[]> data = AllocJvmtiUniquePtr<char[]>(env,
                                                                array_length * component_size,
                                                                &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        //       back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for array reporting! Silently dropping value.";
        return 0;
      }

      memcpy(data.get(), array->GetRawData(component_size, 0), array_length * component_size);

      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  array_length,
                                                  prim_type,
                                                  data.get(),
                                                  const_cast<void*>(user_data));
    }

    if (array_tag != saved_array_tag) {
      tag_table->Set(obj.Ptr(), array_tag);
    }

    return result;
  }
  return 0;
}
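
// The matching agent-side jvmtiArrayPrimitiveValueCallback receives the raw element buffer that
// ReportPrimitiveArray() copies out. A minimal sketch (SumIntArray is hypothetical):
//
//   jint JNICALL SumIntArray(jlong class_tag, jlong size, jlong* tag_ptr, jint element_count,
//                            jvmtiPrimitiveType element_type, const void* elements,
//                            void* user_data) {
//     if (element_type == JVMTI_PRIMITIVE_TYPE_INT) {
//       const jint* ints = static_cast<const jint*>(elements);
//       // ... consume element_count values from ints ...
//     }
//     return 0;
//   }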

template <typename UserData>
bool VisitorFalse(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
                  art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
                  art::ArtField& field ATTRIBUTE_UNUSED,
                  size_t field_index ATTRIBUTE_UNUSED,
                  UserData* user_data ATTRIBUTE_UNUSED) {
  return false;
}

template <typename StaticPrimitiveVisitor,
          typename StaticReferenceVisitor,
          typename InstancePrimitiveVisitor,
          typename InstanceReferenceVisitor,
          typename UserData,
          bool kCallVisitorOnRecursion>
class FieldVisitor {
 public:
  // Report the contents of the primitive fields of the given object, if a callback is set.
  static bool ReportFields(art::ObjPtr<art::mirror::Object> obj,
                           UserData* user_data,
                           StaticPrimitiveVisitor& static_prim_visitor,
                           StaticReferenceVisitor& static_ref_visitor,
                           InstancePrimitiveVisitor& instance_prim_visitor,
                           InstanceReferenceVisitor& instance_ref_visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    FieldVisitor fv(user_data);

    if (obj->IsClass()) {
      // When visiting a class, we only visit the static fields of the given class. No field of
      // superclasses is visited.
      art::ObjPtr<art::mirror::Class> klass = obj->AsClass();
      // Only report fields on resolved classes. We need valid field data.
      if (!klass->IsResolved()) {
        return false;
      }
      return fv.ReportFieldsImpl(nullptr,
                                 obj->AsClass(),
                                 obj->AsClass()->IsInterface(),
                                 static_prim_visitor,
                                 static_ref_visitor,
                                 instance_prim_visitor,
                                 instance_ref_visitor);
    } else {
      // See comment above. Just double-checking here, but an instance *should* mean the class was
      // resolved.
      DCHECK(obj->GetClass()->IsResolved() || obj->GetClass()->IsErroneousResolved());
      return fv.ReportFieldsImpl(obj,
                                 obj->GetClass(),
                                 false,
                                 static_prim_visitor,
                                 static_ref_visitor,
                                 instance_prim_visitor,
                                 instance_ref_visitor);
    }
  }

 private:
  explicit FieldVisitor(UserData* user_data) : user_data_(user_data) {}

  // Report the contents of fields of the given object. If obj is null, report the static fields,
  // otherwise the instance fields.
  bool ReportFieldsImpl(art::ObjPtr<art::mirror::Object> obj,
                        art::ObjPtr<art::mirror::Class> klass,
                        bool skip_java_lang_object,
                        StaticPrimitiveVisitor& static_prim_visitor,
                        StaticReferenceVisitor& static_ref_visitor,
                        InstancePrimitiveVisitor& instance_prim_visitor,
                        InstanceReferenceVisitor& instance_ref_visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Compute the offset of field indices.
    size_t interface_field_count = CountInterfaceFields(klass);

    size_t tmp;
    bool aborted = ReportFieldsRecursive(obj,
                                         klass,
                                         interface_field_count,
                                         skip_java_lang_object,
                                         static_prim_visitor,
                                         static_ref_visitor,
                                         instance_prim_visitor,
                                         instance_ref_visitor,
                                         &tmp);
    return aborted;
  }

  // Visit the (static and instance) fields of the given klass and, recursively, its
  // superclasses. Returns true if the visit was aborted.
  bool ReportFieldsRecursive(art::ObjPtr<art::mirror::Object> obj,
                             art::ObjPtr<art::mirror::Class> klass,
                             size_t interface_fields,
                             bool skip_java_lang_object,
                             StaticPrimitiveVisitor& static_prim_visitor,
                             StaticReferenceVisitor& static_ref_visitor,
                             InstancePrimitiveVisitor& instance_prim_visitor,
                             InstanceReferenceVisitor& instance_ref_visitor,
                             size_t* field_index_out)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK(klass != nullptr);
    size_t field_index;
    if (klass->GetSuperClass() == nullptr) {
      // j.l.Object. Start with the fields from interfaces.
      field_index = interface_fields;
      if (skip_java_lang_object) {
        *field_index_out = field_index;
        return false;
      }
    } else {
      // Report superclass fields.
      if (kCallVisitorOnRecursion) {
        if (ReportFieldsRecursive(obj,
                                  klass->GetSuperClass(),
                                  interface_fields,
                                  skip_java_lang_object,
                                  static_prim_visitor,
                                  static_ref_visitor,
                                  instance_prim_visitor,
                                  instance_ref_visitor,
                                  &field_index)) {
          return true;
        }
      } else {
        // Still call, but with empty visitor. This is required for correct counting.
        ReportFieldsRecursive(obj,
                              klass->GetSuperClass(),
                              interface_fields,
                              skip_java_lang_object,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              VisitorFalse<UserData>,
                              &field_index);
      }
    }

    // Now visit fields for the current klass.

    for (auto& static_field : klass->GetSFields()) {
      if (static_field.IsPrimitiveType()) {
        if (static_prim_visitor(obj,
                                klass,
                                static_field,
                                field_index,
                                user_data_)) {
          return true;
        }
      } else {
        if (static_ref_visitor(obj,
                               klass,
                               static_field,
                               field_index,
                               user_data_)) {
          return true;
        }
      }
      field_index++;
    }

    for (auto& instance_field : klass->GetIFields()) {
      if (instance_field.IsPrimitiveType()) {
        if (instance_prim_visitor(obj,
                                  klass,
                                  instance_field,
                                  field_index,
                                  user_data_)) {
          return true;
        }
      } else {
        if (instance_ref_visitor(obj,
                                 klass,
                                 instance_field,
                                 field_index,
                                 user_data_)) {
          return true;
        }
      }
      field_index++;
    }

    *field_index_out = field_index;
    return false;
  }

  // Implements a visit of the implemented interfaces of a given class.
  template <typename T>
  struct RecursiveInterfaceVisit {
    static void VisitStatic(art::Thread* self, art::ObjPtr<art::mirror::Class> klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      RecursiveInterfaceVisit rv;
      rv.Visit(self, klass, visitor);
    }

    void Visit(art::Thread* self, art::ObjPtr<art::mirror::Class> klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // First visit the parent, to get the order right.
      // (We do this in preparation for actual visiting of interface fields.)
      if (klass->GetSuperClass() != nullptr) {
        Visit(self, klass->GetSuperClass(), visitor);
      }
      for (uint32_t i = 0; i != klass->NumDirectInterfaces(); ++i) {
        art::ObjPtr<art::mirror::Class> inf_klass =
            art::mirror::Class::GetDirectInterface(self, klass, i);
        DCHECK(inf_klass != nullptr);
        VisitInterface(self, inf_klass, visitor);
      }
    }

    void VisitInterface(art::Thread* self, art::ObjPtr<art::mirror::Class> inf_klass, T& visitor)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      auto it = visited_interfaces.find(inf_klass.Ptr());
      if (it != visited_interfaces.end()) {
        return;
      }
      visited_interfaces.insert(inf_klass.Ptr());

      // Let the visitor know about this one. Note that this order is acceptable, as the ordering
      // of these fields never matters for known visitors.
      visitor(inf_klass);

      // Now visit the superinterfaces.
      for (uint32_t i = 0; i != inf_klass->NumDirectInterfaces(); ++i) {
        art::ObjPtr<art::mirror::Class> super_inf_klass =
            art::mirror::Class::GetDirectInterface(self, inf_klass, i);
        DCHECK(super_inf_klass != nullptr);
        VisitInterface(self, super_inf_klass, visitor);
      }
    }

    std::unordered_set<art::mirror::Class*> visited_interfaces;
  };

  // Counting interface fields. Note that we cannot use the interface table, as that only contains
  // "non-marker" interfaces (= interfaces with methods).
  static size_t CountInterfaceFields(art::ObjPtr<art::mirror::Class> klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    size_t count = 0;
    auto visitor = [&count](art::ObjPtr<art::mirror::Class> inf_klass)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      DCHECK(inf_klass->IsInterface());
      DCHECK_EQ(0u, inf_klass->NumInstanceFields());
      count += inf_klass->NumStaticFields();
    };
    RecursiveInterfaceVisit<decltype(visitor)>::VisitStatic(art::Thread::Current(), klass, visitor);
    // TODO: Implement caching.
    return count;
  }

  UserData* user_data_;
};
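
// Note on field numbering: as implemented above, indices start with the static fields of all
// (transitively) implemented interfaces, followed by the fields of each class from
// java.lang.Object down to the most-derived class, statics before instance fields within each
// class. That running index is what gets reported through the JVMTI heap callbacks below.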

// Debug helper. Prints the structure of an object.
template <bool kStatic, bool kRef>
struct DumpVisitor {
  static bool Callback(art::ObjPtr<art::mirror::Object> obj ATTRIBUTE_UNUSED,
                       art::ObjPtr<art::mirror::Class> klass ATTRIBUTE_UNUSED,
                       art::ArtField& field,
                       size_t field_index,
                       void* user_data ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    LOG(ERROR) << (kStatic ? "static " : "instance ")
               << (kRef ? "ref " : "primitive ")
               << field.PrettyField()
               << " @ "
               << field_index;
    return false;
  }
};
ATTRIBUTE_UNUSED
void DumpObjectFields(art::ObjPtr<art::mirror::Object> obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (obj->IsClass()) {
    FieldVisitor<decltype(DumpVisitor<true, false>::Callback),
                 decltype(DumpVisitor<true, true>::Callback),
                 decltype(DumpVisitor<false, false>::Callback),
                 decltype(DumpVisitor<false, true>::Callback),
                 void,
                 false>::
        ReportFields(obj,
                     nullptr,
                     DumpVisitor<true, false>::Callback,
                     DumpVisitor<true, true>::Callback,
                     DumpVisitor<false, false>::Callback,
                     DumpVisitor<false, true>::Callback);
  } else {
    FieldVisitor<decltype(DumpVisitor<true, false>::Callback),
                 decltype(DumpVisitor<true, true>::Callback),
                 decltype(DumpVisitor<false, false>::Callback),
                 decltype(DumpVisitor<false, true>::Callback),
                 void,
                 true>::
        ReportFields(obj,
                     nullptr,
                     DumpVisitor<true, false>::Callback,
                     DumpVisitor<true, true>::Callback,
                     DumpVisitor<false, false>::Callback,
                     DumpVisitor<false, true>::Callback);
  }
}

class ReportPrimitiveField {
 public:
  static bool Report(art::ObjPtr<art::mirror::Object> obj,
                     ObjectTagTable* tag_table,
                     const jvmtiHeapCallbacks* cb,
                     const void* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (UNLIKELY(cb->primitive_field_callback != nullptr)) {
      jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
      ReportPrimitiveField rpf(tag_table, class_tag, cb, user_data);
      if (obj->IsClass()) {
        return FieldVisitor<decltype(ReportPrimitiveFieldCallback<true>),
                            decltype(VisitorFalse<ReportPrimitiveField>),
                            decltype(VisitorFalse<ReportPrimitiveField>),
                            decltype(VisitorFalse<ReportPrimitiveField>),
                            ReportPrimitiveField,
                            false>::
            ReportFields(obj,
                         &rpf,
                         ReportPrimitiveFieldCallback<true>,
                         VisitorFalse<ReportPrimitiveField>,
                         VisitorFalse<ReportPrimitiveField>,
                         VisitorFalse<ReportPrimitiveField>);
      } else {
        return FieldVisitor<decltype(VisitorFalse<ReportPrimitiveField>),
                            decltype(VisitorFalse<ReportPrimitiveField>),
                            decltype(ReportPrimitiveFieldCallback<false>),
                            decltype(VisitorFalse<ReportPrimitiveField>),
                            ReportPrimitiveField,
                            true>::
            ReportFields(obj,
                         &rpf,
                         VisitorFalse<ReportPrimitiveField>,
                         VisitorFalse<ReportPrimitiveField>,
                         ReportPrimitiveFieldCallback<false>,
                         VisitorFalse<ReportPrimitiveField>);
      }
    }
    return false;
  }

 private:
  ReportPrimitiveField(ObjectTagTable* tag_table,
                       jlong class_tag,
                       const jvmtiHeapCallbacks* cb,
                       const void* user_data)
      : tag_table_(tag_table), class_tag_(class_tag), cb_(cb), user_data_(user_data) {}

  template <bool kReportStatic>
  static bool ReportPrimitiveFieldCallback(art::ObjPtr<art::mirror::Object> obj,
                                           art::ObjPtr<art::mirror::Class> klass,
                                           art::ArtField& field,
                                           size_t field_index,
                                           ReportPrimitiveField* user_data)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    art::Primitive::Type art_prim_type = field.GetTypeAsPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);
    jvmtiHeapReferenceInfo info;
    info.field.index = field_index;

    jvalue value;
    memset(&value, 0, sizeof(jvalue));
    art::ObjPtr<art::mirror::Object> src = kReportStatic ? klass : obj;
    switch (art_prim_type) {
      case art::Primitive::Type::kPrimBoolean:
        value.z = field.GetBoolean(src) == 0 ? JNI_FALSE : JNI_TRUE;
        break;
      case art::Primitive::Type::kPrimByte:
        value.b = field.GetByte(src);
        break;
      case art::Primitive::Type::kPrimChar:
        value.c = field.GetChar(src);
        break;
      case art::Primitive::Type::kPrimShort:
        value.s = field.GetShort(src);
        break;
      case art::Primitive::Type::kPrimInt:
        value.i = field.GetInt(src);
        break;
      case art::Primitive::Type::kPrimLong:
        value.j = field.GetLong(src);
        break;
      case art::Primitive::Type::kPrimFloat:
        value.f = field.GetFloat(src);
        break;
      case art::Primitive::Type::kPrimDouble:
        value.d = field.GetDouble(src);
        break;
      case art::Primitive::Type::kPrimVoid:
      case art::Primitive::Type::kPrimNot: {
        LOG(FATAL) << "Should not reach here";
        UNREACHABLE();
      }
    }

    jlong obj_tag = user_data->tag_table_->GetTagOrZero(src.Ptr());
    const jlong saved_obj_tag = obj_tag;

    jint ret = user_data->cb_->primitive_field_callback(kReportStatic
                                                            ? JVMTI_HEAP_REFERENCE_STATIC_FIELD
                                                            : JVMTI_HEAP_REFERENCE_FIELD,
                                                        &info,
                                                        user_data->class_tag_,
                                                        &obj_tag,
                                                        value,
                                                        prim_type,
                                                        const_cast<void*>(user_data->user_data_));

    if (saved_obj_tag != obj_tag) {
      user_data->tag_table_->Set(src.Ptr(), obj_tag);
    }

    if ((ret & JVMTI_VISIT_ABORT) != 0) {
      return true;
    }

    return false;
  }

  ObjectTagTable* tag_table_;
  jlong class_tag_;
  const jvmtiHeapCallbacks* cb_;
  const void* user_data_;
};
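
// Shape of the agent-side jvmtiPrimitiveFieldCallback driven by ReportPrimitiveField (a minimal
// sketch; LogField is a hypothetical agent function):
//
//   jint JNICALL LogField(jvmtiHeapReferenceKind kind, const jvmtiHeapReferenceInfo* info,
//                         jlong object_class_tag, jlong* object_tag_ptr, jvalue value,
//                         jvmtiPrimitiveType value_type, void* user_data) {
//     // kind is JVMTI_HEAP_REFERENCE_STATIC_FIELD or JVMTI_HEAP_REFERENCE_FIELD;
//     // info->field.index carries the index computed by FieldVisitor.
//     return 0;
//   }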

struct HeapFilter {
  explicit HeapFilter(jint heap_filter)
      : filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) const {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;
};
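
// Illustration of the filter semantics: each bit excludes one combination, so, for example,
//   HeapFilter(JVMTI_HEAP_FILTER_UNTAGGED).ShouldReportByHeapFilter(/* tag */ 0, /* class_tag */ 7)
// is false (the object itself is untagged), while
//   HeapFilter(JVMTI_HEAP_FILTER_CLASS_UNTAGGED).ShouldReportByHeapFilter(0, 7)
// is true, since the CLASS_* bits only consult the class tag.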

}  // namespace

struct IterateThroughHeapData {
  IterateThroughHeapData(HeapUtil* _heap_util,
                         jvmtiEnv* _env,
                         art::ObjPtr<art::mirror::Class> klass,
                         jint _heap_filter,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : heap_util(_heap_util),
        heap_filter(_heap_filter),
        filter_klass(klass),
        env(_env),
        callbacks(_callbacks),
        user_data(_user_data),
        stop_reports(false) {
  }

  HeapUtil* heap_util;
  const HeapFilter heap_filter;
  art::ObjPtr<art::mirror::Class> filter_klass;
  jvmtiEnv* env;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;

  bool stop_reports;
};

static void IterateThroughHeapObjectCallback(art::mirror::Object* obj, void* arg)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
  // Early return, as we can't really stop visiting.
  if (ithd->stop_reports) {
    return;
  }

  art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

  jlong tag = 0;
  ithd->heap_util->GetTags()->GetTag(obj, &tag);

  jlong class_tag = 0;
  art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
  ithd->heap_util->GetTags()->GetTag(klass.Ptr(), &class_tag);
  // For simplicity, even if we find a tag = 0, assume 0 = not tagged.

  if (!ithd->heap_filter.ShouldReportByHeapFilter(tag, class_tag)) {
    return;
  }

  if (ithd->filter_klass != nullptr) {
    if (ithd->filter_klass != klass) {
      return;
    }
  }

  jlong size = obj->SizeOf();

  jint length = -1;
  if (obj->IsArrayInstance()) {
    length = obj->AsArray()->GetLength();
  }

  jlong saved_tag = tag;
  jint ret = ithd->callbacks->heap_iteration_callback(class_tag,
                                                      size,
                                                      &tag,
                                                      length,
                                                      const_cast<void*>(ithd->user_data));

  if (tag != saved_tag) {
    ithd->heap_util->GetTags()->Set(obj, tag);
  }

  ithd->stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

  if (!ithd->stop_reports) {
    jint string_ret = ReportString(obj,
                                   ithd->env,
                                   ithd->heap_util->GetTags(),
                                   ithd->callbacks,
                                   ithd->user_data);
    ithd->stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
  }

  if (!ithd->stop_reports) {
    jint array_ret = ReportPrimitiveArray(obj,
                                          ithd->env,
                                          ithd->heap_util->GetTags(),
                                          ithd->callbacks,
                                          ithd->user_data);
    ithd->stop_reports = (array_ret & JVMTI_VISIT_ABORT) != 0;
  }

  if (!ithd->stop_reports) {
    ithd->stop_reports = ReportPrimitiveField::Report(obj,
                                                      ithd->heap_util->GetTags(),
                                                      ithd->callbacks,
                                                      ithd->user_data);
  }
}

jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.

  IterateThroughHeapData ithd(this,
                              env,
                              soa.Decode<art::mirror::Class>(klass),
                              heap_filter,
                              callbacks,
                              user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(IterateThroughHeapObjectCallback, &ithd);

  return ERR(NONE);
}

class FollowReferencesHelper FINAL {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         jvmtiEnv* jvmti_env,
                         art::ObjPtr<art::mirror::Object> initial_object,
                         const jvmtiHeapCallbacks* callbacks,
                         art::ObjPtr<art::mirror::Class> class_filter,
                         jint heap_filter,
                         const void* user_data)
      : env(jvmti_env),
        tag_table_(h->GetTags()),
        initial_object_(initial_object),
        callbacks_(callbacks),
        class_filter_(class_filter),
        heap_filter_(heap_filter),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (initial_object_.IsNull()) {
      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);

      // We need precise info (e.g., vregs).
      constexpr art::VisitRootFlags kRootFlags = static_cast<art::VisitRootFlags>(
          art::VisitRootFlags::kVisitRootFlagAllRoots | art::VisitRootFlags::kVisitRootFlagPrecise);
      art::Runtime::Current()->VisitRoots(&carrv, kRootFlags);

      art::Runtime::Current()->VisitImageRoots(&carrv);
      stop_reports_ = carrv.IsStopReports();

      if (stop_reports_) {
        worklist_.clear();
      }
    } else {
      visited_.insert(initial_object_.Ptr());
      worklist_.push_back(initial_object_.Ptr());
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we can implement a DFS and use the work list as a stack.
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        OVERRIDE
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      // We use visited_ to mark roots already so we do not need another set.
      if (visited_->find(root_obj) == visited_->end()) {
        visited_->insert(root_obj);
        worklist_->push_back(root_obj);
      }
      ReportRoot(root_obj, info);
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have this info.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->stack_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->stack_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          auto& java_info = static_cast<const art::JavaFrameRootInfo&>(info);
          ref_info->stack_local.slot = static_cast<jint>(java_info.GetVReg());
          const art::StackVisitor* visitor = java_info.GetVisitor();
          ref_info->stack_local.location =
              static_cast<jlocation>(visitor->GetDexPc(false /* abort_on_failure */));
          ref_info->stack_local.depth = static_cast<jint>(visitor->GetFrameDepth());
          art::ArtMethod* method = visitor->GetMethod();
          if (method != nullptr) {
            ref_info->stack_local.method = art::jni::EncodeArtMethod(method);
          }

          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;
        }

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    void ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // TODO: We'll probably have to rewrite this completely with our own visiting logic, if we
    //       want to have a chance of getting the field indices computed halfway efficiently. For
    //       now, ignore them altogether.

    struct InstanceReferenceVisitor {
      explicit InstanceReferenceVisitor(FollowReferencesHelper* helper_)
          : helper(helper_), stop_reports(false) {}

      void operator()(art::mirror::Object* src,
                      art::MemberOffset field_offset,
                      bool is_static ATTRIBUTE_UNUSED) const
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          REQUIRES(!*helper->tag_table_->GetAllowDisallowLock()) {
        if (stop_reports) {
          return;
        }

        art::mirror::Object* trg = src->GetFieldObjectReferenceAddr(field_offset)->AsMirrorPtr();
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        // TODO: Implement spec-compliant numbering.
        reference_info.field.index = field_offset.Int32Value();

        jvmtiHeapReferenceKind kind =
            field_offset.Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                ? JVMTI_HEAP_REFERENCE_CLASS
                : JVMTI_HEAP_REFERENCE_FIELD;
        const jvmtiHeapReferenceInfo* reference_info_ptr =
            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

        stop_reports = !helper->ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src, trg);
      }

      void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED)
          const {
        LOG(FATAL) << "Unreachable";
      }
      void VisitRootIfNonNull(
          art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const {
        LOG(FATAL) << "Unreachable";
      }

      // "mutable" required by the visitor API.
      mutable FollowReferencesHelper* helper;
      mutable bool stop_reports;
    };

    InstanceReferenceVisitor visitor(this);
    // Visit references, not native roots.
    obj->VisitReferences<false>(visitor, art::VoidFunctor());

    stop_reports_ = visitor.stop_reports;

    if (!stop_reports_) {
      jint string_ret = ReportString(obj, env, tag_table_, callbacks_, user_data_);
      stop_reports_ = (string_ret & JVMTI_VISIT_ABORT) != 0;
    }

    if (!stop_reports_) {
      stop_reports_ = ReportPrimitiveField::Report(obj, tag_table_, callbacks_, user_data_);
    }
  }

  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
          array->AsObjectArray<art::mirror::Object>();
      int32_t length = obj_array->GetLength();
      for (int32_t i = 0; i != length; ++i) {
        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
        if (elem != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          reference_info.array.index = i;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem);
          if (stop_reports_) {
            break;
          }
        }
      }
    } else {
      if (!stop_reports_) {
        jint array_ret = ReportPrimitiveArray(array, env, tag_table_, callbacks_, user_data_);
        stop_reports_ = (array_ret & JVMTI_VISIT_ABORT) != 0;
      }
    }
  }

  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    //       fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    for (auto& field : klass->GetSFields()) {
      if (!field.IsPrimitiveType()) {
        art::ObjPtr<art::mirror::Object> field_value = field.GetObject(klass);
        if (field_value != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          memset(&reference_info, 0, sizeof(reference_info));

          // TODO: Implement spec-compliant numbering.
          reference_info.field.index = field.GetOffset().Int32Value();

          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                                       &reference_info,
                                                       klass,
                                                       field_value.Ptr());
          if (stop_reports_) {
            return;
          }
        }
      }
    }

    if (!stop_reports_) {
      stop_reports_ = ReportPrimitiveField::Report(klass, tag_table_, callbacks_, user_data_);
    }
  }

  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

  // Returns true to continue the traversal (enqueuing the referree if the callback asked for
  // it), false if the callback requested an abort.
  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referrer,
                                   art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referrer, referree);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referree);
      }
      return true;
    } else {
      return false;
    }
  }

  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    if (UNLIKELY(class_filter_ != nullptr) && class_filter_ != referree->GetClass()) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    jlong tag = tag_table_->GetTagOrZero(referree);

    if (!heap_filter_.ShouldReportByHeapFilter(tag, class_tag)) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }

    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  jvmtiEnv* env;
  ObjectTagTable* tag_table_;
  art::ObjPtr<art::mirror::Object> initial_object_;
  const jvmtiHeapCallbacks* callbacks_;
  art::ObjPtr<art::mirror::Class> class_filter_;
  const HeapFilter heap_filter_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env,
                                      jint heap_filter,
                                      jclass klass,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    art::ObjPtr<art::mirror::Class> class_filter = klass == nullptr
        ? nullptr
        : art::ObjPtr<art::mirror::Class>::DownCast(self->DecodeJObject(klass));
    FollowReferencesHelper frh(this,
                               env,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               class_filter,
                               heap_filter,
                               user_data);
    frh.Init();
    frh.Work();
  }
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}
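
// A minimal agent-side use of the JVMTI entry point backed by HeapUtil::FollowReferences,
// assuming a jvmtiEnv* named jvmti and a heap_reference_callback named CountRef (both
// hypothetical):
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback = CountRef;
//   // Null class filter and initial object: traverse from all roots, unfiltered.
//   jvmti->FollowReferences(0, nullptr, nullptr, &callbacks, nullptr);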

jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}

jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}
}  // namespace openjdkjvmti