/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include <unordered_set>
#include <vector>

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace openjdkjvmti {

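// Bundles the state for one IterateThroughHeap call: the tag table (via HeapUtil), the
// decoded JVMTI_HEAP_FILTER_* flags, the optional class filter, and the user callbacks
// plus user_data handed to each heap_iteration_callback invocation.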
struct IterateThroughHeapData {
  IterateThroughHeapData(HeapUtil* _heap_util,
                         jint heap_filter,
                         art::ObjPtr<art::mirror::Class> klass,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : heap_util(_heap_util),
        filter_klass(klass),
        callbacks(_callbacks),
        user_data(_user_data),
        filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged),
        stop_reports(false) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  HeapUtil* heap_util;
  art::ObjPtr<art::mirror::Class> filter_klass;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;
  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;

  bool stop_reports;
};

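// Per-object callback passed to Heap::VisitObjects() by IterateThroughHeap. Applies the
// heap filter and class filter, invokes the user's heap_iteration_callback, and writes
// back any tag the callback changed.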
static void IterateThroughHeapObjectCallback(art::mirror::Object* obj, void* arg)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
  // Early return, as we can't really stop visiting.
  if (ithd->stop_reports) {
    return;
  }

  art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

  jlong tag = 0;
  ithd->heap_util->GetTags()->GetTag(obj, &tag);

  jlong class_tag = 0;
  art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
  ithd->heap_util->GetTags()->GetTag(klass.Ptr(), &class_tag);
  // For simplicity, treat a tag value of 0 as "not tagged", even if it was set explicitly.

  if (!ithd->ShouldReportByHeapFilter(tag, class_tag)) {
    return;
  }

  // TODO: Handle array_primitive_value_callback.

  if (ithd->filter_klass != nullptr) {
    if (ithd->filter_klass != klass) {
      return;
    }
  }

  jlong size = obj->SizeOf();

  jint length = -1;
  if (obj->IsArrayInstance()) {
    length = obj->AsArray()->GetLength();
  }

  jlong saved_tag = tag;
  jint ret = ithd->callbacks->heap_iteration_callback(class_tag,
                                                      size,
                                                      &tag,
                                                      length,
                                                      const_cast<void*>(ithd->user_data));

  if (tag != saved_tag) {
    ithd->heap_util->GetTags()->Set(obj, tag);
  }

  ithd->stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

  // TODO: Implement array primitive and string primitive callback.
  // TODO: Implement primitive field callback.
}

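// Implements the JVMTI IterateThroughHeap call: visits every object in the heap via
// Heap::VisitObjects() and reports each one through the callback above.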
jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env ATTRIBUTE_UNUSED,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  if (callbacks->array_primitive_value_callback != nullptr) {
    // TODO: Implement.
    return ERR(NOT_IMPLEMENTED);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.

  IterateThroughHeapData ithd(this,
                              heap_filter,
                              soa.Decode<art::mirror::Class>(klass),
                              callbacks,
                              user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(IterateThroughHeapObjectCallback, &ithd);

  return ERR(NONE);
}

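// Helper for FollowReferences. Starting either from the VM's roots or from a supplied
// initial object, it performs a breadth-first traversal of the reference graph,
// reporting each edge through the user's heap_reference_callback and enqueueing targets
// the callback asks to visit.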
class FollowReferencesHelper FINAL {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         art::ObjPtr<art::mirror::Object> initial_object,
                         const jvmtiHeapCallbacks* callbacks,
                         const void* user_data)
      : tag_table_(h->GetTags()),
        initial_object_(initial_object),
        callbacks_(callbacks),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (initial_object_.IsNull()) {
      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);
      art::Runtime::Current()->VisitRoots(&carrv);
      art::Runtime::Current()->VisitImageRoots(&carrv);
      stop_reports_ = carrv.IsStopReports();

      if (stop_reports_) {
        worklist_.clear();
      }
    } else {
      visited_.insert(initial_object_.Ptr());
      worklist_.push_back(initial_object_.Ptr());
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we can implement a DFS and use the work list as a stack.
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
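  // Root visitor used when no initial object is given: adds each root to the worklist
  // (deduplicated through visited_) and reports it as a JVMTI root reference.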
  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        OVERRIDE
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      // We use visited_ to mark roots that have already been added, so we do not need
      // another set.
      if (visited_->find(root_obj) == visited_->end()) {
        visited_->insert(root_obj);
        worklist_->push_back(root_obj);
      }
      ReportRoot(root_obj, info);
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj =
                thread->IsStillStarting() ? nullptr : thread->GetPeer();
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have this info.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    void ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

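  // Dispatches on the object's kind: classes and arrays get special handling; for all
  // other instances the object's reference fields are visited and reported.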
  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // TODO: We'll probably have to rewrite this completely with our own visiting logic, if we
    // want to have a chance of getting the field indices computed halfway efficiently. For
    // now, ignore them altogether.

    struct InstanceReferenceVisitor {
      explicit InstanceReferenceVisitor(FollowReferencesHelper* helper_)
          : helper(helper_), stop_reports(false) {}

      void operator()(art::mirror::Object* src,
                      art::MemberOffset field_offset,
                      bool is_static ATTRIBUTE_UNUSED) const
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          REQUIRES(!*helper->tag_table_->GetAllowDisallowLock()) {
        if (stop_reports) {
          return;
        }

        art::mirror::Object* trg = src->GetFieldObjectReferenceAddr(field_offset)->AsMirrorPtr();
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        // TODO: Implement spec-compliant numbering.
        reference_info.field.index = field_offset.Int32Value();

        jvmtiHeapReferenceKind kind =
            field_offset.Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                ? JVMTI_HEAP_REFERENCE_CLASS
                : JVMTI_HEAP_REFERENCE_FIELD;
        const jvmtiHeapReferenceInfo* reference_info_ptr =
            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

        stop_reports = !helper->ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src, trg);
      }

      void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED)
          const {
        LOG(FATAL) << "Unreachable";
      }
      void VisitRootIfNonNull(
          art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const {
        LOG(FATAL) << "Unreachable";
      }

      // "mutable" required by the visitor API.
      mutable FollowReferencesHelper* helper;
      mutable bool stop_reports;
    };

    InstanceReferenceVisitor visitor(this);
    // Visit references, not native roots.
    obj->VisitReferences<false>(visitor, art::VoidFunctor());

    stop_reports_ = visitor.stop_reports;
  }

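  // Reports the array's class as a JVMTI_HEAP_REFERENCE_CLASS edge and, for object arrays,
  // each non-null element as a JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT edge.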
  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
          array->AsObjectArray<art::mirror::Object>();
      int32_t length = obj_array->GetLength();
      for (int32_t i = 0; i != length; ++i) {
        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
        if (elem != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          memset(&reference_info, 0, sizeof(reference_info));
          reference_info.array.index = i;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem);
          if (stop_reports_) {
            break;
          }
        }
      }
    }
  }

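  // Reports a class's superclass, directly implemented or extended interfaces, class
  // loader, and the values of its declared static reference fields.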
  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    // fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    for (auto& field : klass->GetSFields()) {
      if (!field.IsPrimitiveType()) {
        art::ObjPtr<art::mirror::Object> field_value = field.GetObject(klass);
        if (field_value != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          memset(&reference_info, 0, sizeof(reference_info));

          // TODO: Implement spec-compliant numbering.
          reference_info.field.index = field.GetOffset().Int32Value();

          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                                       &reference_info,
                                                       klass,
                                                       field_value.Ptr());
          if (stop_reports_) {
            return;
          }
        }
      }
    }
  }

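  // Adds obj to the worklist if it has not been visited yet.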
  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

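  // Reports the referrer->referree edge; if the callback did not abort, enqueues the
  // referree when JVMTI_VISIT_OBJECTS is set. Returns false if reporting should stop.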
  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referrer,
                                   art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referrer, referree);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referree);
      }
      return true;
    } else {
      return false;
    }
  }

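  // Invokes the user's heap_reference_callback for the referrer->referree edge and writes
  // back any tag changes the callback made. Returns the callback's visit-control flags,
  // or 0 if the referree is null or reporting has already stopped.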
  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong tag = tag_table_->GetTagOrZero(referree);
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }
    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  ObjectTagTable* tag_table_;
  art::ObjPtr<art::mirror::Object> initial_object_;
  const jvmtiHeapCallbacks* callbacks_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

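// Implements the JVMTI FollowReferences call. Note that heap_filter and klass are
// currently unused, so no filtering is applied yet.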
jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env ATTRIBUTE_UNUSED,
                                      jint heap_filter ATTRIBUTE_UNUSED,
                                      jclass klass ATTRIBUTE_UNUSED,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  if (callbacks->array_primitive_value_callback != nullptr) {
    // TODO: Implement.
    return ERR(NOT_IMPLEMENTED);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    FollowReferencesHelper frh(this,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               user_data);
    frh.Init();
    frh.Work();
  }
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}

jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}

jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}

}  // namespace openjdkjvmti