/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace openjdkjvmti {

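// The JVMTI_HEAP_FILTER_* bits passed to IterateThroughHeap name categories of objects to
// *exclude* from reporting; IterateThroughHeapData below decodes them once up front. As an
// illustrative agent-side example (not a value used in this file), an agent that only wants
// untagged instances of tagged classes would pass:
//   jint heap_filter = JVMTI_HEAP_FILTER_TAGGED | JVMTI_HEAP_FILTER_CLASS_UNTAGGED;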
struct IterateThroughHeapData {
  IterateThroughHeapData(HeapUtil* _heap_util,
                         jint heap_filter,
                         art::ObjPtr<art::mirror::Class> klass,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : heap_util(_heap_util),
        filter_klass(klass),
        callbacks(_callbacks),
        user_data(_user_data),
        filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged),
        stop_reports(false) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  HeapUtil* heap_util;
  art::ObjPtr<art::mirror::Class> filter_klass;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;
  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;

  bool stop_reports;
};

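// Per-object visitor passed to gc::Heap::VisitObjects: applies the heap filter and the optional
// class filter, forwards matching objects to the agent's heap_iteration_callback, and writes any
// tag update the callback made back into the tag table.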
static void IterateThroughHeapObjectCallback(art::mirror::Object* obj, void* arg)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
  // Early return, as we can't really stop visiting.
  if (ithd->stop_reports) {
    return;
  }

  art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

  jlong tag = 0;
  ithd->heap_util->GetTags()->GetTag(obj, &tag);

  jlong class_tag = 0;
  art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
  ithd->heap_util->GetTags()->GetTag(klass.Ptr(), &class_tag);
  // For simplicity, treat a tag value of 0 as "not tagged."

  if (!ithd->ShouldReportByHeapFilter(tag, class_tag)) {
    return;
  }

  // TODO: Handle array_primitive_value_callback.

  if (ithd->filter_klass != nullptr) {
    if (ithd->filter_klass != klass) {
      return;
    }
  }

  jlong size = obj->SizeOf();

  jint length = -1;
  if (obj->IsArrayInstance()) {
    length = obj->AsArray()->GetLength();
  }

  jlong saved_tag = tag;
  jint ret = ithd->callbacks->heap_iteration_callback(class_tag,
                                                      size,
                                                      &tag,
                                                      length,
                                                      const_cast<void*>(ithd->user_data));

  if (tag != saved_tag) {
    ithd->heap_util->GetTags()->Set(obj, tag);
  }

  ithd->stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

  // TODO: Implement the array primitive and string primitive callbacks.
  // TODO: Implement the primitive field callback.
}

jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env ATTRIBUTE_UNUSED,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  if (callbacks->array_primitive_value_callback != nullptr) {
    // TODO: Implement.
    return ERR(NOT_IMPLEMENTED);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);      // Now we know we have the shared lock.

  IterateThroughHeapData ithd(this,
                              heap_filter,
                              soa.Decode<art::mirror::Class>(klass),
                              callbacks,
                              user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(IterateThroughHeapObjectCallback, &ithd);

  return ERR(NONE);
}

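// A minimal agent-side usage sketch for IterateThroughHeap (not part of this file): count every
// object in the heap. The callback name and the counting scheme are illustrative assumptions;
// the callback signature follows the JVMTI specification.
//
//   static jint JNICALL CountObject(jlong class_tag, jlong size, jlong* tag_ptr,
//                                   jint length, void* user_data) {
//     ++*static_cast<size_t*>(user_data);
//     return 0;  // No visit-control flags needed; JVMTI_VISIT_ABORT would stop the iteration.
//   }
//
//   size_t count = 0;
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_iteration_callback = CountObject;
//   jvmti_env->IterateThroughHeap(0, nullptr, &callbacks, &count);

// Helper that implements FollowReferences: Init() collects and reports the heap roots and seeds
// the worklist, Work() then walks the object graph breadth-first, reporting each reference edge
// through the agent's callbacks. Note that the initial_object parameter is currently unused, so
// traversal always starts at the roots.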
class FollowReferencesHelper FINAL {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         art::ObjPtr<art::mirror::Object> initial_object ATTRIBUTE_UNUSED,
                         const jvmtiHeapCallbacks* callbacks,
                         const void* user_data)
      : tag_table_(h->GetTags()),
        callbacks_(callbacks),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);
    art::Runtime::Current()->VisitRoots(&carrv);
    art::Runtime::Current()->VisitImageRoots(&carrv);
    stop_reports_ = carrv.IsStopReports();

    if (stop_reports_) {
      worklist_.clear();
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we can implement a DFS and use the work list as a stack.
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
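  // Root visitor handed to Runtime::VisitRoots/VisitImageRoots: reports every root through the
  // agent callbacks and seeds the BFS worklist with roots that have not been seen before.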
  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        OVERRIDE
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      // We use visited_ to de-duplicate roots as well, so we do not need a separate set for that.
      if (visited_->find(root_obj) == visited_->end()) {
        visited_->insert(root_obj);
        worklist_->push_back(root_obj);
      }
      ReportRoot(root_obj, info);
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeer();
            }
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have the local reference's actual frame; report the current one.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    void ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

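  // Dispatches on the object's kind: classes and arrays get specialized handling below; for all
  // other instances the object's reference fields are reported via Object::VisitReferences.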
  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // TODO: We'll probably have to rewrite this completely with our own visiting logic, if we
    //       want to have a chance of getting the field indices computed halfway efficiently. For
    //       now, ignore them altogether.

    struct InstanceReferenceVisitor {
      explicit InstanceReferenceVisitor(FollowReferencesHelper* helper_)
          : helper(helper_), stop_reports(false) {}

      void operator()(art::mirror::Object* src,
                      art::MemberOffset field_offset,
                      bool is_static ATTRIBUTE_UNUSED) const
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          REQUIRES(!*helper->tag_table_->GetAllowDisallowLock()) {
        if (stop_reports) {
          return;
        }

        art::mirror::Object* trg = src->GetFieldObjectReferenceAddr(field_offset)->AsMirrorPtr();
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        // TODO: Implement spec-compliant numbering.
        reference_info.field.index = field_offset.Int32Value();

        jvmtiHeapReferenceKind kind =
            field_offset.Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                ? JVMTI_HEAP_REFERENCE_CLASS
                : JVMTI_HEAP_REFERENCE_FIELD;
        const jvmtiHeapReferenceInfo* reference_info_ptr =
            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

        stop_reports = !helper->ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src, trg);
      }

      void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED)
          const {
        LOG(FATAL) << "Unreachable";
      }
      void VisitRootIfNonNull(
          art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const {
        LOG(FATAL) << "Unreachable";
      }

      // "mutable" required by the visitor API.
      mutable FollowReferencesHelper* helper;
      mutable bool stop_reports;
    };

    InstanceReferenceVisitor visitor(this);
    // Visit references, not native roots.
    obj->VisitReferences<false>(visitor, art::VoidFunctor());

    stop_reports_ = visitor.stop_reports;
  }

  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
          array->AsObjectArray<art::mirror::Object>();
      int32_t length = obj_array->GetLength();
      for (int32_t i = 0; i != length; ++i) {
        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
        if (elem != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          // Zero-initialize so only the array index carries data, matching the other call sites.
          memset(&reference_info, 0, sizeof(reference_info));
          reference_info.array.index = i;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem);
          if (stop_reports_) {
            break;
          }
        }
      }
    }
  }

  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved
    //       ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::GetDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    //       fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    for (auto& field : klass->GetSFields()) {
      if (!field.IsPrimitiveType()) {
        art::ObjPtr<art::mirror::Object> field_value = field.GetObject(klass);
        if (field_value != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          memset(&reference_info, 0, sizeof(reference_info));

          // TODO: Implement spec-compliant numbering.
          reference_info.field.index = field.GetOffset().Int32Value();

          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                                       &reference_info,
                                                       klass,
                                                       field_value.Ptr());
          if (stop_reports_) {
            return;
          }
        }
      }
    }
  }

  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

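  // Reports a single reference edge and, if the callback asked for JVMTI_VISIT_OBJECTS, enqueues
  // the referred-to object for later visiting. Returns true if the traversal should continue,
  // i.e. the callback did not request JVMTI_VISIT_ABORT.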
  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referrer,
                                   art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referrer, referree);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referree);
      }
      return true;
    } else {
      return false;
    }
  }

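  // Invokes the agent's heap_reference_callback for one edge and writes any tag updates the
  // callback made through tag_ptr/referrer_tag_ptr back into the tag table. Returns the
  // callback's visit-control flags (0 when the referree is null or reporting has stopped).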
  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong tag = tag_table_->GetTagOrZero(referree);
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }
    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  ObjectTagTable* tag_table_;
  const jvmtiHeapCallbacks* callbacks_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env ATTRIBUTE_UNUSED,
                                      jint heap_filter ATTRIBUTE_UNUSED,
                                      jclass klass ATTRIBUTE_UNUSED,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  if (callbacks->array_primitive_value_callback != nullptr) {
    // TODO: Implement.
    return ERR(NOT_IMPLEMENTED);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);      // Now we know we have the shared lock.

  art::Runtime::Current()->GetHeap()->IncrementDisableMovingGC(self);
  {
    art::ObjPtr<art::mirror::Object> o_initial = soa.Decode<art::mirror::Object>(initial_object);

    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    FollowReferencesHelper frh(this, o_initial, callbacks, user_data);
    frh.Init();
    frh.Work();
  }
  art::Runtime::Current()->GetHeap()->DecrementDisableMovingGC(self);

  return ERR(NONE);
}

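// A minimal agent-side usage sketch for FollowReferences (not part of this file): tag every
// object reachable from the heap roots. The callback name and the tag value used are
// illustrative assumptions; the callback signature and visit-control flags come from the JVMTI
// specification.
//
//   static jint JNICALL TagReachable(jvmtiHeapReferenceKind kind,
//                                    const jvmtiHeapReferenceInfo* info, jlong class_tag,
//                                    jlong referrer_class_tag, jlong size, jlong* tag_ptr,
//                                    jlong* referrer_tag_ptr, jint length, void* user_data) {
//     if (*tag_ptr == 0) {
//       *tag_ptr = 1;  // Mark as reachable.
//     }
//     return JVMTI_VISIT_OBJECTS;  // Keep following references out of this object.
//   }
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback = TagReachable;
//   jvmti_env->FollowReferences(0, nullptr, nullptr, &callbacks, nullptr);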
jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}

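// Agent-side note (illustrative, not part of this file): the returned array is allocated with
// jvmtiEnv::Allocate and each element is a JNI local reference, so a caller is expected to
// release both once done, e.g.:
//
//   jint count = 0;
//   jclass* classes = nullptr;
//   if (jvmti_env->GetLoadedClasses(&count, &classes) == JVMTI_ERROR_NONE) {
//     for (jint i = 0; i < count; ++i) {
//       jni_env->DeleteLocalRef(classes[i]);
//     }
//     jvmti_env->Deallocate(reinterpret_cast<unsigned char*>(classes));
//   }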
jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}
}  // namespace openjdkjvmti