/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace openjdkjvmti {

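// State bundle for a single IterateThroughHeap call: the tag table (via HeapUtil), the optional
// class filter, the user callbacks/user_data, and the decoded JVMTI_HEAP_FILTER_* bits that
// ShouldReportByHeapFilter evaluates for every visited object.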
struct IterateThroughHeapData {
  IterateThroughHeapData(HeapUtil* _heap_util,
                         jint heap_filter,
                         art::ObjPtr<art::mirror::Class> klass,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : heap_util(_heap_util),
        filter_klass(klass),
        callbacks(_callbacks),
        user_data(_user_data),
        filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged),
        stop_reports(false) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  HeapUtil* heap_util;
  art::ObjPtr<art::mirror::Class> filter_klass;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;
  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;

  bool stop_reports;
};

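// Callback invoked by Heap::VisitObjects once per live object. It applies the heap filter and the
// optional class filter, forwards the object to the user's heap_iteration_callback, and writes
// back any tag update the callback made.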
static void IterateThroughHeapObjectCallback(art::mirror::Object* obj, void* arg)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
  // Early return, as we can't really stop visiting.
  if (ithd->stop_reports) {
    return;
  }

  art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

  jlong tag = 0;
  ithd->heap_util->GetTags()->GetTag(obj, &tag);

  jlong class_tag = 0;
  art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
  ithd->heap_util->GetTags()->GetTag(klass.Ptr(), &class_tag);
  // For simplicity, even if we find a tag = 0, assume 0 = not tagged.

  if (!ithd->ShouldReportByHeapFilter(tag, class_tag)) {
    return;
  }

  // TODO: Handle array_primitive_value_callback.

  if (ithd->filter_klass != nullptr) {
    if (ithd->filter_klass != klass) {
      return;
    }
  }

  jlong size = obj->SizeOf();

  jint length = -1;
  if (obj->IsArrayInstance()) {
    length = obj->AsArray()->GetLength();
  }

  jlong saved_tag = tag;
  jint ret = ithd->callbacks->heap_iteration_callback(class_tag,
                                                      size,
                                                      &tag,
                                                      length,
                                                      const_cast<void*>(ithd->user_data));

  if (tag != saved_tag) {
    ithd->heap_util->GetTags()->Set(obj, tag);
  }

  ithd->stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

  // TODO: Implement array primitive and string primitive callback.
  // TODO: Implement primitive field callback.
}

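// Implementation of the JVMTI IterateThroughHeap entry point. As an illustration only (not part
// of this file), an agent might drive it roughly like the sketch below; CountingCallback and
// jvmti_env are hypothetical names:
//
//   jint JNICALL CountingCallback(jlong class_tag, jlong size, jlong* tag_ptr, jint length,
//                                 void* user_data) {
//     ++*static_cast<size_t*>(user_data);  // Count every object that passes the filters.
//     return JVMTI_VISIT_OBJECTS;          // Only JVMTI_VISIT_ABORT stops this iteration.
//   }
//
//   size_t count = 0;
//   jvmtiHeapCallbacks callbacks = {};     // Zero-initialize; unused callbacks stay null.
//   callbacks.heap_iteration_callback = CountingCallback;
//   jvmti_env->IterateThroughHeap(0 /* heap_filter */, nullptr /* klass */, &callbacks, &count);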
jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env ATTRIBUTE_UNUSED,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  if (callbacks->array_primitive_value_callback != nullptr) {
    // TODO: Implement.
    return ERR(NOT_IMPLEMENTED);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.

  IterateThroughHeapData ithd(this,
                              heap_filter,
                              soa.Decode<art::mirror::Class>(klass),
                              callbacks,
                              user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(IterateThroughHeapObjectCallback, &ithd);

  return ERR(NONE);
}

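// Helper for FollowReferences: performs a breadth-first traversal of the object graph, starting
// either from the VM's roots (when no initial object is given) or from a single initial object,
// and reports every discovered reference through the user's heap_reference_callback.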
class FollowReferencesHelper FINAL {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         art::ObjPtr<art::mirror::Object> initial_object,
                         const jvmtiHeapCallbacks* callbacks,
                         const void* user_data)
      : tag_table_(h->GetTags()),
        initial_object_(initial_object),
        callbacks_(callbacks),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (initial_object_.IsNull()) {
      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);

      // We need precise info (e.g., vregs).
      constexpr art::VisitRootFlags kRootFlags = static_cast<art::VisitRootFlags>(
          art::VisitRootFlags::kVisitRootFlagAllRoots | art::VisitRootFlags::kVisitRootFlagPrecise);
      art::Runtime::Current()->VisitRoots(&carrv, kRootFlags);

      art::Runtime::Current()->VisitImageRoots(&carrv);
      stop_reports_ = carrv.IsStopReports();

      if (stop_reports_) {
        worklist_.clear();
      }
    } else {
      visited_.insert(initial_object_.Ptr());
      worklist_.push_back(initial_object_.Ptr());
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we could implement a DFS and use the work list as a stack.
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        OVERRIDE
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      // We use visited_ to mark roots we have already seen, so we do not need another set.
      if (visited_->find(root_obj) == visited_->end()) {
        visited_->insert(root_obj);
        worklist_->push_back(root_obj);
      }
      ReportRoot(root_obj, info);
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

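    // Map an ART root type to the corresponding JVMTI heap reference kind, filling in the
    // per-kind reference info (thread id, thread tag, stack slot, method, ...) where the
    // runtime can provide it.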
    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have this info.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->stack_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->stack_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          auto& java_info = static_cast<const art::JavaFrameRootInfo&>(info);
          ref_info->stack_local.slot = static_cast<jint>(java_info.GetVReg());
          const art::StackVisitor* visitor = java_info.GetVisitor();
          ref_info->stack_local.location =
              static_cast<jlocation>(visitor->GetDexPc(false /* abort_on_failure */));
          ref_info->stack_local.depth = static_cast<jint>(visitor->GetFrameDepth());
          art::ArtMethod* method = visitor->GetMethod();
          if (method != nullptr) {
            ref_info->stack_local.method = art::jni::EncodeArtMethod(method);
          }

          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;
        }

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    void ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

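  // Dispatch for a single dequeued object: classes and arrays get specialized visits; all other
  // instances have their reference fields reported via an InstanceReferenceVisitor.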
  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // TODO: We'll probably have to rewrite this completely with our own visiting logic, if we
    // want to have a chance of getting the field indices computed halfway efficiently. For
    // now, ignore them altogether.

    struct InstanceReferenceVisitor {
      explicit InstanceReferenceVisitor(FollowReferencesHelper* helper_)
          : helper(helper_), stop_reports(false) {}

      void operator()(art::mirror::Object* src,
                      art::MemberOffset field_offset,
                      bool is_static ATTRIBUTE_UNUSED) const
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          REQUIRES(!*helper->tag_table_->GetAllowDisallowLock()) {
        if (stop_reports) {
          return;
        }

        art::mirror::Object* trg = src->GetFieldObjectReferenceAddr(field_offset)->AsMirrorPtr();
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        // TODO: Implement spec-compliant numbering.
        reference_info.field.index = field_offset.Int32Value();

        jvmtiHeapReferenceKind kind =
            field_offset.Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                ? JVMTI_HEAP_REFERENCE_CLASS
                : JVMTI_HEAP_REFERENCE_FIELD;
        const jvmtiHeapReferenceInfo* reference_info_ptr =
            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

        stop_reports = !helper->ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src, trg);
      }

      void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED)
          const {
        LOG(FATAL) << "Unreachable";
      }
      void VisitRootIfNonNull(
          art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const {
        LOG(FATAL) << "Unreachable";
      }

      // "mutable" required by the visitor API.
      mutable FollowReferencesHelper* helper;
      mutable bool stop_reports;
    };

    InstanceReferenceVisitor visitor(this);
    // Visit references, not native roots.
    obj->VisitReferences<false>(visitor, art::VoidFunctor());

    stop_reports_ = visitor.stop_reports;
  }

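  // Arrays: report the reference to the array class first, then (for object arrays) one
  // JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT reference per non-null element.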
  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
          array->AsObjectArray<art::mirror::Object>();
      int32_t length = obj_array->GetLength();
      for (int32_t i = 0; i != length; ++i) {
        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
        if (elem != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          reference_info.array.index = i;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem);
          if (stop_reports_) {
            break;
          }
        }
      }
    }
  }

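  // Classes: report the superclass, directly implemented or extended interfaces, the defining
  // class loader, and the values of declared static reference fields.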
  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    // fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    for (auto& field : klass->GetSFields()) {
      if (!field.IsPrimitiveType()) {
        art::ObjPtr<art::mirror::Object> field_value = field.GetObject(klass);
        if (field_value != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          memset(&reference_info, 0, sizeof(reference_info));

          // TODO: Implement spec-compliant numbering.
          reference_info.field.index = field.GetOffset().Int32Value();

          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                                       &reference_info,
                                                       klass,
                                                       field_value.Ptr());
          if (stop_reports_) {
            return;
          }
        }
      }
    }
  }

  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

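  // Report a single reference from referrer to referree and, unless the callback asked to abort,
  // enqueue the referree for later visiting when JVMTI_VISIT_OBJECTS is set. Returns false if
  // reporting should stop.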
  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referrer,
                                   art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referrer, referree);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referree);
      }
      return true;
    } else {
      return false;
    }
  }

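  // Invoke the user's heap_reference_callback for a single reference. Tags are read before the
  // call and written back afterwards if the callback modified them; the referrer's tag pointer
  // aliases the referree's tag when an object references itself.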
  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong tag = tag_table_->GetTagOrZero(referree);
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }
    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  ObjectTagTable* tag_table_;
  art::ObjPtr<art::mirror::Object> initial_object_;
  const jvmtiHeapCallbacks* callbacks_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

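// Implementation of the JVMTI FollowReferences entry point. The traversal runs with all threads
// suspended and, for concurrent moving collectors, with moving GC disabled, so the raw
// art::mirror::Object pointers on the work list stay stable while the helper walks the graph.
// Agent-side usage mirrors the IterateThroughHeap sketch above, but with
// callbacks.heap_reference_callback set instead.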
jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env ATTRIBUTE_UNUSED,
                                      jint heap_filter ATTRIBUTE_UNUSED,
                                      jclass klass ATTRIBUTE_UNUSED,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  if (callbacks->array_primitive_value_callback != nullptr) {
    // TODO: Implement.
    return ERR(NOT_IMPLEMENTED);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    FollowReferencesHelper frh(this,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               user_data);
    frh.Init();
    frh.Work();
  }
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}

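// Snapshot all loaded classes: each class is returned as a new JNI local reference in a
// JVMTI-allocated jclass array, with the count stored in *class_count_ptr.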
jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}

jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}

}  // namespace openjdkjvmti