/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace openjdkjvmti {

namespace {

// Report the contents of a string, if a callback is set.
jint ReportString(art::ObjPtr<art::mirror::Object> obj,
                  jvmtiEnv* env,
                  ObjectTagTable* tag_table,
                  const jvmtiHeapCallbacks* cb,
                  const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->string_primitive_value_callback != nullptr) && obj->IsString()) {
    art::ObjPtr<art::mirror::String> str = obj->AsString();
    int32_t string_length = str->GetLength();
    jvmtiError alloc_error;
    JvmtiUniquePtr<uint16_t[]> data = AllocJvmtiUniquePtr<uint16_t[]>(env,
                                                                      string_length,
                                                                      &alloc_error);
    if (data == nullptr) {
      // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
      // back? For now just warn.
      LOG(WARNING) << "Unable to allocate buffer for string reporting! Silently dropping value.";
      return 0;
    }

    if (str->IsCompressed()) {
      uint8_t* compressed_data = str->GetValueCompressed();
      for (int32_t i = 0; i != string_length; ++i) {
        data[i] = compressed_data[i];
      }
    } else {
      // Can copy directly.
      memcpy(data.get(), str->GetValue(), string_length * sizeof(uint16_t));
    }

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong string_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_string_tag = string_tag;

    jint result = cb->string_primitive_value_callback(class_tag,
                                                      obj->SizeOf(),
                                                      &string_tag,
                                                      data.get(),
                                                      string_length,
                                                      const_cast<void*>(user_data));
    if (string_tag != saved_string_tag) {
      tag_table->Set(obj.Ptr(), string_tag);
    }

    return result;
  }
  return 0;
}

}  // namespace

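// For context, a minimal sketch of the agent-side receiver of the call above (illustrative
// only; "OnStringValue" is a hypothetical name, but the parameter list is the
// jvmtiStringPrimitiveValueCallback signature from the JVMTI spec — the payload arrives as
// UTF-16 code units):
//
//   jint JNICALL OnStringValue(jlong class_tag, jlong size, jlong* tag_ptr,
//                              const jchar* value, jint value_length, void* user_data) {
//     *tag_ptr = 1;                // Writing through tag_ptr re-tags the string object.
//     return 0;                    // Returning JVMTI_VISIT_ABORT would stop the iteration.
//   }
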
struct IterateThroughHeapData {
  IterateThroughHeapData(HeapUtil* _heap_util,
                         jvmtiEnv* _env,
                         jint heap_filter,
                         art::ObjPtr<art::mirror::Class> klass,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : heap_util(_heap_util),
        filter_klass(klass),
        env(_env),
        callbacks(_callbacks),
        user_data(_user_data),
        filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged),
        stop_reports(false) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  HeapUtil* heap_util;
  art::ObjPtr<art::mirror::Class> filter_klass;
  jvmtiEnv* env;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;
  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;

  bool stop_reports;
};
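
// Filter semantics, per the JVMTI spec: each set bit in heap_filter *removes* a category.
// For example, JVMTI_HEAP_FILTER_UNTAGGED | JVMTI_HEAP_FILTER_CLASS_UNTAGGED reports only
// objects that are themselves tagged and whose class is also tagged.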

static void IterateThroughHeapObjectCallback(art::mirror::Object* obj, void* arg)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
  // Early return, as we can't really stop visiting.
  if (ithd->stop_reports) {
    return;
  }

  art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

  jlong tag = 0;
  ithd->heap_util->GetTags()->GetTag(obj, &tag);

  jlong class_tag = 0;
  art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
  ithd->heap_util->GetTags()->GetTag(klass.Ptr(), &class_tag);
  // For simplicity, even if we find a tag = 0, assume 0 = not tagged.

  if (!ithd->ShouldReportByHeapFilter(tag, class_tag)) {
    return;
  }

  if (ithd->filter_klass != nullptr) {
    if (ithd->filter_klass != klass) {
      return;
    }
  }

  jlong size = obj->SizeOf();

  jint length = -1;
  if (obj->IsArrayInstance()) {
    length = obj->AsArray()->GetLength();
  }

  jlong saved_tag = tag;
  jint ret = ithd->callbacks->heap_iteration_callback(class_tag,
                                                      size,
                                                      &tag,
                                                      length,
                                                      const_cast<void*>(ithd->user_data));

  if (tag != saved_tag) {
    ithd->heap_util->GetTags()->Set(obj, tag);
  }

  ithd->stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

  if (!ithd->stop_reports) {
    jint string_ret = ReportString(obj,
                                   ithd->env,
                                   ithd->heap_util->GetTags(),
                                   ithd->callbacks,
                                   ithd->user_data);
    ithd->stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
  }

  // TODO: Implement array primitive callback.
  // TODO: Implement primitive field callback.
}

jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  if (callbacks->array_primitive_value_callback != nullptr) {
    // TODO: Implement.
    return ERR(NOT_IMPLEMENTED);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.

  IterateThroughHeapData ithd(this,
                              env,
                              heap_filter,
                              soa.Decode<art::mirror::Class>(klass),
                              callbacks,
                              user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(IterateThroughHeapObjectCallback, &ithd);

  return ERR(NONE);
}
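
// A hedged agent-side sketch of driving this entry point (illustrative; assumes "jvmti" is a
// valid jvmtiEnv* whose environment has the can_tag_objects capability):
//
//   jint JNICALL CountObject(jlong class_tag, jlong size, jlong* tag_ptr,
//                            jint length, void* user_data) {
//     ++*static_cast<size_t*>(user_data);  // Count every reported object.
//     return 0;
//   }
//
//   jvmtiHeapCallbacks callbacks = {};
//   callbacks.heap_iteration_callback = CountObject;
//   size_t count = 0;
//   jvmti->IterateThroughHeap(0, nullptr, &callbacks, &count);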

class FollowReferencesHelper FINAL {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         jvmtiEnv* jvmti_env,
                         art::ObjPtr<art::mirror::Object> initial_object,
                         const jvmtiHeapCallbacks* callbacks,
                         const void* user_data)
      : env(jvmti_env),
        tag_table_(h->GetTags()),
        initial_object_(initial_object),
        callbacks_(callbacks),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (initial_object_.IsNull()) {
      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);

      // We need precise info (e.g., vregs).
      constexpr art::VisitRootFlags kRootFlags = static_cast<art::VisitRootFlags>(
          art::VisitRootFlags::kVisitRootFlagAllRoots | art::VisitRootFlags::kVisitRootFlagPrecise);
      art::Runtime::Current()->VisitRoots(&carrv, kRootFlags);

      art::Runtime::Current()->VisitImageRoots(&carrv);
      stop_reports_ = carrv.IsStopReports();

      if (stop_reports_) {
        worklist_.clear();
      }
    } else {
      visited_.insert(initial_object_.Ptr());
      worklist_.push_back(initial_object_.Ptr());
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we could implement a DFS and use the work list as a stack.
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        OVERRIDE
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      // We use visited_ to mark roots already, so we do not need another set.
      if (visited_->find(root_obj) == visited_->end()) {
        visited_->insert(root_obj);
        worklist_->push_back(root_obj);
      }
      ReportRoot(root_obj, info);
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have this info.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->stack_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->stack_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          auto& java_info = static_cast<const art::JavaFrameRootInfo&>(info);
          ref_info->stack_local.slot = static_cast<jint>(java_info.GetVReg());
          const art::StackVisitor* visitor = java_info.GetVisitor();
          ref_info->stack_local.location =
              static_cast<jlocation>(visitor->GetDexPc(false /* abort_on_failure */));
          ref_info->stack_local.depth = static_cast<jint>(visitor->GetFrameDepth());
          art::ArtMethod* method = visitor->GetMethod();
          if (method != nullptr) {
            ref_info->stack_local.method = art::jni::EncodeArtMethod(method);
          }

          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;
        }

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    void ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // TODO: We'll probably have to rewrite this completely with our own visiting logic, if we
    // want to have a chance of getting the field indices computed halfway efficiently. For
    // now, ignore them altogether.

    struct InstanceReferenceVisitor {
      explicit InstanceReferenceVisitor(FollowReferencesHelper* helper_)
          : helper(helper_), stop_reports(false) {}

      void operator()(art::mirror::Object* src,
                      art::MemberOffset field_offset,
                      bool is_static ATTRIBUTE_UNUSED) const
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          REQUIRES(!*helper->tag_table_->GetAllowDisallowLock()) {
        if (stop_reports) {
          return;
        }

        art::mirror::Object* trg = src->GetFieldObjectReferenceAddr(field_offset)->AsMirrorPtr();
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        // TODO: Implement spec-compliant numbering.
        reference_info.field.index = field_offset.Int32Value();

        jvmtiHeapReferenceKind kind =
            field_offset.Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                ? JVMTI_HEAP_REFERENCE_CLASS
                : JVMTI_HEAP_REFERENCE_FIELD;
        const jvmtiHeapReferenceInfo* reference_info_ptr =
            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

        stop_reports = !helper->ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src, trg);
      }

      void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED)
          const {
        LOG(FATAL) << "Unreachable";
      }
      void VisitRootIfNonNull(
          art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const {
        LOG(FATAL) << "Unreachable";
      }

      // "mutable" required by the visitor API.
      mutable FollowReferencesHelper* helper;
      mutable bool stop_reports;
    };

    InstanceReferenceVisitor visitor(this);
    // Visit references, not native roots.
    obj->VisitReferences<false>(visitor, art::VoidFunctor());

    stop_reports_ = visitor.stop_reports;

    if (!stop_reports_) {
      jint string_ret = ReportString(obj, env, tag_table_, callbacks_, user_data_);
      stop_reports_ = (string_ret & JVMTI_VISIT_ABORT) != 0;
    }
  }

  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
          array->AsObjectArray<art::mirror::Object>();
      int32_t length = obj_array->GetLength();
      for (int32_t i = 0; i != length; ++i) {
        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
        if (elem != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          reference_info.array.index = i;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem);
          if (stop_reports_) {
            break;
          }
        }
      }
    }
  }

  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    // fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    for (auto& field : klass->GetSFields()) {
      if (!field.IsPrimitiveType()) {
        art::ObjPtr<art::mirror::Object> field_value = field.GetObject(klass);
        if (field_value != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          memset(&reference_info, 0, sizeof(reference_info));

          // TODO: Implement spec-compliant numbering.
          reference_info.field.index = field.GetOffset().Int32Value();

          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                                       &reference_info,
                                                       klass,
                                                       field_value.Ptr());
          if (stop_reports_) {
            return;
          }
        }
      }
    }
  }

  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

  // Report the reference; unless the callback aborts, enqueue the referree (the target object)
  // so that it is visited in turn. Note: all callers pass (referrer, referree), so the
  // parameters are named accordingly here, matching ReportReference below.
  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referrer,
                                   art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referrer, referree);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referree);
      }
      return true;
    } else {
      return false;
    }
  }

  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong tag = tag_table_->GetTagOrZero(referree);
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }
    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  jvmtiEnv* env;
  ObjectTagTable* tag_table_;
  art::ObjPtr<art::mirror::Object> initial_object_;
  const jvmtiHeapCallbacks* callbacks_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env,
                                      jint heap_filter ATTRIBUTE_UNUSED,
                                      jclass klass ATTRIBUTE_UNUSED,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  if (callbacks->array_primitive_value_callback != nullptr) {
    // TODO: Implement.
    return ERR(NOT_IMPLEMENTED);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    FollowReferencesHelper frh(this,
                               env,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               user_data);
    frh.Init();
    frh.Work();
  }
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}
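
// The corresponding agent-side sketch (illustrative only; "OnReference" is a hypothetical
// name, but the parameter list is the jvmtiHeapReferenceCallback signature from the spec):
//
//   jint JNICALL OnReference(jvmtiHeapReferenceKind kind, const jvmtiHeapReferenceInfo* info,
//                            jlong class_tag, jlong referrer_class_tag, jlong size,
//                            jlong* tag_ptr, jlong* referrer_tag_ptr, jint length,
//                            void* user_data) {
//     // JVMTI_VISIT_OBJECTS asks the VM to keep following references out of this object.
//     return JVMTI_VISIT_OBJECTS;
//   }
//
//   jvmtiHeapCallbacks callbacks = {};
//   callbacks.heap_reference_callback = OnReference;
//   // A null initial_object starts from the heap roots, as Init() above does.
//   jvmti->FollowReferences(0, nullptr, nullptr, &callbacks, nullptr);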

jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}
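
// Callers own the returned array and must release it with Deallocate. A minimal usage sketch
// (illustrative; assumes a valid jvmtiEnv* named "jvmti"):
//
//   jint count = 0;
//   jclass* classes = nullptr;
//   if (jvmti->GetLoadedClasses(&count, &classes) == JVMTI_ERROR_NONE) {
//     // ... use classes[0] .. classes[count - 1] ...
//     jvmti->Deallocate(reinterpret_cast<unsigned char*>(classes));
//   }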

jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}
}  // namespace openjdkjvmti