/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "primitive.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace openjdkjvmti {

namespace {

// Report the contents of a string, if a callback is set.
jint ReportString(art::ObjPtr<art::mirror::Object> obj,
                  jvmtiEnv* env,
                  ObjectTagTable* tag_table,
                  const jvmtiHeapCallbacks* cb,
                  const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->string_primitive_value_callback != nullptr) && obj->IsString()) {
    art::ObjPtr<art::mirror::String> str = obj->AsString();
    int32_t string_length = str->GetLength();
    jvmtiError alloc_error;
    JvmtiUniquePtr<uint16_t[]> data = AllocJvmtiUniquePtr<uint16_t[]>(env,
                                                                     string_length,
                                                                     &alloc_error);
    if (data == nullptr) {
      // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
      // back? For now just warn.
      LOG(WARNING) << "Unable to allocate buffer for string reporting! Silently dropping value.";
      return 0;
    }

    if (str->IsCompressed()) {
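      // Compressed strings store their characters as 8-bit values; widen each one
      // into the 16-bit output buffer.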
      uint8_t* compressed_data = str->GetValueCompressed();
      for (int32_t i = 0; i != string_length; ++i) {
        data[i] = compressed_data[i];
      }
    } else {
      // Can copy directly.
      memcpy(data.get(), str->GetValue(), string_length * sizeof(uint16_t));
    }

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong string_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_string_tag = string_tag;

    jint result = cb->string_primitive_value_callback(class_tag,
                                                      obj->SizeOf(),
                                                      &string_tag,
                                                      data.get(),
                                                      string_length,
                                                      const_cast<void*>(user_data));
    if (string_tag != saved_string_tag) {
      tag_table->Set(obj.Ptr(), string_tag);
    }

    return result;
  }
  return 0;
}

// Report the contents of a primitive array, if a callback is set.
jint ReportPrimitiveArray(art::ObjPtr<art::mirror::Object> obj,
                          jvmtiEnv* env,
                          ObjectTagTable* tag_table,
                          const jvmtiHeapCallbacks* cb,
                          const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->array_primitive_value_callback != nullptr) &&
      obj->IsArrayInstance() &&
      !obj->IsObjectArray()) {
    art::ObjPtr<art::mirror::Array> array = obj->AsArray();
    int32_t array_length = array->GetLength();
    size_t component_size = array->GetClass()->GetComponentSize();
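    // The jvmtiPrimitiveType constants are defined as the ASCII codes of the JNI type
    // descriptor characters ('Z', 'B', 'C', ...), so the first character of the ART
    // primitive descriptor maps directly onto the JVMTI enum; the DCHECK below verifies
    // the mapping.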
    art::Primitive::Type art_prim_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong array_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_array_tag = array_tag;

    jint result;
    if (array_length == 0) {
      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  0,
                                                  prim_type,
                                                  nullptr,
                                                  const_cast<void*>(user_data));
    } else {
      jvmtiError alloc_error;
      JvmtiUniquePtr<char[]> data = AllocJvmtiUniquePtr<char[]>(env,
                                                                array_length * component_size,
                                                                &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        // back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for array reporting! Silently dropping value.";
        return 0;
      }

      memcpy(data.get(), array->GetRawData(component_size, 0), array_length * component_size);

      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  array_length,
                                                  prim_type,
                                                  data.get(),
                                                  const_cast<void*>(user_data));
    }

    if (array_tag != saved_array_tag) {
      tag_table->Set(obj.Ptr(), array_tag);
    }

    return result;
  }
  return 0;
}

}  // namespace

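// State threaded through Heap::VisitObjects for IterateThroughHeap: the environment,
// the callbacks, and the decoded heap-filter flags.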
struct IterateThroughHeapData {
  IterateThroughHeapData(HeapUtil* _heap_util,
                         jvmtiEnv* _env,
                         jint heap_filter,
                         art::ObjPtr<art::mirror::Class> klass,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : heap_util(_heap_util),
        filter_klass(klass),
        env(_env),
        callbacks(_callbacks),
        user_data(_user_data),
        filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged),
        stop_reports(false) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  HeapUtil* heap_util;
  art::ObjPtr<art::mirror::Class> filter_klass;
  jvmtiEnv* env;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;
  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;

  bool stop_reports;
};

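// Callback for Heap::VisitObjects: applies the heap and class filters, invokes the
// agent's heap_iteration_callback, and then reports string and primitive-array
// contents if the corresponding callbacks are set.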
static void IterateThroughHeapObjectCallback(art::mirror::Object* obj, void* arg)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
  // Early return, as we can't really stop visiting.
  if (ithd->stop_reports) {
    return;
  }

  art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

  jlong tag = 0;
  ithd->heap_util->GetTags()->GetTag(obj, &tag);

  jlong class_tag = 0;
  art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
  ithd->heap_util->GetTags()->GetTag(klass.Ptr(), &class_tag);
  // For simplicity, even if we find a tag = 0, assume 0 = not tagged.

  if (!ithd->ShouldReportByHeapFilter(tag, class_tag)) {
    return;
  }

  if (ithd->filter_klass != nullptr) {
    if (ithd->filter_klass != klass) {
      return;
    }
  }

  jlong size = obj->SizeOf();

  jint length = -1;
  if (obj->IsArrayInstance()) {
    length = obj->AsArray()->GetLength();
  }

  jlong saved_tag = tag;
  jint ret = ithd->callbacks->heap_iteration_callback(class_tag,
                                                      size,
                                                      &tag,
                                                      length,
                                                      const_cast<void*>(ithd->user_data));

  if (tag != saved_tag) {
    ithd->heap_util->GetTags()->Set(obj, tag);
  }

  ithd->stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

  if (!ithd->stop_reports) {
    jint string_ret = ReportString(obj,
                                   ithd->env,
                                   ithd->heap_util->GetTags(),
                                   ithd->callbacks,
                                   ithd->user_data);
    ithd->stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
  }

  if (!ithd->stop_reports) {
    jint array_ret = ReportPrimitiveArray(obj,
                                          ithd->env,
                                          ithd->heap_util->GetTags(),
                                          ithd->callbacks,
                                          ithd->user_data);
    ithd->stop_reports = (array_ret & JVMTI_VISIT_ABORT) != 0;
  }

  // TODO: Implement primitive field callback.
}

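// Example agent-side usage (a sketch only; everything except the standard JVMTI API is
// hypothetical). The iteration callback returns JVMTI_VISIT_OBJECTS to continue or
// JVMTI_VISIT_ABORT to stop the iteration:
//
//   jint JNICALL CountObjects(jlong class_tag, jlong size, jlong* tag_ptr,
//                             jint length, void* user_data) {
//     ++*static_cast<size_t*>(user_data);
//     return JVMTI_VISIT_OBJECTS;
//   }
//
//   size_t count = 0;
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_iteration_callback = CountObjects;
//   jvmti_env->IterateThroughHeap(0, nullptr, &callbacks, &count);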
jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.

  IterateThroughHeapData ithd(this,
                              env,
                              heap_filter,
                              soa.Decode<art::mirror::Class>(klass),
                              callbacks,
                              user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(IterateThroughHeapObjectCallback, &ithd);

  return ERR(NONE);
}

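// Helper for FollowReferences: walks the reference graph breadth-first, starting either
// from all VM roots or from a single initial object, and reports each edge through the
// agent's heap_reference_callback.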
class FollowReferencesHelper FINAL {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         jvmtiEnv* jvmti_env,
                         art::ObjPtr<art::mirror::Object> initial_object,
                         const jvmtiHeapCallbacks* callbacks,
                         art::ObjPtr<art::mirror::Class> class_filter,
                         const void* user_data)
      : env(jvmti_env),
        tag_table_(h->GetTags()),
        initial_object_(initial_object),
        callbacks_(callbacks),
        class_filter_(class_filter),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (initial_object_.IsNull()) {
      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);

      // We need precise info (e.g., vregs).
      constexpr art::VisitRootFlags kRootFlags = static_cast<art::VisitRootFlags>(
          art::VisitRootFlags::kVisitRootFlagAllRoots | art::VisitRootFlags::kVisitRootFlagPrecise);
      art::Runtime::Current()->VisitRoots(&carrv, kRootFlags);

      art::Runtime::Current()->VisitImageRoots(&carrv);
      stop_reports_ = carrv.IsStopReports();

      if (stop_reports_) {
        worklist_.clear();
      }
    } else {
      visited_.insert(initial_object_.Ptr());
      worklist_.push_back(initial_object_.Ptr());
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we can implement a DFS and use the work list as a stack.
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        OVERRIDE
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      // We use visited_ to mark roots already so we do not need another set.
      if (visited_->find(root_obj) == visited_->end()) {
        visited_->insert(root_obj);
        worklist_->push_back(root_obj);
      }
      ReportRoot(root_obj, info);
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have this info.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->stack_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->stack_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          auto& java_info = static_cast<const art::JavaFrameRootInfo&>(info);
          ref_info->stack_local.slot = static_cast<jint>(java_info.GetVReg());
          const art::StackVisitor* visitor = java_info.GetVisitor();
          ref_info->stack_local.location =
              static_cast<jlocation>(visitor->GetDexPc(false /* abort_on_failure */));
          ref_info->stack_local.depth = static_cast<jint>(visitor->GetFrameDepth());
          art::ArtMethod* method = visitor->GetMethod();
          if (method != nullptr) {
            ref_info->stack_local.method = art::jni::EncodeArtMethod(method);
          }

          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;
        }

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    void ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

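  // Dispatch on the kind of object: classes and arrays get specialized handling; for
  // other instances we visit the reference fields and then report string contents.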
  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // TODO: We'll probably have to rewrite this completely with our own visiting logic, if we
    // want to have a chance of getting the field indices computed halfway efficiently. For
    // now, ignore them altogether.

    struct InstanceReferenceVisitor {
      explicit InstanceReferenceVisitor(FollowReferencesHelper* helper_)
          : helper(helper_), stop_reports(false) {}

      void operator()(art::mirror::Object* src,
                      art::MemberOffset field_offset,
                      bool is_static ATTRIBUTE_UNUSED) const
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          REQUIRES(!*helper->tag_table_->GetAllowDisallowLock()) {
        if (stop_reports) {
          return;
        }

        art::mirror::Object* trg = src->GetFieldObjectReferenceAddr(field_offset)->AsMirrorPtr();
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        // TODO: Implement spec-compliant numbering.
        reference_info.field.index = field_offset.Int32Value();

        jvmtiHeapReferenceKind kind =
            field_offset.Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                ? JVMTI_HEAP_REFERENCE_CLASS
                : JVMTI_HEAP_REFERENCE_FIELD;
        const jvmtiHeapReferenceInfo* reference_info_ptr =
            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

        stop_reports = !helper->ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src, trg);
      }

      void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED)
          const {
        LOG(FATAL) << "Unreachable";
      }
      void VisitRootIfNonNull(
          art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const {
        LOG(FATAL) << "Unreachable";
      }

      // "mutable" required by the visitor API.
      mutable FollowReferencesHelper* helper;
      mutable bool stop_reports;
    };

    InstanceReferenceVisitor visitor(this);
    // Visit references, not native roots.
    obj->VisitReferences<false>(visitor, art::VoidFunctor());

    stop_reports_ = visitor.stop_reports;

    if (!stop_reports_) {
      jint string_ret = ReportString(obj, env, tag_table_, callbacks_, user_data_);
      stop_reports_ = (string_ret & JVMTI_VISIT_ABORT) != 0;
    }
  }

  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
          array->AsObjectArray<art::mirror::Object>();
      int32_t length = obj_array->GetLength();
      for (int32_t i = 0; i != length; ++i) {
        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
        if (elem != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          reference_info.array.index = i;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem);
          if (stop_reports_) {
            break;
          }
        }
      }
    } else {
      if (!stop_reports_) {
        jint array_ret = ReportPrimitiveArray(array, env, tag_table_, callbacks_, user_data_);
        stop_reports_ = (array_ret & JVMTI_VISIT_ABORT) != 0;
      }
    }
  }

  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    // fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    for (auto& field : klass->GetSFields()) {
      if (!field.IsPrimitiveType()) {
        art::ObjPtr<art::mirror::Object> field_value = field.GetObject(klass);
        if (field_value != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          memset(&reference_info, 0, sizeof(reference_info));

          // TODO: Implement spec-compliant numbering.
          reference_info.field.index = field.GetOffset().Int32Value();

          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                                       &reference_info,
                                                       klass,
                                                       field_value.Ptr());
          if (stop_reports_) {
            return;
          }
        }
      }
    }
  }

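  // Add obj to the worklist unless it has already been visited.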
  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

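  // Report a single reference edge. Returns true if the traversal should continue;
  // if the callback requested it, the target object is enqueued for visiting.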
  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referrer,
                                   art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referrer, referree);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referree);
      }
      return true;
    } else {
      return false;
    }
  }

  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    if (UNLIKELY(class_filter_ != nullptr) && class_filter_ != referree->GetClass()) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong tag = tag_table_->GetTagOrZero(referree);
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }

    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  jvmtiEnv* env;
  ObjectTagTable* tag_table_;
  art::ObjPtr<art::mirror::Object> initial_object_;
  const jvmtiHeapCallbacks* callbacks_;
  art::ObjPtr<art::mirror::Class> class_filter_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
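  // Compact the worklist once this many elements have been consumed from its head.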
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

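// Example agent-side usage (a sketch only; everything except the standard JVMTI API is
// hypothetical). The reference callback receives one edge of the object graph at a time
// and steers the traversal through its return value:
//
//   jint JNICALL OnReference(jvmtiHeapReferenceKind kind,
//                            const jvmtiHeapReferenceInfo* info,
//                            jlong class_tag, jlong referrer_class_tag, jlong size,
//                            jlong* tag_ptr, jlong* referrer_tag_ptr, jint length,
//                            void* user_data) {
//     *tag_ptr = 1;  // Tag everything reachable.
//     return JVMTI_VISIT_OBJECTS;
//   }
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback = OnReference;
//   jvmti_env->FollowReferences(0, nullptr, nullptr, &callbacks, nullptr);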
jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env,
                                      jint heap_filter ATTRIBUTE_UNUSED,
                                      jclass klass,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    art::ObjPtr<art::mirror::Class> class_filter = klass == nullptr
        ? nullptr
        : art::ObjPtr<art::mirror::Class>::DownCast(self->DecodeJObject(klass));
    FollowReferencesHelper frh(this,
                               env,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               class_filter,
                               user_data);
    frh.Init();
    frh.Work();
  }
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}

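// Note: the returned jclass entries are JNI local references created on the calling
// thread, and the array itself is allocated with env->Allocate; per the JVMTI spec the
// caller is responsible for deallocating it.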
jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}

jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
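  // The 'false' argument requests a collection without clearing soft references.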
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}
}  // namespace openjdkjvmti