/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "primitive.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace openjdkjvmti {

namespace {

// Report the contents of a string, if a callback is set.
jint ReportString(art::ObjPtr<art::mirror::Object> obj,
                  jvmtiEnv* env,
                  ObjectTagTable* tag_table,
                  const jvmtiHeapCallbacks* cb,
                  const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->string_primitive_value_callback != nullptr) && obj->IsString()) {
    art::ObjPtr<art::mirror::String> str = obj->AsString();
    int32_t string_length = str->GetLength();
    jvmtiError alloc_error;
    JvmtiUniquePtr<uint16_t[]> data = AllocJvmtiUniquePtr<uint16_t[]>(env,
                                                                      string_length,
                                                                      &alloc_error);
    if (data == nullptr) {
      // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
      // back? For now just warn.
      LOG(WARNING) << "Unable to allocate buffer for string reporting! Silently dropping value.";
      return 0;
    }

    if (str->IsCompressed()) {
      uint8_t* compressed_data = str->GetValueCompressed();
      for (int32_t i = 0; i != string_length; ++i) {
        data[i] = compressed_data[i];
      }
    } else {
      // Can copy directly.
      memcpy(data.get(), str->GetValue(), string_length * sizeof(uint16_t));
    }

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong string_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_string_tag = string_tag;

    jint result = cb->string_primitive_value_callback(class_tag,
                                                      obj->SizeOf(),
                                                      &string_tag,
                                                      data.get(),
                                                      string_length,
                                                      const_cast<void*>(user_data));
    if (string_tag != saved_string_tag) {
      tag_table->Set(obj.Ptr(), string_tag);
    }

    return result;
  }
  return 0;
}
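
// A minimal sketch (hypothetical agent code, not part of this file) of the callback that
// ReportString feeds, using the standard jvmtiStringPrimitiveValueCallback signature:
//
//   jint JNICALL OnStringValue(jlong class_tag, jlong size, jlong* tag_ptr,
//                              const jchar* value, jint value_length, void* user_data) {
//     size_t* total_chars = static_cast<size_t*>(user_data);
//     *total_chars += value_length;  // E.g., accumulate the total string payload.
//     return 0;                      // Return JVMTI_VISIT_ABORT to stop the iteration.
//   }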

// Report the contents of a primitive array, if a callback is set.
jint ReportPrimitiveArray(art::ObjPtr<art::mirror::Object> obj,
                          jvmtiEnv* env,
                          ObjectTagTable* tag_table,
                          const jvmtiHeapCallbacks* cb,
                          const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->array_primitive_value_callback != nullptr) &&
      obj->IsArrayInstance() &&
      !obj->IsObjectArray()) {
    art::ObjPtr<art::mirror::Array> array = obj->AsArray();
    int32_t array_length = array->GetLength();
    size_t component_size = array->GetClass()->GetComponentSize();
    art::Primitive::Type art_prim_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong array_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_array_tag = array_tag;

    jint result;
    if (array_length == 0) {
      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  0,
                                                  prim_type,
                                                  nullptr,
                                                  const_cast<void*>(user_data));
    } else {
      jvmtiError alloc_error;
      JvmtiUniquePtr<char[]> data = AllocJvmtiUniquePtr<char[]>(env,
                                                                array_length * component_size,
                                                                &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        // back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for array reporting! Silently dropping value.";
        return 0;
      }

      memcpy(data.get(), array->GetRawData(component_size, 0), array_length * component_size);

      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  array_length,
                                                  prim_type,
                                                  data.get(),
                                                  const_cast<void*>(user_data));
    }

    if (array_tag != saved_array_tag) {
      tag_table->Set(obj.Ptr(), array_tag);
    }

    return result;
  }
  return 0;
}

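// Decodes the JVMTI_HEAP_FILTER_* bits once, so that each object visit only pays a couple of
// boolean tests when deciding whether a (tag, class tag) pair should be reported.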
struct HeapFilter {
  explicit HeapFilter(jint heap_filter)
      : filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) const {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;
};

}  // namespace

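// Bundles the per-call state of IterateThroughHeap so it can be passed through the C-style
// visitor callback below as a single void* argument.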
struct IterateThroughHeapData {
  IterateThroughHeapData(HeapUtil* _heap_util,
                         jvmtiEnv* _env,
                         art::ObjPtr<art::mirror::Class> klass,
                         jint _heap_filter,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : heap_util(_heap_util),
        heap_filter(_heap_filter),
        filter_klass(klass),
        env(_env),
        callbacks(_callbacks),
        user_data(_user_data),
        stop_reports(false) {
  }

  HeapUtil* heap_util;
  const HeapFilter heap_filter;
  art::ObjPtr<art::mirror::Class> filter_klass;
  jvmtiEnv* env;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;

  bool stop_reports;
};

static void IterateThroughHeapObjectCallback(art::mirror::Object* obj, void* arg)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
  // Early return, as we can't really stop visiting.
  if (ithd->stop_reports) {
    return;
  }

  art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

  jlong tag = 0;
  ithd->heap_util->GetTags()->GetTag(obj, &tag);

  jlong class_tag = 0;
  art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
  ithd->heap_util->GetTags()->GetTag(klass.Ptr(), &class_tag);
  // For simplicity, even if we find a tag = 0, assume 0 = not tagged.

  if (!ithd->heap_filter.ShouldReportByHeapFilter(tag, class_tag)) {
    return;
  }

  if (ithd->filter_klass != nullptr) {
    if (ithd->filter_klass != klass) {
      return;
    }
  }

  jlong size = obj->SizeOf();

  jint length = -1;
  if (obj->IsArrayInstance()) {
    length = obj->AsArray()->GetLength();
  }

  jlong saved_tag = tag;
  jint ret = ithd->callbacks->heap_iteration_callback(class_tag,
                                                      size,
                                                      &tag,
                                                      length,
                                                      const_cast<void*>(ithd->user_data));

  if (tag != saved_tag) {
    ithd->heap_util->GetTags()->Set(obj, tag);
  }

  ithd->stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

  if (!ithd->stop_reports) {
    jint string_ret = ReportString(obj,
                                   ithd->env,
                                   ithd->heap_util->GetTags(),
                                   ithd->callbacks,
                                   ithd->user_data);
    ithd->stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
  }

  if (!ithd->stop_reports) {
    jint array_ret = ReportPrimitiveArray(obj,
                                          ithd->env,
                                          ithd->heap_util->GetTags(),
                                          ithd->callbacks,
                                          ithd->user_data);
    ithd->stop_reports = (array_ret & JVMTI_VISIT_ABORT) != 0;
  }

  // TODO Implement primitive field callback.
}

jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.

  IterateThroughHeapData ithd(this,
                              env,
                              soa.Decode<art::mirror::Class>(klass),
                              heap_filter,
                              callbacks,
                              user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(IterateThroughHeapObjectCallback, &ithd);

  return ERR(NONE);
}
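
// A minimal usage sketch (hypothetical agent code, not part of this file). CountCallback is
// an assumed jvmtiHeapIterationCallback supplied by the agent; the filter skips objects that
// already carry a tag.
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_iteration_callback = CountCallback;
//   jvmti_env->IterateThroughHeap(JVMTI_HEAP_FILTER_TAGGED, nullptr, &callbacks, &count);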
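// Implements the traversal behind FollowReferences: the roots (or the given initial object)
// seed a work list that Work() drains breadth-first, reporting each discovered edge through
// the heap_reference_callback and enqueueing targets the callback asks to visit.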
class FollowReferencesHelper FINAL {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         jvmtiEnv* jvmti_env,
                         art::ObjPtr<art::mirror::Object> initial_object,
                         const jvmtiHeapCallbacks* callbacks,
                         art::ObjPtr<art::mirror::Class> class_filter,
                         jint heap_filter,
                         const void* user_data)
      : env(jvmti_env),
        tag_table_(h->GetTags()),
        initial_object_(initial_object),
        callbacks_(callbacks),
        class_filter_(class_filter),
        heap_filter_(heap_filter),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (initial_object_.IsNull()) {
      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);

      // We need precise info (e.g., vregs).
      constexpr art::VisitRootFlags kRootFlags = static_cast<art::VisitRootFlags>(
          art::VisitRootFlags::kVisitRootFlagAllRoots | art::VisitRootFlags::kVisitRootFlagPrecise);
      art::Runtime::Current()->VisitRoots(&carrv, kRootFlags);

      art::Runtime::Current()->VisitImageRoots(&carrv);
      stop_reports_ = carrv.IsStopReports();

      if (stop_reports_) {
        worklist_.clear();
      }
    } else {
      visited_.insert(initial_object_.Ptr());
      worklist_.push_back(initial_object_.Ptr());
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we can implement a DFS and use the work list as a stack.
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        OVERRIDE
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      // We use visited_ to mark roots already so we do not need another set.
      if (visited_->find(root_obj) == visited_->end()) {
        visited_->insert(root_obj);
        worklist_->push_back(root_obj);
      }
      ReportRoot(root_obj, info);
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have this info.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->stack_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->stack_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          auto& java_info = static_cast<const art::JavaFrameRootInfo&>(info);
          ref_info->stack_local.slot = static_cast<jint>(java_info.GetVReg());
          const art::StackVisitor* visitor = java_info.GetVisitor();
          ref_info->stack_local.location =
              static_cast<jlocation>(visitor->GetDexPc(false /* abort_on_failure */));
          ref_info->stack_local.depth = static_cast<jint>(visitor->GetFrameDepth());
          art::ArtMethod* method = visitor->GetMethod();
          if (method != nullptr) {
            ref_info->stack_local.method = art::jni::EncodeArtMethod(method);
          }

          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;
        }

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    void ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

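  // Report all references leaving one object, dispatching on whether it is a class, an
  // array, or a plain instance.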
  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // TODO: We'll probably have to rewrite this completely with our own visiting logic, if we
    // want to have a chance of getting the field indices computed halfway efficiently. For
    // now, ignore them altogether.

    struct InstanceReferenceVisitor {
      explicit InstanceReferenceVisitor(FollowReferencesHelper* helper_)
          : helper(helper_), stop_reports(false) {}

      void operator()(art::mirror::Object* src,
                      art::MemberOffset field_offset,
                      bool is_static ATTRIBUTE_UNUSED) const
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          REQUIRES(!*helper->tag_table_->GetAllowDisallowLock()) {
        if (stop_reports) {
          return;
        }

        art::mirror::Object* trg = src->GetFieldObjectReferenceAddr(field_offset)->AsMirrorPtr();
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        // TODO: Implement spec-compliant numbering.
        reference_info.field.index = field_offset.Int32Value();

        jvmtiHeapReferenceKind kind =
            field_offset.Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                ? JVMTI_HEAP_REFERENCE_CLASS
                : JVMTI_HEAP_REFERENCE_FIELD;
        const jvmtiHeapReferenceInfo* reference_info_ptr =
            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

        stop_reports = !helper->ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src, trg);
      }

      void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED)
          const {
        LOG(FATAL) << "Unreachable";
      }
      void VisitRootIfNonNull(
          art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const {
        LOG(FATAL) << "Unreachable";
      }

      // "mutable" required by the visitor API.
      mutable FollowReferencesHelper* helper;
      mutable bool stop_reports;
    };

    InstanceReferenceVisitor visitor(this);
    // Visit references, not native roots.
    obj->VisitReferences<false>(visitor, art::VoidFunctor());

    stop_reports_ = visitor.stop_reports;

    if (!stop_reports_) {
      jint string_ret = ReportString(obj, env, tag_table_, callbacks_, user_data_);
      stop_reports_ = (string_ret & JVMTI_VISIT_ABORT) != 0;
    }
  }

  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
          array->AsObjectArray<art::mirror::Object>();
      int32_t length = obj_array->GetLength();
      for (int32_t i = 0; i != length; ++i) {
        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
        if (elem != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          reference_info.array.index = i;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem);
          if (stop_reports_) {
            break;
          }
        }
      }
    } else {
      if (!stop_reports_) {
        jint array_ret = ReportPrimitiveArray(array, env, tag_table_, callbacks_, user_data_);
        stop_reports_ = (array_ret & JVMTI_VISIT_ABORT) != 0;
      }
    }
  }

  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    // fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    for (auto& field : klass->GetSFields()) {
      if (!field.IsPrimitiveType()) {
        art::ObjPtr<art::mirror::Object> field_value = field.GetObject(klass);
        if (field_value != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          memset(&reference_info, 0, sizeof(reference_info));

          // TODO: Implement spec-compliant numbering.
          reference_info.field.index = field.GetOffset().Int32Value();

          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                                       &reference_info,
                                                       klass,
                                                       field_value.Ptr());
          if (stop_reports_) {
            return;
          }
        }
      }
    }
  }

  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

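  // Report a reference and, if the callback requests it (JVMTI_VISIT_OBJECTS), enqueue the
  // referree for visiting. Returns true if the traversal should continue, false on
  // JVMTI_VISIT_ABORT.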
  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referrer,
                                   art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referrer, referree);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referree);
      }
      return true;
    } else {
      return false;
    }
  }

  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    if (UNLIKELY(class_filter_ != nullptr) && class_filter_ != referree->GetClass()) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    jlong tag = tag_table_->GetTagOrZero(referree);

    if (!heap_filter_.ShouldReportByHeapFilter(tag, class_tag)) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }

    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  jvmtiEnv* env;
  ObjectTagTable* tag_table_;
  art::ObjPtr<art::mirror::Object> initial_object_;
  const jvmtiHeapCallbacks* callbacks_;
  art::ObjPtr<art::mirror::Class> class_filter_;
  const HeapFilter heap_filter_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

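// A minimal usage sketch of FollowReferences (hypothetical agent code, not part of this
// file). ReferenceCallback is an assumed jvmtiHeapReferenceCallback supplied by the agent;
// passing a null initial object starts the walk at the heap roots.
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback = ReferenceCallback;
//   jvmti_env->FollowReferences(0, nullptr, nullptr, &callbacks, nullptr);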
jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env,
                                      jint heap_filter,
                                      jclass klass,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    art::ObjPtr<art::mirror::Class> class_filter = klass == nullptr
        ? nullptr
        : art::ObjPtr<art::mirror::Class>::DownCast(self->DecodeJObject(klass));
    FollowReferencesHelper frh(this,
                               env,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               class_filter,
                               heap_filter,
                               user_data);
    frh.Init();
    frh.Work();
  }
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}

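// A usage sketch (hypothetical agent code, not part of this file). Per the JVMTI contract,
// the returned array comes from env->Allocate and the caller releases it with Deallocate:
//
//   jint count = 0;
//   jclass* classes = nullptr;
//   if (jvmti_env->GetLoadedClasses(&count, &classes) == JVMTI_ERROR_NONE) {
//     // ... use classes[0 .. count) ...
//     jvmti_env->Deallocate(reinterpret_cast<unsigned char*>(classes));
//   }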
jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}

jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}
}  // namespace openjdkjvmti