/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ti_heap.h"

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "class_linker.h"
#include "gc/heap.h"
#include "gc_root-inl.h"
#include "jni_env_ext.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_callbacks.h"
#include "object_tagging.h"
#include "obj_ptr-inl.h"
#include "primitive.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace openjdkjvmti {

namespace {

// Report the contents of a string, if a callback is set.
jint ReportString(art::ObjPtr<art::mirror::Object> obj,
                  jvmtiEnv* env,
                  ObjectTagTable* tag_table,
                  const jvmtiHeapCallbacks* cb,
                  const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->string_primitive_value_callback != nullptr) && obj->IsString()) {
    art::ObjPtr<art::mirror::String> str = obj->AsString();
    int32_t string_length = str->GetLength();
    JvmtiUniquePtr<uint16_t[]> data;

    if (string_length > 0) {
      jvmtiError alloc_error;
      data = AllocJvmtiUniquePtr<uint16_t[]>(env, string_length, &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        //       back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for string reporting! Silently dropping value."
                     << " >" << str->ToModifiedUtf8() << "<";
        return 0;
      }

      if (str->IsCompressed()) {
        uint8_t* compressed_data = str->GetValueCompressed();
        for (int32_t i = 0; i != string_length; ++i) {
          data[i] = compressed_data[i];
        }
      } else {
        // Can copy directly.
        memcpy(data.get(), str->GetValue(), string_length * sizeof(uint16_t));
      }
    }

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong string_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_string_tag = string_tag;

    jint result = cb->string_primitive_value_callback(class_tag,
                                                      obj->SizeOf(),
                                                      &string_tag,
                                                      data.get(),
                                                      string_length,
                                                      const_cast<void*>(user_data));
    if (string_tag != saved_string_tag) {
      tag_table->Set(obj.Ptr(), string_tag);
    }

    return result;
  }
  return 0;
}

// Report the contents of a primitive array, if a callback is set.
jint ReportPrimitiveArray(art::ObjPtr<art::mirror::Object> obj,
                          jvmtiEnv* env,
                          ObjectTagTable* tag_table,
                          const jvmtiHeapCallbacks* cb,
                          const void* user_data) REQUIRES_SHARED(art::Locks::mutator_lock_) {
  if (UNLIKELY(cb->array_primitive_value_callback != nullptr) &&
      obj->IsArrayInstance() &&
      !obj->IsObjectArray()) {
    art::ObjPtr<art::mirror::Array> array = obj->AsArray();
    int32_t array_length = array->GetLength();
    size_t component_size = array->GetClass()->GetComponentSize();
    art::Primitive::Type art_prim_type = array->GetClass()->GetComponentType()->GetPrimitiveType();
    jvmtiPrimitiveType prim_type =
        static_cast<jvmtiPrimitiveType>(art::Primitive::Descriptor(art_prim_type)[0]);
    DCHECK(prim_type == JVMTI_PRIMITIVE_TYPE_BOOLEAN ||
           prim_type == JVMTI_PRIMITIVE_TYPE_BYTE ||
           prim_type == JVMTI_PRIMITIVE_TYPE_CHAR ||
           prim_type == JVMTI_PRIMITIVE_TYPE_SHORT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_INT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_LONG ||
           prim_type == JVMTI_PRIMITIVE_TYPE_FLOAT ||
           prim_type == JVMTI_PRIMITIVE_TYPE_DOUBLE);

    const jlong class_tag = tag_table->GetTagOrZero(obj->GetClass());
    jlong array_tag = tag_table->GetTagOrZero(obj.Ptr());
    const jlong saved_array_tag = array_tag;

    jint result;
    if (array_length == 0) {
      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  0,
                                                  prim_type,
                                                  nullptr,
                                                  const_cast<void*>(user_data));
    } else {
      jvmtiError alloc_error;
      JvmtiUniquePtr<char[]> data = AllocJvmtiUniquePtr<char[]>(env,
                                                                array_length * component_size,
                                                                &alloc_error);
      if (data == nullptr) {
        // TODO: Not really sure what to do here. Should we abort the iteration and go all the way
        //       back? For now just warn.
        LOG(WARNING) << "Unable to allocate buffer for array reporting! Silently dropping value.";
        return 0;
      }

      memcpy(data.get(), array->GetRawData(component_size, 0), array_length * component_size);

      result = cb->array_primitive_value_callback(class_tag,
                                                  obj->SizeOf(),
                                                  &array_tag,
                                                  array_length,
                                                  prim_type,
                                                  data.get(),
                                                  const_cast<void*>(user_data));
    }

    if (array_tag != saved_array_tag) {
      tag_table->Set(obj.Ptr(), array_tag);
    }

    return result;
  }
  return 0;
}

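// Decodes the JVMTI_HEAP_FILTER_* bits once, then answers per object whether a
// (tag, class_tag) pair should be reported. The filter is exclusionary: each set
// bit removes one category (tagged/untagged objects, tagged/untagged classes).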
struct HeapFilter {
  explicit HeapFilter(jint heap_filter)
      : filter_out_tagged((heap_filter & JVMTI_HEAP_FILTER_TAGGED) != 0),
        filter_out_untagged((heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) != 0),
        filter_out_class_tagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) != 0),
        filter_out_class_untagged((heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) != 0),
        any_filter(filter_out_tagged ||
                   filter_out_untagged ||
                   filter_out_class_tagged ||
                   filter_out_class_untagged) {
  }

  bool ShouldReportByHeapFilter(jlong tag, jlong class_tag) const {
    if (!any_filter) {
      return true;
    }

    if ((tag == 0 && filter_out_untagged) || (tag != 0 && filter_out_tagged)) {
      return false;
    }

    if ((class_tag == 0 && filter_out_class_untagged) ||
        (class_tag != 0 && filter_out_class_tagged)) {
      return false;
    }

    return true;
  }

  const bool filter_out_tagged;
  const bool filter_out_untagged;
  const bool filter_out_class_tagged;
  const bool filter_out_class_untagged;
  const bool any_filter;
};

}  // namespace

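// Per-iteration state shared between HeapUtil::IterateThroughHeap and the
// per-object callback below.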
struct IterateThroughHeapData {
  IterateThroughHeapData(HeapUtil* _heap_util,
                         jvmtiEnv* _env,
                         art::ObjPtr<art::mirror::Class> klass,
                         jint _heap_filter,
                         const jvmtiHeapCallbacks* _callbacks,
                         const void* _user_data)
      : heap_util(_heap_util),
        heap_filter(_heap_filter),
        filter_klass(klass),
        env(_env),
        callbacks(_callbacks),
        user_data(_user_data),
        stop_reports(false) {
  }

  HeapUtil* heap_util;
  const HeapFilter heap_filter;
  art::ObjPtr<art::mirror::Class> filter_klass;
  jvmtiEnv* env;
  const jvmtiHeapCallbacks* callbacks;
  const void* user_data;

  bool stop_reports;
};

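// Invoked once per live object by Heap::VisitObjects. Applies the heap and class
// filters, reports the object through heap_iteration_callback, then follows up
// with the optional string and primitive-array callbacks.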
static void IterateThroughHeapObjectCallback(art::mirror::Object* obj, void* arg)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  IterateThroughHeapData* ithd = reinterpret_cast<IterateThroughHeapData*>(arg);
  // Early return, as we can't really stop visiting.
  if (ithd->stop_reports) {
    return;
  }

  art::ScopedAssertNoThreadSuspension no_suspension("IterateThroughHeapCallback");

  jlong tag = 0;
  ithd->heap_util->GetTags()->GetTag(obj, &tag);

  jlong class_tag = 0;
  art::ObjPtr<art::mirror::Class> klass = obj->GetClass();
  ithd->heap_util->GetTags()->GetTag(klass.Ptr(), &class_tag);
  // For simplicity, even if we find a tag = 0, assume 0 = not tagged.

  if (!ithd->heap_filter.ShouldReportByHeapFilter(tag, class_tag)) {
    return;
  }

  if (ithd->filter_klass != nullptr) {
    if (ithd->filter_klass != klass) {
      return;
    }
  }

  jlong size = obj->SizeOf();

  jint length = -1;
  if (obj->IsArrayInstance()) {
    length = obj->AsArray()->GetLength();
  }

  jlong saved_tag = tag;
  jint ret = ithd->callbacks->heap_iteration_callback(class_tag,
                                                      size,
                                                      &tag,
                                                      length,
                                                      const_cast<void*>(ithd->user_data));

  if (tag != saved_tag) {
    ithd->heap_util->GetTags()->Set(obj, tag);
  }

  ithd->stop_reports = (ret & JVMTI_VISIT_ABORT) != 0;

  if (!ithd->stop_reports) {
    jint string_ret = ReportString(obj,
                                   ithd->env,
                                   ithd->heap_util->GetTags(),
                                   ithd->callbacks,
                                   ithd->user_data);
    ithd->stop_reports = (string_ret & JVMTI_VISIT_ABORT) != 0;
  }

  if (!ithd->stop_reports) {
    jint array_ret = ReportPrimitiveArray(obj,
                                          ithd->env,
                                          ithd->heap_util->GetTags(),
                                          ithd->callbacks,
                                          ithd->user_data);
    ithd->stop_reports = (array_ret & JVMTI_VISIT_ABORT) != 0;
  }

  // TODO Implement primitive field callback.
}

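// JVMTI entry point. For reference, an agent would typically drive this roughly as
// follows (hypothetical agent-side sketch; callback and variable names are made up):
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_iteration_callback = MyIterationCallback;  // agent-defined
//   jvmti_env->IterateThroughHeap(0 /* heap_filter */, nullptr /* klass */,
//                                 &callbacks, nullptr /* user_data */);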
jvmtiError HeapUtil::IterateThroughHeap(jvmtiEnv* env,
                                        jint heap_filter,
                                        jclass klass,
                                        const jvmtiHeapCallbacks* callbacks,
                                        const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();
  art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.

  IterateThroughHeapData ithd(this,
                              env,
                              soa.Decode<art::mirror::Class>(klass),
                              heap_filter,
                              callbacks,
                              user_data);

  art::Runtime::Current()->GetHeap()->VisitObjects(IterateThroughHeapObjectCallback, &ithd);

  return ERR(NONE);
}

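// Implements FollowReferences as a worklist-driven graph traversal: Init() seeds the
// worklist (either with the given initial object, or with all VM roots), and Work()
// then visits each reachable object, reporting every reference edge to the agent.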
class FollowReferencesHelper FINAL {
 public:
  FollowReferencesHelper(HeapUtil* h,
                         jvmtiEnv* jvmti_env,
                         art::ObjPtr<art::mirror::Object> initial_object,
                         const jvmtiHeapCallbacks* callbacks,
                         art::ObjPtr<art::mirror::Class> class_filter,
                         jint heap_filter,
                         const void* user_data)
      : env(jvmti_env),
        tag_table_(h->GetTags()),
        initial_object_(initial_object),
        callbacks_(callbacks),
        class_filter_(class_filter),
        heap_filter_(heap_filter),
        user_data_(user_data),
        start_(0),
        stop_reports_(false) {
  }

  void Init()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (initial_object_.IsNull()) {
      CollectAndReportRootsVisitor carrv(this, tag_table_, &worklist_, &visited_);

      // We need precise info (e.g., vregs).
      constexpr art::VisitRootFlags kRootFlags = static_cast<art::VisitRootFlags>(
          art::VisitRootFlags::kVisitRootFlagAllRoots | art::VisitRootFlags::kVisitRootFlagPrecise);
      art::Runtime::Current()->VisitRoots(&carrv, kRootFlags);

      art::Runtime::Current()->VisitImageRoots(&carrv);
      stop_reports_ = carrv.IsStopReports();

      if (stop_reports_) {
        worklist_.clear();
      }
    } else {
      visited_.insert(initial_object_.Ptr());
      worklist_.push_back(initial_object_.Ptr());
    }
  }

  void Work()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // Currently implemented as a BFS. To lower overhead, we don't erase elements immediately
    // from the head of the work list, instead postponing until there's a gap that's "large."
    //
    // Alternatively, we can implement a DFS and use the work list as a stack.
    while (start_ < worklist_.size()) {
      art::mirror::Object* cur_obj = worklist_[start_];
      start_++;

      if (start_ >= kMaxStart) {
        worklist_.erase(worklist_.begin(), worklist_.begin() + start_);
        start_ = 0;
      }

      VisitObject(cur_obj);

      if (stop_reports_) {
        break;
      }
    }
  }

 private:
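  // Root visitor that reports each root through heap_reference_callback and seeds
  // the owning helper's worklist and visited set with the root objects.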
  class CollectAndReportRootsVisitor FINAL : public art::RootVisitor {
   public:
    CollectAndReportRootsVisitor(FollowReferencesHelper* helper,
                                 ObjectTagTable* tag_table,
                                 std::vector<art::mirror::Object*>* worklist,
                                 std::unordered_set<art::mirror::Object*>* visited)
        : helper_(helper),
          tag_table_(tag_table),
          worklist_(worklist),
          visited_(visited),
          stop_reports_(false) {}

    void VisitRoots(art::mirror::Object*** roots, size_t count, const art::RootInfo& info)
        OVERRIDE
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(*roots[i], info);
      }
    }

    void VisitRoots(art::mirror::CompressedReference<art::mirror::Object>** roots,
                    size_t count,
                    const art::RootInfo& info)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*helper_->tag_table_->GetAllowDisallowLock()) {
      for (size_t i = 0; i != count; ++i) {
        AddRoot(roots[i]->AsMirrorPtr(), info);
      }
    }

    bool IsStopReports() {
      return stop_reports_;
    }

   private:
    void AddRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      // We use visited_ to mark roots already so we do not need another set.
      if (visited_->find(root_obj) == visited_->end()) {
        visited_->insert(root_obj);
        worklist_->push_back(root_obj);
      }
      ReportRoot(root_obj, info);
    }

    // Remove NO_THREAD_SAFETY_ANALYSIS once ASSERT_CAPABILITY works correctly.
    art::Thread* FindThread(const art::RootInfo& info) NO_THREAD_SAFETY_ANALYSIS {
      art::Locks::thread_list_lock_->AssertExclusiveHeld(art::Thread::Current());
      return art::Runtime::Current()->GetThreadList()->FindThreadByThreadId(info.GetThreadId());
    }

    jvmtiHeapReferenceKind GetReferenceKind(const art::RootInfo& info,
                                            jvmtiHeapReferenceInfo* ref_info)
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      // TODO: Fill in ref_info.
      memset(ref_info, 0, sizeof(jvmtiHeapReferenceInfo));

      switch (info.GetType()) {
        case art::RootType::kRootJNIGlobal:
          return JVMTI_HEAP_REFERENCE_JNI_GLOBAL;

        case art::RootType::kRootJNILocal:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->jni_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->jni_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          // TODO: We don't have this info.
          if (thread != nullptr) {
            ref_info->jni_local.depth = 0;
            art::ArtMethod* method = thread->GetCurrentMethod(nullptr, false /* abort_on_error */);
            if (method != nullptr) {
              ref_info->jni_local.method = art::jni::EncodeArtMethod(method);
            }
          }

          return JVMTI_HEAP_REFERENCE_JNI_LOCAL;
        }

        case art::RootType::kRootJavaFrame:
        {
          uint32_t thread_id = info.GetThreadId();
          ref_info->stack_local.thread_id = thread_id;

          art::Thread* thread = FindThread(info);
          if (thread != nullptr) {
            art::mirror::Object* thread_obj;
            if (thread->IsStillStarting()) {
              thread_obj = nullptr;
            } else {
              thread_obj = thread->GetPeerFromOtherThread();
            }
            if (thread_obj != nullptr) {
              ref_info->stack_local.thread_tag = tag_table_->GetTagOrZero(thread_obj);
            }
          }

          auto& java_info = static_cast<const art::JavaFrameRootInfo&>(info);
          ref_info->stack_local.slot = static_cast<jint>(java_info.GetVReg());
          const art::StackVisitor* visitor = java_info.GetVisitor();
          ref_info->stack_local.location =
              static_cast<jlocation>(visitor->GetDexPc(false /* abort_on_failure */));
          ref_info->stack_local.depth = static_cast<jint>(visitor->GetFrameDepth());
          art::ArtMethod* method = visitor->GetMethod();
          if (method != nullptr) {
            ref_info->stack_local.method = art::jni::EncodeArtMethod(method);
          }

          return JVMTI_HEAP_REFERENCE_STACK_LOCAL;
        }

        case art::RootType::kRootNativeStack:
        case art::RootType::kRootThreadBlock:
        case art::RootType::kRootThreadObject:
          return JVMTI_HEAP_REFERENCE_THREAD;

        case art::RootType::kRootStickyClass:
        case art::RootType::kRootInternedString:
          // Note: this isn't a root in the RI.
          return JVMTI_HEAP_REFERENCE_SYSTEM_CLASS;

        case art::RootType::kRootMonitorUsed:
        case art::RootType::kRootJNIMonitor:
          return JVMTI_HEAP_REFERENCE_MONITOR;

        case art::RootType::kRootFinalizing:
        case art::RootType::kRootDebugger:
        case art::RootType::kRootReferenceCleanup:
        case art::RootType::kRootVMInternal:
        case art::RootType::kRootUnknown:
          return JVMTI_HEAP_REFERENCE_OTHER;
      }
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    }

    void ReportRoot(art::mirror::Object* root_obj, const art::RootInfo& info)
        REQUIRES_SHARED(art::Locks::mutator_lock_)
        REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
      jvmtiHeapReferenceInfo ref_info;
      jvmtiHeapReferenceKind kind = GetReferenceKind(info, &ref_info);
      jint result = helper_->ReportReference(kind, &ref_info, nullptr, root_obj);
      if ((result & JVMTI_VISIT_ABORT) != 0) {
        stop_reports_ = true;
      }
    }

   private:
    FollowReferencesHelper* helper_;
    ObjectTagTable* tag_table_;
    std::vector<art::mirror::Object*>* worklist_;
    std::unordered_set<art::mirror::Object*>* visited_;
    bool stop_reports_;
  };

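  // Dispatches on the kind of object: classes and arrays get dedicated handling;
  // plain instances have their reference fields walked via VisitReferences, with
  // string contents reported afterwards if that callback is set.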
  void VisitObject(art::mirror::Object* obj)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (obj->IsClass()) {
      VisitClass(obj->AsClass());
      return;
    }
    if (obj->IsArrayInstance()) {
      VisitArray(obj);
      return;
    }

    // TODO: We'll probably have to rewrite this completely with our own visiting logic, if we
    //       want to have a chance of getting the field indices computed halfway efficiently. For
    //       now, ignore them altogether.

    struct InstanceReferenceVisitor {
      explicit InstanceReferenceVisitor(FollowReferencesHelper* helper_)
          : helper(helper_), stop_reports(false) {}

      void operator()(art::mirror::Object* src,
                      art::MemberOffset field_offset,
                      bool is_static ATTRIBUTE_UNUSED) const
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          REQUIRES(!*helper->tag_table_->GetAllowDisallowLock()) {
        if (stop_reports) {
          return;
        }

        art::mirror::Object* trg = src->GetFieldObjectReferenceAddr(field_offset)->AsMirrorPtr();
        jvmtiHeapReferenceInfo reference_info;
        memset(&reference_info, 0, sizeof(reference_info));

        // TODO: Implement spec-compliant numbering.
        reference_info.field.index = field_offset.Int32Value();

        jvmtiHeapReferenceKind kind =
            field_offset.Int32Value() == art::mirror::Object::ClassOffset().Int32Value()
                ? JVMTI_HEAP_REFERENCE_CLASS
                : JVMTI_HEAP_REFERENCE_FIELD;
        const jvmtiHeapReferenceInfo* reference_info_ptr =
            kind == JVMTI_HEAP_REFERENCE_CLASS ? nullptr : &reference_info;

        stop_reports = !helper->ReportReferenceMaybeEnqueue(kind, reference_info_ptr, src, trg);
      }

      void VisitRoot(art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED)
          const {
        LOG(FATAL) << "Unreachable";
      }
      void VisitRootIfNonNull(
          art::mirror::CompressedReference<art::mirror::Object>* root ATTRIBUTE_UNUSED) const {
        LOG(FATAL) << "Unreachable";
      }

      // "mutable" required by the visitor API.
      mutable FollowReferencesHelper* helper;
      mutable bool stop_reports;
    };

    InstanceReferenceVisitor visitor(this);
    // Visit references, not native roots.
    obj->VisitReferences<false>(visitor, art::VoidFunctor());

    stop_reports_ = visitor.stop_reports;

    if (!stop_reports_) {
      jint string_ret = ReportString(obj, env, tag_table_, callbacks_, user_data_);
      stop_reports_ = (string_ret & JVMTI_VISIT_ABORT) != 0;
    }
  }

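  // Reports the array's class reference, then either the individual elements (for
  // object arrays) or the raw contents (for primitive arrays).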
  void VisitArray(art::mirror::Object* array)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS,
                                                 nullptr,
                                                 array,
                                                 array->GetClass());
    if (stop_reports_) {
      return;
    }

    if (array->IsObjectArray()) {
      art::mirror::ObjectArray<art::mirror::Object>* obj_array =
          array->AsObjectArray<art::mirror::Object>();
      int32_t length = obj_array->GetLength();
      for (int32_t i = 0; i != length; ++i) {
        art::mirror::Object* elem = obj_array->GetWithoutChecks(i);
        if (elem != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          reference_info.array.index = i;
          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT,
                                                       &reference_info,
                                                       array,
                                                       elem);
          if (stop_reports_) {
            break;
          }
        }
      }
    } else {
      if (!stop_reports_) {
        jint array_ret = ReportPrimitiveArray(array, env, tag_table_, callbacks_, user_data_);
        stop_reports_ = (array_ret & JVMTI_VISIT_ABORT) != 0;
      }
    }
  }

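  // Reports the references held by a class: superclass, directly implemented or
  // extended interfaces, the defining class loader, and declared static fields.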
  void VisitClass(art::mirror::Class* klass)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    // TODO: Are erroneous classes reported? Are non-prepared ones? For now, just use resolved ones.
    if (!klass->IsResolved()) {
      return;
    }

    // Superclass.
    stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_SUPERCLASS,
                                                 nullptr,
                                                 klass,
                                                 klass->GetSuperClass());
    if (stop_reports_) {
      return;
    }

    // Directly implemented or extended interfaces.
    art::Thread* self = art::Thread::Current();
    art::StackHandleScope<1> hs(self);
    art::Handle<art::mirror::Class> h_klass(hs.NewHandle<art::mirror::Class>(klass));
    for (size_t i = 0; i < h_klass->NumDirectInterfaces(); ++i) {
      art::ObjPtr<art::mirror::Class> inf_klass =
          art::mirror::Class::ResolveDirectInterface(self, h_klass, i);
      if (inf_klass == nullptr) {
        // TODO: With a resolved class this should not happen...
        self->ClearException();
        break;
      }

      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_INTERFACE,
                                                   nullptr,
                                                   klass,
                                                   inf_klass.Ptr());
      if (stop_reports_) {
        return;
      }
    }

    // Classloader.
    // TODO: What about the boot classpath loader? We'll skip for now, but do we have to find the
    //       fake BootClassLoader?
    if (klass->GetClassLoader() != nullptr) {
      stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_CLASS_LOADER,
                                                   nullptr,
                                                   klass,
                                                   klass->GetClassLoader());
      if (stop_reports_) {
        return;
      }
    }
    DCHECK_EQ(h_klass.Get(), klass);

    // Declared static fields.
    for (auto& field : klass->GetSFields()) {
      if (!field.IsPrimitiveType()) {
        art::ObjPtr<art::mirror::Object> field_value = field.GetObject(klass);
        if (field_value != nullptr) {
          jvmtiHeapReferenceInfo reference_info;
          memset(&reference_info, 0, sizeof(reference_info));

          // TODO: Implement spec-compliant numbering.
          reference_info.field.index = field.GetOffset().Int32Value();

          stop_reports_ = !ReportReferenceMaybeEnqueue(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                                       &reference_info,
                                                       klass,
                                                       field_value.Ptr());
          if (stop_reports_) {
            return;
          }
        }
      }
    }
  }

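  // Adds the object to the worklist unless it has been visited before.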
  void MaybeEnqueue(art::mirror::Object* obj) REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (visited_.find(obj) == visited_.end()) {
      worklist_.push_back(obj);
      visited_.insert(obj);
    }
  }

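  // Reports the edge referrer -> referree and, if the callback requested it via
  // JVMTI_VISIT_OBJECTS, enqueues the target object. Returns true if iteration
  // should continue, false on JVMTI_VISIT_ABORT.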
  bool ReportReferenceMaybeEnqueue(jvmtiHeapReferenceKind kind,
                                   const jvmtiHeapReferenceInfo* reference_info,
                                   art::mirror::Object* referrer,
                                   art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    jint result = ReportReference(kind, reference_info, referrer, referree);
    if ((result & JVMTI_VISIT_ABORT) == 0) {
      if ((result & JVMTI_VISIT_OBJECTS) != 0) {
        MaybeEnqueue(referree);
      }
      return true;
    } else {
      return false;
    }
  }

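  // Applies the class and heap filters, invokes heap_reference_callback, and writes
  // back any tag values the agent changed. Note the aliasing case: when an object
  // refers to itself, the referrer tag pointer must alias the referree tag.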
  jint ReportReference(jvmtiHeapReferenceKind kind,
                       const jvmtiHeapReferenceInfo* reference_info,
                       art::mirror::Object* referrer,
                       art::mirror::Object* referree)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!*tag_table_->GetAllowDisallowLock()) {
    if (referree == nullptr || stop_reports_) {
      return 0;
    }

    if (UNLIKELY(class_filter_ != nullptr) && class_filter_ != referree->GetClass()) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong class_tag = tag_table_->GetTagOrZero(referree->GetClass());
    jlong tag = tag_table_->GetTagOrZero(referree);

    if (!heap_filter_.ShouldReportByHeapFilter(tag, class_tag)) {
      return JVMTI_VISIT_OBJECTS;
    }

    const jlong referrer_class_tag =
        referrer == nullptr ? 0 : tag_table_->GetTagOrZero(referrer->GetClass());
    const jlong size = static_cast<jlong>(referree->SizeOf());
    jlong saved_tag = tag;
    jlong referrer_tag = 0;
    jlong saved_referrer_tag = 0;
    jlong* referrer_tag_ptr;
    if (referrer == nullptr) {
      referrer_tag_ptr = nullptr;
    } else {
      if (referrer == referree) {
        referrer_tag_ptr = &tag;
      } else {
        referrer_tag = saved_referrer_tag = tag_table_->GetTagOrZero(referrer);
        referrer_tag_ptr = &referrer_tag;
      }
    }

    jint length = -1;
    if (referree->IsArrayInstance()) {
      length = referree->AsArray()->GetLength();
    }

    jint result = callbacks_->heap_reference_callback(kind,
                                                      reference_info,
                                                      class_tag,
                                                      referrer_class_tag,
                                                      size,
                                                      &tag,
                                                      referrer_tag_ptr,
                                                      length,
                                                      const_cast<void*>(user_data_));

    if (tag != saved_tag) {
      tag_table_->Set(referree, tag);
    }
    if (referrer_tag != saved_referrer_tag) {
      tag_table_->Set(referrer, referrer_tag);
    }

    return result;
  }

  jvmtiEnv* env;
  ObjectTagTable* tag_table_;
  art::ObjPtr<art::mirror::Object> initial_object_;
  const jvmtiHeapCallbacks* callbacks_;
  art::ObjPtr<art::mirror::Class> class_filter_;
  const HeapFilter heap_filter_;
  const void* user_data_;

  std::vector<art::mirror::Object*> worklist_;
  size_t start_;
  static constexpr size_t kMaxStart = 1000000U;

  std::unordered_set<art::mirror::Object*> visited_;

  bool stop_reports_;

  friend class CollectAndReportRootsVisitor;
};

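// JVMTI entry point. Runs with all threads suspended (and moving GC disabled for
// concurrent moving collectors) so the object graph cannot change mid-traversal.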
jvmtiError HeapUtil::FollowReferences(jvmtiEnv* env,
                                      jint heap_filter,
                                      jclass klass,
                                      jobject initial_object,
                                      const jvmtiHeapCallbacks* callbacks,
                                      const void* user_data) {
  if (callbacks == nullptr) {
    return ERR(NULL_POINTER);
  }

  art::Thread* self = art::Thread::Current();

  art::gc::Heap* heap = art::Runtime::Current()->GetHeap();
  if (heap->IsGcConcurrentAndMoving()) {
    // Need to take a heap dump while GC isn't running. See the
    // comment in Heap::VisitObjects().
    heap->IncrementDisableMovingGC(self);
  }
  {
    art::ScopedObjectAccess soa(self);  // Now we know we have the shared lock.
    art::ScopedThreadSuspension sts(self, art::kWaitingForVisitObjects);
    art::ScopedSuspendAll ssa("FollowReferences");

    art::ObjPtr<art::mirror::Class> class_filter = klass == nullptr
        ? nullptr
        : art::ObjPtr<art::mirror::Class>::DownCast(self->DecodeJObject(klass));
    FollowReferencesHelper frh(this,
                               env,
                               self->DecodeJObject(initial_object),
                               callbacks,
                               class_filter,
                               heap_filter,
                               user_data);
    frh.Init();
    frh.Work();
  }
  if (heap->IsGcConcurrentAndMoving()) {
    heap->DecrementDisableMovingGC(self);
  }

  return ERR(NONE);
}

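// JVMTI entry point. Snapshots all classes known to the class linker into a
// JVMTI-allocated jclass array; per the JVMTI memory rules, the agent owns the
// returned array and must release it with Deallocate.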
jvmtiError HeapUtil::GetLoadedClasses(jvmtiEnv* env,
                                      jint* class_count_ptr,
                                      jclass** classes_ptr) {
  if (class_count_ptr == nullptr || classes_ptr == nullptr) {
    return ERR(NULL_POINTER);
  }

  class ReportClassVisitor : public art::ClassVisitor {
   public:
    explicit ReportClassVisitor(art::Thread* self) : self_(self) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
      classes_.push_back(self_->GetJniEnv()->AddLocalReference<jclass>(klass));
      return true;
    }

    art::Thread* self_;
    std::vector<jclass> classes_;
  };

  art::Thread* self = art::Thread::Current();
  ReportClassVisitor rcv(self);
  {
    art::ScopedObjectAccess soa(self);
    art::Runtime::Current()->GetClassLinker()->VisitClasses(&rcv);
  }

  size_t size = rcv.classes_.size();
  jclass* classes = nullptr;
  jvmtiError alloc_ret = env->Allocate(static_cast<jlong>(size * sizeof(jclass)),
                                       reinterpret_cast<unsigned char**>(&classes));
  if (alloc_ret != ERR(NONE)) {
    return alloc_ret;
  }

  for (size_t i = 0; i < size; ++i) {
    classes[i] = rcv.classes_[i];
  }
  *classes_ptr = classes;
  *class_count_ptr = static_cast<jint>(size);

  return ERR(NONE);
}

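// JVMTI entry point: triggers an explicit collection. The 'false' argument is
// ART's clear_soft_references flag, so soft references are not forcibly cleared.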
jvmtiError HeapUtil::ForceGarbageCollection(jvmtiEnv* env ATTRIBUTE_UNUSED) {
  art::Runtime::Current()->GetHeap()->CollectGarbage(false);

  return ERR(NONE);
}

}  // namespace openjdkjvmti