/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Preparation and completion of hprof data generation. The output is
 * written into two files and then combined. This is necessary because
 * we generate some of the data (strings and classes) while we dump the
 * heap, and some analysis tools require that the class and string data
 * appear first.
 */

#include "hprof.h"

#include <cutils/open_memstream.h>
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/time.h>
#include <sys/uio.h>
#include <time.h>
#include <unistd.h>

#include <set>

#include "android-base/stringprintf.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "base/time_utils.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "common_throws.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "gc_root.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/allocation_record.h"
#include "gc/scoped_gc_critical_section.h"
#include "gc/heap.h"
#include "gc/heap-visit-objects-inl.h"
#include "gc/space/space.h"
#include "globals.h"
#include "jdwp/jdwp.h"
#include "jdwp/jdwp_priv.h"
#include "mirror/class.h"
#include "mirror/class-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "os.h"
#include "safe_map.h"
#include "scoped_thread_state_change-inl.h"
#include "thread_list.h"

namespace art {

namespace hprof {

static constexpr bool kDirectStream = true;

static constexpr uint32_t kHprofTime = 0;
static constexpr uint32_t kHprofNullThread = 0;

static constexpr size_t kMaxObjectsPerSegment = 128;
static constexpr size_t kMaxBytesPerSegment = 4096;

// The static field-name for the synthetic object generated to account for class static overhead.
static constexpr const char* kClassOverheadName = "$classOverhead";

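// Tags for top-level hprof records. Each record is framed by EndianOutput::StartNewRecord() as a
// one-byte tag, a four-byte timestamp and a four-byte body length, followed by the record body.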
enum HprofTag {
  HPROF_TAG_STRING = 0x01,
  HPROF_TAG_LOAD_CLASS = 0x02,
  HPROF_TAG_UNLOAD_CLASS = 0x03,
  HPROF_TAG_STACK_FRAME = 0x04,
  HPROF_TAG_STACK_TRACE = 0x05,
  HPROF_TAG_ALLOC_SITES = 0x06,
  HPROF_TAG_HEAP_SUMMARY = 0x07,
  HPROF_TAG_START_THREAD = 0x0A,
  HPROF_TAG_END_THREAD = 0x0B,
  HPROF_TAG_HEAP_DUMP = 0x0C,
  HPROF_TAG_HEAP_DUMP_SEGMENT = 0x1C,
  HPROF_TAG_HEAP_DUMP_END = 0x2C,
  HPROF_TAG_CPU_SAMPLES = 0x0D,
  HPROF_TAG_CONTROL_SETTINGS = 0x0E,
};

// Values for the first byte of HEAP_DUMP and HEAP_DUMP_SEGMENT records:
enum HprofHeapTag {
  // Traditional.
  HPROF_ROOT_UNKNOWN = 0xFF,
  HPROF_ROOT_JNI_GLOBAL = 0x01,
  HPROF_ROOT_JNI_LOCAL = 0x02,
  HPROF_ROOT_JAVA_FRAME = 0x03,
  HPROF_ROOT_NATIVE_STACK = 0x04,
  HPROF_ROOT_STICKY_CLASS = 0x05,
  HPROF_ROOT_THREAD_BLOCK = 0x06,
  HPROF_ROOT_MONITOR_USED = 0x07,
  HPROF_ROOT_THREAD_OBJECT = 0x08,
  HPROF_CLASS_DUMP = 0x20,
  HPROF_INSTANCE_DUMP = 0x21,
  HPROF_OBJECT_ARRAY_DUMP = 0x22,
  HPROF_PRIMITIVE_ARRAY_DUMP = 0x23,

  // Android.
  HPROF_HEAP_DUMP_INFO = 0xfe,
  HPROF_ROOT_INTERNED_STRING = 0x89,
  HPROF_ROOT_FINALIZING = 0x8a,  // Obsolete.
  HPROF_ROOT_DEBUGGER = 0x8b,
  HPROF_ROOT_REFERENCE_CLEANUP = 0x8c,  // Obsolete.
  HPROF_ROOT_VM_INTERNAL = 0x8d,
  HPROF_ROOT_JNI_MONITOR = 0x8e,
  HPROF_UNREACHABLE = 0x90,  // Obsolete.
  HPROF_PRIMITIVE_ARRAY_NODATA_DUMP = 0xc3,  // Obsolete.
};

enum HprofHeapId {
  HPROF_HEAP_DEFAULT = 0,
  HPROF_HEAP_ZYGOTE = 'Z',
  HPROF_HEAP_APP = 'A',
  HPROF_HEAP_IMAGE = 'I',
};

enum HprofBasicType {
  hprof_basic_object = 2,
  hprof_basic_boolean = 4,
  hprof_basic_char = 5,
  hprof_basic_float = 6,
  hprof_basic_double = 7,
  hprof_basic_byte = 8,
  hprof_basic_short = 9,
  hprof_basic_int = 10,
  hprof_basic_long = 11,
};

typedef uint32_t HprofStringId;
typedef uint32_t HprofClassObjectId;
typedef uint32_t HprofClassSerialNumber;
typedef uint32_t HprofStackTraceSerialNumber;
typedef uint32_t HprofStackFrameId;
static constexpr HprofStackTraceSerialNumber kHprofNullStackTrace = 0;

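// Big-endian writer for hprof data. The base class only tracks record lengths (all Handle*
// hooks are no-ops), which lets Hprof::Dump() run a counting-only first pass to size the output
// before a buffered subclass writes the bytes for real.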
class EndianOutput {
 public:
  EndianOutput() : length_(0), sum_length_(0), max_length_(0), started_(false) {}
  virtual ~EndianOutput() {}

  void StartNewRecord(uint8_t tag, uint32_t time) {
    if (length_ > 0) {
      EndRecord();
    }
    DCHECK_EQ(length_, 0U);
    AddU1(tag);
    AddU4(time);
    AddU4(0xdeaddead);  // Length, replaced on flush.
    started_ = true;
  }

  void EndRecord() {
    // Replace length in header.
    if (started_) {
      UpdateU4(sizeof(uint8_t) + sizeof(uint32_t),
               length_ - sizeof(uint8_t) - 2 * sizeof(uint32_t));
    }

    HandleEndRecord();

    sum_length_ += length_;
    max_length_ = std::max(max_length_, length_);
    length_ = 0;
    started_ = false;
  }

  void AddU1(uint8_t value) {
    AddU1List(&value, 1);
  }
  void AddU2(uint16_t value) {
    AddU2List(&value, 1);
  }
  void AddU4(uint32_t value) {
    AddU4List(&value, 1);
  }

  void AddU8(uint64_t value) {
    AddU8List(&value, 1);
  }

  void AddObjectId(const mirror::Object* value) {
    AddU4(PointerToLowMemUInt32(value));
  }

  void AddStackTraceSerialNumber(HprofStackTraceSerialNumber value) {
    AddU4(value);
  }

  // The ID for the synthetic object generated to account for class static overhead.
  void AddClassStaticsId(const mirror::Class* value) {
    AddU4(1 | PointerToLowMemUInt32(value));
  }

  void AddJniGlobalRefId(jobject value) {
    AddU4(PointerToLowMemUInt32(value));
  }

  void AddClassId(HprofClassObjectId value) {
    AddU4(value);
  }

  void AddStringId(HprofStringId value) {
    AddU4(value);
  }

  void AddU1List(const uint8_t* values, size_t count) {
    HandleU1List(values, count);
    length_ += count;
  }
  void AddU2List(const uint16_t* values, size_t count) {
    HandleU2List(values, count);
    length_ += count * sizeof(uint16_t);
  }
  void AddU4List(const uint32_t* values, size_t count) {
    HandleU4List(values, count);
    length_ += count * sizeof(uint32_t);
  }
  virtual void UpdateU4(size_t offset, uint32_t new_value ATTRIBUTE_UNUSED) {
    DCHECK_LE(offset, length_ - 4);
  }
  void AddU8List(const uint64_t* values, size_t count) {
    HandleU8List(values, count);
    length_ += count * sizeof(uint64_t);
  }

  void AddIdList(mirror::ObjectArray<mirror::Object>* values)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const int32_t length = values->GetLength();
    for (int32_t i = 0; i < length; ++i) {
      AddObjectId(values->GetWithoutChecks(i));
    }
  }

  void AddUtf8String(const char* str) {
    // The terminating NUL character is NOT written.
    AddU1List((const uint8_t*)str, strlen(str));
  }

  size_t Length() const {
    return length_;
  }

  size_t SumLength() const {
    return sum_length_;
  }

  size_t MaxLength() const {
    return max_length_;
  }

 protected:
  virtual void HandleU1List(const uint8_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
  }
  virtual void HandleU1AsU2List(const uint8_t* values ATTRIBUTE_UNUSED,
                                size_t count ATTRIBUTE_UNUSED) {
  }
  virtual void HandleU2List(const uint16_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
  }
  virtual void HandleU4List(const uint32_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
  }
  virtual void HandleU8List(const uint64_t* values ATTRIBUTE_UNUSED,
                            size_t count ATTRIBUTE_UNUSED) {
  }
  virtual void HandleEndRecord() {
  }

  size_t length_;      // Current record size.
  size_t sum_length_;  // Size of all data.
  size_t max_length_;  // Maximum seen length.
  bool started_;       // Was StartRecord called?
};

// This keeps things buffered until flushed.
class EndianOutputBuffered : public EndianOutput {
 public:
  explicit EndianOutputBuffered(size_t reserve_size) {
    buffer_.reserve(reserve_size);
  }
  virtual ~EndianOutputBuffered() {}

  void UpdateU4(size_t offset, uint32_t new_value) OVERRIDE {
    DCHECK_LE(offset, length_ - 4);
    buffer_[offset + 0] = static_cast<uint8_t>((new_value >> 24) & 0xFF);
    buffer_[offset + 1] = static_cast<uint8_t>((new_value >> 16) & 0xFF);
    buffer_[offset + 2] = static_cast<uint8_t>((new_value >> 8) & 0xFF);
    buffer_[offset + 3] = static_cast<uint8_t>((new_value >> 0) & 0xFF);
  }

 protected:
  void HandleU1List(const uint8_t* values, size_t count) OVERRIDE {
    DCHECK_EQ(length_, buffer_.size());
    buffer_.insert(buffer_.end(), values, values + count);
  }

  void HandleU1AsU2List(const uint8_t* values, size_t count) OVERRIDE {
    DCHECK_EQ(length_, buffer_.size());
    // 8-bit values are packed in pairs to form 16-bit blocks, like Java chars; pad with a
    // leading zero byte when the count is odd.
    if (count & 1) {
      buffer_.push_back(0);
    }
    for (size_t i = 0; i < count; ++i) {
      uint8_t value = *values;
      buffer_.push_back(value);
      values++;
    }
  }

  void HandleU2List(const uint16_t* values, size_t count) OVERRIDE {
    DCHECK_EQ(length_, buffer_.size());
    for (size_t i = 0; i < count; ++i) {
      uint16_t value = *values;
      buffer_.push_back(static_cast<uint8_t>((value >> 8) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 0) & 0xFF));
      values++;
    }
  }

  void HandleU4List(const uint32_t* values, size_t count) OVERRIDE {
    DCHECK_EQ(length_, buffer_.size());
    for (size_t i = 0; i < count; ++i) {
      uint32_t value = *values;
      buffer_.push_back(static_cast<uint8_t>((value >> 24) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 16) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 8) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 0) & 0xFF));
      values++;
    }
  }

  void HandleU8List(const uint64_t* values, size_t count) OVERRIDE {
    DCHECK_EQ(length_, buffer_.size());
    for (size_t i = 0; i < count; ++i) {
      uint64_t value = *values;
      buffer_.push_back(static_cast<uint8_t>((value >> 56) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 48) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 40) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 32) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 24) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 16) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 8) & 0xFF));
      buffer_.push_back(static_cast<uint8_t>((value >> 0) & 0xFF));
      values++;
    }
  }

  void HandleEndRecord() OVERRIDE {
    DCHECK_EQ(buffer_.size(), length_);
    if (kIsDebugBuild && started_) {
      uint32_t stored_length =
          static_cast<uint32_t>(buffer_[5]) << 24 |
          static_cast<uint32_t>(buffer_[6]) << 16 |
          static_cast<uint32_t>(buffer_[7]) << 8 |
          static_cast<uint32_t>(buffer_[8]);
      DCHECK_EQ(stored_length, length_ - sizeof(uint8_t) - 2 * sizeof(uint32_t));
    }
    HandleFlush(buffer_.data(), length_);
    buffer_.clear();
  }

  virtual void HandleFlush(const uint8_t* buffer ATTRIBUTE_UNUSED, size_t length ATTRIBUTE_UNUSED) {
  }

  std::vector<uint8_t> buffer_;
};

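// Buffered output that flushes each completed record to a file; write errors are latched and
// reported through Errors().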
class FileEndianOutput FINAL : public EndianOutputBuffered {
 public:
  FileEndianOutput(File* fp, size_t reserved_size)
      : EndianOutputBuffered(reserved_size), fp_(fp), errors_(false) {
    DCHECK(fp != nullptr);
  }
  ~FileEndianOutput() {
  }

  bool Errors() {
    return errors_;
  }

 protected:
  void HandleFlush(const uint8_t* buffer, size_t length) OVERRIDE {
    if (!errors_) {
      errors_ = !fp_->WriteFully(buffer, length);
    }
  }

 private:
  File* fp_;
  bool errors_;
};

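// Buffered output that flushes each completed record over the JDWP connection to DDMS.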
class NetStateEndianOutput FINAL : public EndianOutputBuffered {
 public:
  NetStateEndianOutput(JDWP::JdwpNetStateBase* net_state, size_t reserved_size)
      : EndianOutputBuffered(reserved_size), net_state_(net_state) {
    DCHECK(net_state != nullptr);
  }
  ~NetStateEndianOutput() {}

 protected:
  void HandleFlush(const uint8_t* buffer, size_t length) OVERRIDE {
    std::vector<iovec> iov;
    iov.push_back(iovec());
    iov[0].iov_base = const_cast<void*>(reinterpret_cast<const void*>(buffer));
    iov[0].iov_len = length;
    net_state_->WriteBufferedPacketLocked(iov);
  }

 private:
  JDWP::JdwpNetStateBase* net_state_;
};

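// Shorthand so the record-emitting code below reads as a stream of Add* calls on output_.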
#define __ output_->

class Hprof : public SingleRootVisitor {
 public:
  Hprof(const char* output_filename, int fd, bool direct_to_ddms)
      : filename_(output_filename),
        fd_(fd),
        direct_to_ddms_(direct_to_ddms) {
    LOG(INFO) << "hprof: heap dump \"" << filename_ << "\" starting...";
  }

  void Dump()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_, !Locks::alloc_tracker_lock_) {
    {
      MutexLock mu(Thread::Current(), *Locks::alloc_tracker_lock_);
      if (Runtime::Current()->GetHeap()->IsAllocTrackingEnabled()) {
        PopulateAllocationTrackingTraces();
      }
    }

    // First pass to measure the size of the dump.
    size_t overall_size;
    size_t max_length;
    {
      EndianOutput count_output;
      output_ = &count_output;
      ProcessHeap(false);
      overall_size = count_output.SumLength();
      max_length = count_output.MaxLength();
      output_ = nullptr;
    }

    bool okay;
    visited_objects_.clear();
    if (direct_to_ddms_) {
      if (kDirectStream) {
        okay = DumpToDdmsDirect(overall_size, max_length, CHUNK_TYPE("HPDS"));
      } else {
        okay = DumpToDdmsBuffered(overall_size, max_length);
      }
    } else {
      okay = DumpToFile(overall_size, max_length);
    }

    if (okay) {
      const uint64_t duration = NanoTime() - start_ns_;
      LOG(INFO) << "hprof: heap dump completed (" << PrettySize(RoundUp(overall_size, KB))
                << ") in " << PrettyDuration(duration)
                << " objects " << total_objects_
                << " objects with stack traces " << total_objects_with_stack_trace_;
    }
  }

 private:
  void DumpHeapObject(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void DumpHeapClass(mirror::Class* klass)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void DumpHeapArray(mirror::Array* obj, mirror::Class* klass)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void DumpFakeObjectArray(mirror::Object* obj, const std::set<mirror::Object*>& elements)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void DumpHeapInstanceObject(mirror::Object* obj,
                              mirror::Class* klass,
                              const std::set<mirror::Object*>& fake_roots)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool AddRuntimeInternalObjectsField(mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_);

  void ProcessHeap(bool header_first)
      REQUIRES(Locks::mutator_lock_) {
    // Reset current heap and object count.
    current_heap_ = HPROF_HEAP_DEFAULT;
    objects_in_segment_ = 0;

    if (header_first) {
      ProcessHeader(true);
      ProcessBody();
    } else {
      ProcessBody();
      ProcessHeader(false);
    }
  }

  void ProcessBody() REQUIRES(Locks::mutator_lock_) {
    Runtime* const runtime = Runtime::Current();
    // Walk the roots and the heap.
    output_->StartNewRecord(HPROF_TAG_HEAP_DUMP_SEGMENT, kHprofTime);

    simple_roots_.clear();
    runtime->VisitRoots(this);
    runtime->VisitImageRoots(this);
    auto dump_object = [this](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
      DCHECK(obj != nullptr);
      DumpHeapObject(obj);
    };
    runtime->GetHeap()->VisitObjectsPaused(dump_object);
    output_->StartNewRecord(HPROF_TAG_HEAP_DUMP_END, kHprofTime);
    output_->EndRecord();
  }

  void ProcessHeader(bool string_first) REQUIRES(Locks::mutator_lock_) {
    // Write the header.
    WriteFixedHeader();
    // Write the string and class tables, and any stack traces, to the header.
    // (jhat requires that these appear before any of the data in the body that refers to them.)
    // jhat also requires the string table appear before class table and stack traces.
    // However, WriteStackTraces() can modify the string table, so it's necessary to call
    // WriteStringTable() last in the first pass, to compute the correct length of the output.
    if (string_first) {
      WriteStringTable();
    }
    WriteClassTable();
    WriteStackTraces();
    if (!string_first) {
      WriteStringTable();
    }
    output_->EndRecord();
  }

  void WriteClassTable() REQUIRES_SHARED(Locks::mutator_lock_) {
    for (const auto& p : classes_) {
      mirror::Class* c = p.first;
      HprofClassSerialNumber sn = p.second;
      CHECK(c != nullptr);
      output_->StartNewRecord(HPROF_TAG_LOAD_CLASS, kHprofTime);
      // LOAD CLASS format:
      // U4: class serial number (always > 0)
      // ID: class object ID. We use the address of the class object structure as its ID.
      // U4: stack trace serial number
      // ID: class name string ID
      __ AddU4(sn);
      __ AddObjectId(c);
      __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(c));
      __ AddStringId(LookupClassNameId(c));
    }
  }

  void WriteStringTable() {
    for (const auto& p : strings_) {
      const std::string& string = p.first;
      const HprofStringId id = p.second;

      output_->StartNewRecord(HPROF_TAG_STRING, kHprofTime);

      // STRING format:
      // ID: ID for this string
      // U1*: UTF8 characters for string (NOT null terminated)
      // (the record format encodes the length)
      __ AddU4(id);
      __ AddUtf8String(string.c_str());
    }
  }

  void StartNewHeapDumpSegment() {
    // This flushes the old segment and starts a new one.
    output_->StartNewRecord(HPROF_TAG_HEAP_DUMP_SEGMENT, kHprofTime);
    objects_in_segment_ = 0;
    // Starting a new HEAP_DUMP resets the heap to default.
    current_heap_ = HPROF_HEAP_DEFAULT;
  }

  void CheckHeapSegmentConstraints() {
    if (objects_in_segment_ >= kMaxObjectsPerSegment || output_->Length() >= kMaxBytesPerSegment) {
      StartNewHeapDumpSegment();
    }
  }

  void VisitRoot(mirror::Object* obj, const RootInfo& root_info)
      OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
  void MarkRootObject(const mirror::Object* obj, jobject jni_obj, HprofHeapTag heap_tag,
                      uint32_t thread_serial);

  HprofClassObjectId LookupClassId(mirror::Class* c) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (c != nullptr) {
      auto it = classes_.find(c);
      if (it == classes_.end()) {
        // first time to see this class
        HprofClassSerialNumber sn = next_class_serial_number_++;
        classes_.Put(c, sn);
        // Make sure that we've assigned a string ID for this class' name
        LookupClassNameId(c);
      }
    }
    return PointerToLowMemUInt32(c);
  }

  HprofStackTraceSerialNumber LookupStackTraceSerialNumber(const mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    auto r = allocation_records_.find(obj);
    if (r == allocation_records_.end()) {
      return kHprofNullStackTrace;
    } else {
      const gc::AllocRecordStackTrace* trace = r->second;
      auto result = traces_.find(trace);
      CHECK(result != traces_.end());
      return result->second;
    }
  }

  HprofStringId LookupStringId(mirror::String* string) REQUIRES_SHARED(Locks::mutator_lock_) {
    return LookupStringId(string->ToModifiedUtf8());
  }

  HprofStringId LookupStringId(const char* string) {
    return LookupStringId(std::string(string));
  }

  HprofStringId LookupStringId(const std::string& string) {
    auto it = strings_.find(string);
    if (it != strings_.end()) {
      return it->second;
    }
    HprofStringId id = next_string_id_++;
    strings_.Put(string, id);
    return id;
  }

  HprofStringId LookupClassNameId(mirror::Class* c) REQUIRES_SHARED(Locks::mutator_lock_) {
    return LookupStringId(c->PrettyDescriptor());
  }

  void WriteFixedHeader() {
    // Write the file header.
    // U1: NUL-terminated magic string.
    const char magic[] = "JAVA PROFILE 1.0.3";
    __ AddU1List(reinterpret_cast<const uint8_t*>(magic), sizeof(magic));

    // U4: size of identifiers. We're using addresses as IDs and our heap references are stored
    // as uint32_t.
    // Note of warning: hprof-conv hard-codes the size of identifiers to 4.
    static_assert(sizeof(mirror::HeapReference<mirror::Object>) == sizeof(uint32_t),
                  "Unexpected HeapReference size");
    __ AddU4(sizeof(uint32_t));

    // The current time, in milliseconds since 0:00 GMT, 1/1/70.
    timeval now;
    const uint64_t nowMs = (gettimeofday(&now, nullptr) < 0) ? 0 :
        (uint64_t)now.tv_sec * 1000 + now.tv_usec / 1000;
    // TODO: It seems it would be correct to use U8.
    // U4: high word of the 64-bit time.
    __ AddU4(static_cast<uint32_t>(nowMs >> 32));
    // U4: low word of the 64-bit time.
    __ AddU4(static_cast<uint32_t>(nowMs & 0xFFFFFFFF));
  }

  void WriteStackTraces() REQUIRES_SHARED(Locks::mutator_lock_) {
    // Write a dummy stack trace record so the analysis tools don't freak out.
    output_->StartNewRecord(HPROF_TAG_STACK_TRACE, kHprofTime);
    __ AddStackTraceSerialNumber(kHprofNullStackTrace);
    __ AddU4(kHprofNullThread);
    __ AddU4(0);  // no frames

    // TODO: jhat complains "WARNING: Stack trace not found for serial # -1", but no trace should
    // have -1 as its serial number (as long as HprofStackTraceSerialNumber doesn't overflow).
    for (const auto& it : traces_) {
      const gc::AllocRecordStackTrace* trace = it.first;
      HprofStackTraceSerialNumber trace_sn = it.second;
      size_t depth = trace->GetDepth();

      // First write stack frames of the trace
      for (size_t i = 0; i < depth; ++i) {
        const gc::AllocRecordStackTraceElement* frame = &trace->GetStackElement(i);
        ArtMethod* method = frame->GetMethod();
        CHECK(method != nullptr);
        output_->StartNewRecord(HPROF_TAG_STACK_FRAME, kHprofTime);
        // STACK FRAME format:
        // ID: stack frame ID (assigned sequentially in PopulateAllocationTrackingTraces())
        // ID: method name string ID
        // ID: method signature string ID
        // ID: source file name string ID
        // U4: class serial number
        // U4: >0, line number; 0, no line information available; -1, unknown location
        auto frame_result = frames_.find(frame);
        CHECK(frame_result != frames_.end());
        __ AddU4(frame_result->second);
        __ AddStringId(LookupStringId(method->GetName()));
        __ AddStringId(LookupStringId(method->GetSignature().ToString()));
        const char* source_file = method->GetDeclaringClassSourceFile();
        if (source_file == nullptr) {
          source_file = "";
        }
        __ AddStringId(LookupStringId(source_file));
        auto class_result = classes_.find(method->GetDeclaringClass());
        CHECK(class_result != classes_.end());
        __ AddU4(class_result->second);
        __ AddU4(frame->ComputeLineNumber());
      }

      // Then write the trace itself
      output_->StartNewRecord(HPROF_TAG_STACK_TRACE, kHprofTime);
      // STACK TRACE format:
      // U4: stack trace serial number (assigned sequentially in PopulateAllocationTrackingTraces())
      // U4: thread serial number. We use Thread::GetTid().
      // U4: number of frames
      // [ID]*: series of stack frame ID's
      __ AddStackTraceSerialNumber(trace_sn);
      __ AddU4(trace->GetTid());
      __ AddU4(depth);
      for (size_t i = 0; i < depth; ++i) {
        const gc::AllocRecordStackTraceElement* frame = &trace->GetStackElement(i);
        auto frame_result = frames_.find(frame);
        CHECK(frame_result != frames_.end());
        __ AddU4(frame_result->second);
      }
    }
  }

  bool DumpToDdmsBuffered(size_t overall_size ATTRIBUTE_UNUSED, size_t max_length ATTRIBUTE_UNUSED)
      REQUIRES(Locks::mutator_lock_) {
    LOG(FATAL) << "Unimplemented";
    UNREACHABLE();
    // // Send the data off to DDMS.
    // iovec iov[2];
    // iov[0].iov_base = header_data_ptr_;
    // iov[0].iov_len = header_data_size_;
    // iov[1].iov_base = body_data_ptr_;
    // iov[1].iov_len = body_data_size_;
    // Dbg::DdmSendChunkV(CHUNK_TYPE("HPDS"), iov, 2);
  }

  bool DumpToFile(size_t overall_size, size_t max_length)
      REQUIRES(Locks::mutator_lock_) {
    // Where exactly are we writing to?
    int out_fd;
    if (fd_ >= 0) {
      out_fd = dup(fd_);
      if (out_fd < 0) {
        ThrowRuntimeException("Couldn't dump heap; dup(%d) failed: %s", fd_, strerror(errno));
        return false;
      }
    } else {
      out_fd = open(filename_.c_str(), O_WRONLY|O_CREAT|O_TRUNC, 0644);
      if (out_fd < 0) {
        ThrowRuntimeException("Couldn't dump heap; open(\"%s\") failed: %s", filename_.c_str(),
                              strerror(errno));
        return false;
      }
    }

    std::unique_ptr<File> file(new File(out_fd, filename_, true));
    bool okay;
    {
      FileEndianOutput file_output(file.get(), max_length);
      output_ = &file_output;
      ProcessHeap(true);
      okay = !file_output.Errors();

      if (okay) {
        // Check for expected size. Output is expected to be less-or-equal than first phase, see
        // b/23521263.
        DCHECK_LE(file_output.SumLength(), overall_size);
      }
      output_ = nullptr;
    }

    if (okay) {
      okay = file->FlushCloseOrErase() == 0;
    } else {
      file->Erase();
    }
    if (!okay) {
      std::string msg(android::base::StringPrintf("Couldn't dump heap; writing \"%s\" failed: %s",
                                                  filename_.c_str(),
                                                  strerror(errno)));
      ThrowRuntimeException("%s", msg.c_str());
      LOG(ERROR) << msg;
    }

    return okay;
  }

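  // Streams the dump to DDMS as a single chunk. The chunk header is sized from the first-pass
  // byte count, and the socket lock is held for the whole write so the chunk stays contiguous.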
  bool DumpToDdmsDirect(size_t overall_size, size_t max_length, uint32_t chunk_type)
      REQUIRES(Locks::mutator_lock_) {
    CHECK(direct_to_ddms_);
    JDWP::JdwpState* state = Dbg::GetJdwpState();
    CHECK(state != nullptr);
    JDWP::JdwpNetStateBase* net_state = state->netState;
    CHECK(net_state != nullptr);

    // Hold the socket lock for the whole time since we want this to be atomic.
    MutexLock mu(Thread::Current(), *net_state->GetSocketLock());

    // Prepare the Ddms chunk.
    constexpr size_t kChunkHeaderSize = kJDWPHeaderLen + 8;
    uint8_t chunk_header[kChunkHeaderSize] = { 0 };
    state->SetupChunkHeader(chunk_type, overall_size, kChunkHeaderSize, chunk_header);

    // Prepare the output and send the chunk header.
    NetStateEndianOutput net_output(net_state, max_length);
    output_ = &net_output;
    net_output.AddU1List(chunk_header, kChunkHeaderSize);

    // Write the dump.
    ProcessHeap(true);

    // Check for expected size. See DumpToFile for comment.
    DCHECK_LE(net_output.SumLength(), overall_size + kChunkHeaderSize);
    output_ = nullptr;

    return true;
  }

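  // Builds the object->trace, trace->serial-number and frame->id maps from the heap's allocation
  // records so that stack traces can be written into the header and referenced by object dumps.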
  void PopulateAllocationTrackingTraces()
      REQUIRES(Locks::mutator_lock_, Locks::alloc_tracker_lock_) {
    gc::AllocRecordObjectMap* records = Runtime::Current()->GetHeap()->GetAllocationRecords();
    CHECK(records != nullptr);
    HprofStackTraceSerialNumber next_trace_sn = kHprofNullStackTrace + 1;
    HprofStackFrameId next_frame_id = 0;
    size_t count = 0;

    for (auto it = records->Begin(), end = records->End(); it != end; ++it) {
      const mirror::Object* obj = it->first.Read();
      if (obj == nullptr) {
        continue;
      }
      ++count;
      const gc::AllocRecordStackTrace* trace = it->second.GetStackTrace();

      // Copy the pair into a real hash map to speed up look up.
      auto records_result = allocation_records_.emplace(obj, trace);
      // The insertion should always succeed, i.e. no duplicate object pointers in "records"
      CHECK(records_result.second);

      // Generate serial numbers for traces, and IDs for frames.
      auto traces_result = traces_.find(trace);
      if (traces_result == traces_.end()) {
        traces_.emplace(trace, next_trace_sn++);
        // only check frames if the trace is newly discovered
        for (size_t i = 0, depth = trace->GetDepth(); i < depth; ++i) {
          const gc::AllocRecordStackTraceElement* frame = &trace->GetStackElement(i);
          auto frames_result = frames_.find(frame);
          if (frames_result == frames_.end()) {
            frames_.emplace(frame, next_frame_id++);
          }
        }
      }
    }
    CHECK_EQ(traces_.size(), next_trace_sn - kHprofNullStackTrace - 1);
    CHECK_EQ(frames_.size(), next_frame_id);
    total_objects_with_stack_trace_ = count;
  }

  // If direct_to_ddms_ is set, "filename_" and "fd" will be ignored.
  // Otherwise, "filename_" must be valid, though if "fd" >= 0 it will
  // only be used for debug messages.
  std::string filename_;
  int fd_;
  bool direct_to_ddms_;

  uint64_t start_ns_ = NanoTime();

  EndianOutput* output_ = nullptr;

  HprofHeapId current_heap_ = HPROF_HEAP_DEFAULT;  // Which heap we're currently dumping.
  size_t objects_in_segment_ = 0;

  size_t total_objects_ = 0u;
  size_t total_objects_with_stack_trace_ = 0u;

  HprofStringId next_string_id_ = 0x400000;
  SafeMap<std::string, HprofStringId> strings_;
  HprofClassSerialNumber next_class_serial_number_ = 1;
  SafeMap<mirror::Class*, HprofClassSerialNumber> classes_;

  std::unordered_map<const gc::AllocRecordStackTrace*, HprofStackTraceSerialNumber,
                     gc::HashAllocRecordTypesPtr<gc::AllocRecordStackTrace>,
                     gc::EqAllocRecordTypesPtr<gc::AllocRecordStackTrace>> traces_;
  std::unordered_map<const gc::AllocRecordStackTraceElement*, HprofStackFrameId,
                     gc::HashAllocRecordTypesPtr<gc::AllocRecordStackTraceElement>,
                     gc::EqAllocRecordTypesPtr<gc::AllocRecordStackTraceElement>> frames_;
  std::unordered_map<const mirror::Object*, const gc::AllocRecordStackTrace*> allocation_records_;

  // Set used to keep track of what simple root records we have already
  // emitted, to avoid emitting duplicate entries. The simple root records are
  // those that contain no other information than the root type and the object
  // id. A pair of root type and object id is packed into a uint64_t, with
  // the root type in the upper 32 bits and the object id in the lower 32
  // bits.
  std::unordered_set<uint64_t> simple_roots_;

  // To make sure we don't dump the same object multiple times. b/34967844
  std::unordered_set<mirror::Object*> visited_objects_;

  friend class GcRootVisitor;
  DISALLOW_COPY_AND_ASSIGN(Hprof);
};

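// Maps a type descriptor (e.g. "I", "J", "[I", "Ljava/lang/Object;") to the hprof basic type tag,
// optionally returning the field's size in bytes through |size_out|.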
static HprofBasicType SignatureToBasicTypeAndSize(const char* sig, size_t* size_out) {
  char c = sig[0];
  HprofBasicType ret;
  size_t size;

  switch (c) {
    case '[':
    case 'L':
      ret = hprof_basic_object;
      size = 4;
      break;
    case 'Z':
      ret = hprof_basic_boolean;
      size = 1;
      break;
    case 'C':
      ret = hprof_basic_char;
      size = 2;
      break;
    case 'F':
      ret = hprof_basic_float;
      size = 4;
      break;
    case 'D':
      ret = hprof_basic_double;
      size = 8;
      break;
    case 'B':
      ret = hprof_basic_byte;
      size = 1;
      break;
    case 'S':
      ret = hprof_basic_short;
      size = 2;
      break;
    case 'I':
      ret = hprof_basic_int;
      size = 4;
      break;
    case 'J':
      ret = hprof_basic_long;
      size = 8;
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (size_out != nullptr) {
    *size_out = size;
  }

  return ret;
}

// Emits a root record for |obj| with the given root heap tag while the root set is being
// visited. Does nothing when heap_tag is zero.
void Hprof::MarkRootObject(const mirror::Object* obj, jobject jni_obj, HprofHeapTag heap_tag,
                           uint32_t thread_serial) {
  if (heap_tag == 0) {
    return;
  }

  CheckHeapSegmentConstraints();

  switch (heap_tag) {
    // ID: object ID
    case HPROF_ROOT_UNKNOWN:
    case HPROF_ROOT_STICKY_CLASS:
    case HPROF_ROOT_MONITOR_USED:
    case HPROF_ROOT_INTERNED_STRING:
    case HPROF_ROOT_DEBUGGER:
    case HPROF_ROOT_VM_INTERNAL: {
      uint64_t key = (static_cast<uint64_t>(heap_tag) << 32) | PointerToLowMemUInt32(obj);
      if (simple_roots_.insert(key).second) {
        __ AddU1(heap_tag);
        __ AddObjectId(obj);
      }
      break;
    }

    // ID: object ID
    // ID: JNI global ref ID
    case HPROF_ROOT_JNI_GLOBAL:
      __ AddU1(heap_tag);
      __ AddObjectId(obj);
      __ AddJniGlobalRefId(jni_obj);
      break;

    // ID: object ID
    // U4: thread serial number
    // U4: frame number in stack trace (-1 for empty)
    case HPROF_ROOT_JNI_LOCAL:
    case HPROF_ROOT_JNI_MONITOR:
    case HPROF_ROOT_JAVA_FRAME:
      __ AddU1(heap_tag);
      __ AddObjectId(obj);
      __ AddU4(thread_serial);
      __ AddU4((uint32_t)-1);
      break;

    // ID: object ID
    // U4: thread serial number
    case HPROF_ROOT_NATIVE_STACK:
    case HPROF_ROOT_THREAD_BLOCK:
      __ AddU1(heap_tag);
      __ AddObjectId(obj);
      __ AddU4(thread_serial);
      break;

    // ID: thread object ID
    // U4: thread serial number
    // U4: stack trace serial number
    case HPROF_ROOT_THREAD_OBJECT:
      __ AddU1(heap_tag);
      __ AddObjectId(obj);
      __ AddU4(thread_serial);
      __ AddU4((uint32_t)-1);  // xxx
      break;

    case HPROF_CLASS_DUMP:
    case HPROF_INSTANCE_DUMP:
    case HPROF_OBJECT_ARRAY_DUMP:
    case HPROF_PRIMITIVE_ARRAY_DUMP:
    case HPROF_HEAP_DUMP_INFO:
    case HPROF_PRIMITIVE_ARRAY_NODATA_DUMP:
      // Ignored.
      break;

    case HPROF_ROOT_FINALIZING:
    case HPROF_ROOT_REFERENCE_CLEANUP:
    case HPROF_UNREACHABLE:
      LOG(FATAL) << "obsolete tag " << static_cast<int>(heap_tag);
      break;
  }

  ++objects_in_segment_;
}

bool Hprof::AddRuntimeInternalObjectsField(mirror::Class* klass) {
  if (klass->IsDexCacheClass()) {
    return true;
  }
  // IsClassLoaderClass is true for subclasses of classloader but we only want to add the fake
  // field to the java.lang.ClassLoader class.
  if (klass->IsClassLoaderClass() && klass->GetSuperClass()->IsObjectClass()) {
    return true;
  }
  return false;
}

void Hprof::DumpHeapObject(mirror::Object* obj) {
  // Ignore classes that are retired.
  if (obj->IsClass() && obj->AsClass()->IsRetired()) {
    return;
  }
  DCHECK(visited_objects_.insert(obj).second) << "Already visited " << obj;

  ++total_objects_;

  class RootCollector {
   public:
    explicit RootCollector() {}

    void operator()(mirror::Object*, MemberOffset, bool) const {}

    // Note that these don't have read barriers. It's OK though, since the GC is guaranteed not
    // to be running during the hprof dumping process.
    void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
        REQUIRES_SHARED(Locks::mutator_lock_) {
      if (!root->IsNull()) {
        VisitRoot(root);
      }
    }

    void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
        REQUIRES_SHARED(Locks::mutator_lock_) {
      roots_.insert(root->AsMirrorPtr());
    }

    const std::set<mirror::Object*>& GetRoots() const {
      return roots_;
    }

   private:
    // These roots are actually live from the object. Avoid marking them as roots in hprof to make
    // it easier to debug class unloading.
    mutable std::set<mirror::Object*> roots_;
  };

  RootCollector visitor;
  // Collect all native roots.
  if (!obj->IsClass()) {
    obj->VisitReferences(visitor, VoidFunctor());
  }

  gc::Heap* const heap = Runtime::Current()->GetHeap();
  const gc::space::ContinuousSpace* const space = heap->FindContinuousSpaceFromObject(obj, true);
  HprofHeapId heap_type = HPROF_HEAP_APP;
  if (space != nullptr) {
    if (space->IsZygoteSpace()) {
      heap_type = HPROF_HEAP_ZYGOTE;
      VisitRoot(obj, RootInfo(kRootVMInternal));
    } else if (space->IsImageSpace() && heap->ObjectIsInBootImageSpace(obj)) {
      // Only count objects in the boot image as HPROF_HEAP_IMAGE, this leaves app image objects as
      // HPROF_HEAP_APP. b/35762934
      heap_type = HPROF_HEAP_IMAGE;
      VisitRoot(obj, RootInfo(kRootVMInternal));
    }
  } else {
    const auto* los = heap->GetLargeObjectsSpace();
    if (los->Contains(obj) && los->IsZygoteLargeObject(Thread::Current(), obj)) {
      heap_type = HPROF_HEAP_ZYGOTE;
      VisitRoot(obj, RootInfo(kRootVMInternal));
    }
  }
  CheckHeapSegmentConstraints();

  if (heap_type != current_heap_) {
    HprofStringId nameId;

    // This object is in a different heap than the current one.
    // Emit a HEAP_DUMP_INFO tag to change heaps.
    __ AddU1(HPROF_HEAP_DUMP_INFO);
    __ AddU4(static_cast<uint32_t>(heap_type));  // uint32_t: heap type
    switch (heap_type) {
      case HPROF_HEAP_APP:
        nameId = LookupStringId("app");
        break;
      case HPROF_HEAP_ZYGOTE:
        nameId = LookupStringId("zygote");
        break;
      case HPROF_HEAP_IMAGE:
        nameId = LookupStringId("image");
        break;
      default:
        // Internal error
        LOG(ERROR) << "Unexpected desiredHeap";
        nameId = LookupStringId("<ILLEGAL>");
        break;
    }
    __ AddStringId(nameId);
    current_heap_ = heap_type;
  }

  mirror::Class* c = obj->GetClass();
  if (c == nullptr) {
    // This object will bother HprofReader, because it has a null class, so just don't dump it.
    // It could be an object that was just allocated and hasn't been initialized yet.
  } else {
    if (obj->IsClass()) {
      DumpHeapClass(obj->AsClass());
    } else if (c->IsArrayClass()) {
      DumpHeapArray(obj->AsArray(), c);
    } else {
      DumpHeapInstanceObject(obj, c, visitor.GetRoots());
    }
  }

  ++objects_in_segment_;
}

void Hprof::DumpHeapClass(mirror::Class* klass) {
  if (!klass->IsResolved()) {
    // Class is allocated but not yet resolved: we cannot access its fields or super class.
    return;
  }

  // Note: We will emit instance fields of Class as synthetic static fields with a prefix of
  // "$class$" so the class fields are visible in hprof dumps. For tools to account for that
  // correctly, we'll emit an instance size of zero for java.lang.Class, and also emit the
  // instance fields of java.lang.Object.
  //
  // For other overhead (currently only the embedded vtable), we will generate a synthetic
  // byte array (or field[s] in case the overhead size is of reference size or less).

  const size_t num_static_fields = klass->NumStaticFields();

  // Total class size:
  // * class instance fields (including Object instance fields)
  // * vtable
  // * class static fields
  const size_t total_class_size = klass->GetClassSize();

  // Base class size (common parts of all Class instances):
  // * class instance fields (including Object instance fields)
  constexpr size_t base_class_size = sizeof(mirror::Class);
  CHECK_LE(base_class_size, total_class_size);

  // Difference of Total and Base:
  // * vtable
  // * class static fields
  const size_t base_overhead_size = total_class_size - base_class_size;

  // Tools (ahat/Studio) will count the static fields and account for them in the class size. We
  // must thus subtract them from base_overhead_size or they will be double-counted.
  size_t class_static_fields_size = 0;
  for (ArtField& class_static_field : klass->GetSFields()) {
    size_t size = 0;
    SignatureToBasicTypeAndSize(class_static_field.GetTypeDescriptor(), &size);
    class_static_fields_size += size;
  }

  CHECK_GE(base_overhead_size, class_static_fields_size);
  // Now we have:
  // * vtable
  const size_t base_no_statics_overhead_size = base_overhead_size - class_static_fields_size;

  // We may decide to display native overhead (the actual IMT, ArtFields and ArtMethods) in the
  // future.
  const size_t java_heap_overhead_size = base_no_statics_overhead_size;

1243  // For overhead greater than 4 bytes, we'll allocate a synthetic byte array.
1244 if (java_heap_overhead_size > 4) {
Andreas Gampe3a913092015-01-10 00:26:17 -08001245 // Create a byte array to reflect the allocation of the
1246 // StaticField array at the end of this class.
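    // The array length is the overhead minus 4 bytes; the synthetic reference field emitted in
    // the CLASS_DUMP record below presumably accounts for the remaining 4 bytes.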
1247 __ AddU1(HPROF_PRIMITIVE_ARRAY_DUMP);
1248 __ AddClassStaticsId(klass);
Man Cao8c2ff642015-05-27 17:25:30 -07001249 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(klass));
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001250 __ AddU4(java_heap_overhead_size - 4);
Andreas Gampe3a913092015-01-10 00:26:17 -08001251 __ AddU1(hprof_basic_byte);
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001252 for (size_t i = 0; i < java_heap_overhead_size - 4; ++i) {
Andreas Gampe3a913092015-01-10 00:26:17 -08001253 __ AddU1(0);
1254 }
1255 }
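  // Number of synthetic static fields used for the overhead: one reference field when a byte
  // array was emitted above (> 4 bytes), one primitive field for 1, 2 or 4 bytes, and two
  // fields (a short plus a byte) for 3 bytes.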
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001256 const size_t java_heap_overhead_field_count = java_heap_overhead_size > 0
1257 ? (java_heap_overhead_size == 3 ? 2u : 1u)
1258 : 0;
Andreas Gampe3a913092015-01-10 00:26:17 -08001259
1260 __ AddU1(HPROF_CLASS_DUMP);
1261 __ AddClassId(LookupClassId(klass));
Man Cao8c2ff642015-05-27 17:25:30 -07001262 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(klass));
Andreas Gampe3a913092015-01-10 00:26:17 -08001263 __ AddClassId(LookupClassId(klass->GetSuperClass()));
1264 __ AddObjectId(klass->GetClassLoader());
1265 __ AddObjectId(nullptr); // no signer
1266 __ AddObjectId(nullptr); // no prot domain
1267 __ AddObjectId(nullptr); // reserved
1268 __ AddObjectId(nullptr); // reserved
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001269 // Instance size.
Andreas Gampe3a913092015-01-10 00:26:17 -08001270 if (klass->IsClassClass()) {
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001271 // As mentioned above, we will emit instance fields as synthetic static fields. So the
1272 // base object is "empty."
1273 __ AddU4(0);
Jeff Hao0ce43532015-05-12 18:58:32 -07001274 } else if (klass->IsStringClass()) {
1275 // Strings are variable length with character data at the end like arrays.
1276 // This outputs the size of an empty string.
1277 __ AddU4(sizeof(mirror::String));
1278 } else if (klass->IsArrayClass() || klass->IsPrimitive()) {
Andreas Gampe3a913092015-01-10 00:26:17 -08001279 __ AddU4(0);
1280 } else {
1281 __ AddU4(klass->GetObjectSize()); // instance size
1282 }
1283
1284 __ AddU2(0); // empty const pool
1285
1286 // Static fields
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001287 //
1288 // Note: we report Class' and Object's instance fields here, too. This is for visibility reasons.
1289 // (b/38167721)
1290 mirror::Class* class_class = klass->GetClass();
1291
1292 DCHECK(class_class->GetSuperClass()->IsObjectClass());
1293 const size_t static_fields_reported = class_class->NumInstanceFields()
1294 + class_class->GetSuperClass()->NumInstanceFields()
1295 + java_heap_overhead_field_count
1296 + num_static_fields;
1297 __ AddU2(dchecked_integral_cast<uint16_t>(static_fields_reported));
1298
1299 if (java_heap_overhead_size != 0) {
Mathieu Chartiera6d3a7e2015-06-03 16:51:09 -07001300 __ AddStringId(LookupStringId(kClassOverheadName));
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001301 size_t overhead_fields = 0;
1302 if (java_heap_overhead_size > 4) {
1303 __ AddU1(hprof_basic_object);
1304 __ AddClassStaticsId(klass);
1305 ++overhead_fields;
1306 } else {
1307 switch (java_heap_overhead_size) {
1308 case 4: {
1309 __ AddU1(hprof_basic_int);
1310 __ AddU4(0);
1311 ++overhead_fields;
1312 break;
1313 }
Andreas Gampe3a913092015-01-10 00:26:17 -08001314
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001315 case 2: {
1316 __ AddU1(hprof_basic_short);
1317 __ AddU2(0);
1318 ++overhead_fields;
1319 break;
1320 }
Andreas Gampe3a913092015-01-10 00:26:17 -08001321
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001322 case 3: {
1323 __ AddU1(hprof_basic_short);
1324 __ AddU2(0);
1325 __ AddStringId(LookupStringId(std::string(kClassOverheadName) + "2"));
1326 ++overhead_fields;
1327 }
1328 FALLTHROUGH_INTENDED;
1329
1330 case 1: {
1331 __ AddU1(hprof_basic_byte);
1332 __ AddU1(0);
1333 ++overhead_fields;
Andreas Gampe3a913092015-01-10 00:26:17 -08001334 break;
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001335 }
Andreas Gampe3a913092015-01-10 00:26:17 -08001336 }
1337 }
Andreas Gampe3a2c55c2017-05-10 13:44:36 -07001338 DCHECK_EQ(java_heap_overhead_field_count, overhead_fields);
1339 }
1340
1341 // Helper lambda to emit the given static field. The second argument name_fn will be called to
1342  // generate the name to emit. This can be used to emit something other than the field's actual
1343 // name.
1344 auto static_field_writer = [&](ArtField& field, auto name_fn)
1345 REQUIRES_SHARED(Locks::mutator_lock_) {
1346 __ AddStringId(LookupStringId(name_fn(field)));
1347
1348 size_t size;
1349 HprofBasicType t = SignatureToBasicTypeAndSize(field.GetTypeDescriptor(), &size);
1350 __ AddU1(t);
1351 switch (t) {
1352 case hprof_basic_byte:
1353 __ AddU1(field.GetByte(klass));
1354 return;
1355 case hprof_basic_boolean:
1356 __ AddU1(field.GetBoolean(klass));
1357 return;
1358 case hprof_basic_char:
1359 __ AddU2(field.GetChar(klass));
1360 return;
1361 case hprof_basic_short:
1362 __ AddU2(field.GetShort(klass));
1363 return;
1364 case hprof_basic_float:
1365 case hprof_basic_int:
1366 case hprof_basic_object:
1367 __ AddU4(field.Get32(klass));
1368 return;
1369 case hprof_basic_double:
1370 case hprof_basic_long:
1371 __ AddU8(field.Get64(klass));
1372 return;
1373 }
1374    LOG(FATAL) << "Unexpected basic type " << static_cast<int>(t) << " (size " << size << ")";
1375 UNREACHABLE();
1376 };
1377
1378 {
1379 auto class_instance_field_name_fn = [](ArtField& field) REQUIRES_SHARED(Locks::mutator_lock_) {
1380 return std::string("$class$") + field.GetName();
1381 };
1382 for (ArtField& class_instance_field : class_class->GetIFields()) {
1383 static_field_writer(class_instance_field, class_instance_field_name_fn);
1384 }
1385 for (ArtField& object_instance_field : class_class->GetSuperClass()->GetIFields()) {
1386 static_field_writer(object_instance_field, class_instance_field_name_fn);
1387 }
1388 }
1389
1390 {
1391 auto class_static_field_name_fn = [](ArtField& field) REQUIRES_SHARED(Locks::mutator_lock_) {
1392 return field.GetName();
1393 };
1394 for (ArtField& class_static_field : klass->GetSFields()) {
1395 static_field_writer(class_static_field, class_static_field_name_fn);
1396 }
Andreas Gampe3a913092015-01-10 00:26:17 -08001397 }
1398
1399 // Instance fields for this class (no superclass fields)
Mathieu Chartier7c1f53e2015-06-03 10:51:13 -07001400 int iFieldCount = klass->NumInstanceFields();
Mathieu Chartier996f75e2017-03-31 11:18:41 -07001401 // add_internal_runtime_objects is only for classes that may retain objects live through means
1402  // other than fields. This is never the case for strings.
1403 const bool add_internal_runtime_objects = AddRuntimeInternalObjectsField(klass);
1404 if (klass->IsStringClass() || add_internal_runtime_objects) {
Jeff Hao848f70a2014-01-15 13:49:50 -08001405    __ AddU2(static_cast<uint16_t>(iFieldCount + 1));
1406  } else {
1407    __ AddU2(static_cast<uint16_t>(iFieldCount));
1408 }
Andreas Gampe3a913092015-01-10 00:26:17 -08001409 for (int i = 0; i < iFieldCount; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001410 ArtField* f = klass->GetInstanceField(i);
Andreas Gampe3a913092015-01-10 00:26:17 -08001411 __ AddStringId(LookupStringId(f->GetName()));
1412 HprofBasicType t = SignatureToBasicTypeAndSize(f->GetTypeDescriptor(), nullptr);
1413 __ AddU1(t);
1414 }
Vladimir Marko595beb32017-02-06 14:11:54 +00001415 // Add native value character array for strings / byte array for compressed strings.
Jeff Hao848f70a2014-01-15 13:49:50 -08001416 if (klass->IsStringClass()) {
1417 __ AddStringId(LookupStringId("value"));
1418 __ AddU1(hprof_basic_object);
Mathieu Chartier996f75e2017-03-31 11:18:41 -07001419 } else if (add_internal_runtime_objects) {
1420 __ AddStringId(LookupStringId("runtimeInternalObjects"));
1421 __ AddU1(hprof_basic_object);
1422 }
1423}
1424
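// Emits a synthetic Object[] (identified by "obj") whose elements are runtime-internal objects,
// so that those objects appear reachable in the dump. Used by DumpHeapInstanceObject for classes
// flagged by AddRuntimeInternalObjectsField.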
1425void Hprof::DumpFakeObjectArray(mirror::Object* obj, const std::set<mirror::Object*>& elements) {
1426 __ AddU1(HPROF_OBJECT_ARRAY_DUMP);
1427 __ AddObjectId(obj);
1428 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(obj));
1429 __ AddU4(elements.size());
1430 __ AddClassId(LookupClassId(
1431 Runtime::Current()->GetClassLinker()->GetClassRoot(ClassLinker::kObjectArrayClass)));
1432 for (mirror::Object* e : elements) {
1433 __ AddObjectId(e);
Jeff Hao848f70a2014-01-15 13:49:50 -08001434 }
Andreas Gampe3a913092015-01-10 00:26:17 -08001435}
1436
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07001437void Hprof::DumpHeapArray(mirror::Array* obj, mirror::Class* klass) {
Andreas Gampe3a913092015-01-10 00:26:17 -08001438 uint32_t length = obj->GetLength();
1439
1440 if (obj->IsObjectArray()) {
1441 // obj is an object array.
1442 __ AddU1(HPROF_OBJECT_ARRAY_DUMP);
1443
1444 __ AddObjectId(obj);
Man Cao8c2ff642015-05-27 17:25:30 -07001445 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(obj));
Andreas Gampe3a913092015-01-10 00:26:17 -08001446 __ AddU4(length);
1447 __ AddClassId(LookupClassId(klass));
1448
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001449 // Dump the elements, which are always objects or null.
Andreas Gampe3a913092015-01-10 00:26:17 -08001450 __ AddIdList(obj->AsObjectArray<mirror::Object>());
1451 } else {
1452 size_t size;
1453 HprofBasicType t = SignatureToBasicTypeAndSize(
1454 Primitive::Descriptor(klass->GetComponentType()->GetPrimitiveType()), &size);
1455
1456 // obj is a primitive array.
1457 __ AddU1(HPROF_PRIMITIVE_ARRAY_DUMP);
1458
1459 __ AddObjectId(obj);
Man Cao8c2ff642015-05-27 17:25:30 -07001460 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(obj));
Andreas Gampe3a913092015-01-10 00:26:17 -08001461 __ AddU4(length);
1462 __ AddU1(t);
1463
1464 // Dump the raw, packed element values.
1465 if (size == 1) {
1466 __ AddU1List(reinterpret_cast<const uint8_t*>(obj->GetRawData(sizeof(uint8_t), 0)), length);
1467 } else if (size == 2) {
1468 __ AddU2List(reinterpret_cast<const uint16_t*>(obj->GetRawData(sizeof(uint16_t), 0)), length);
1469 } else if (size == 4) {
1470 __ AddU4List(reinterpret_cast<const uint32_t*>(obj->GetRawData(sizeof(uint32_t), 0)), length);
1471 } else if (size == 8) {
1472 __ AddU8List(reinterpret_cast<const uint64_t*>(obj->GetRawData(sizeof(uint64_t), 0)), length);
1473 }
1474 }
1475}
1476
Mathieu Chartier996f75e2017-03-31 11:18:41 -07001477void Hprof::DumpHeapInstanceObject(mirror::Object* obj,
1478 mirror::Class* klass,
1479 const std::set<mirror::Object*>& fake_roots) {
Andreas Gampe3a913092015-01-10 00:26:17 -08001480 // obj is an instance object.
1481 __ AddU1(HPROF_INSTANCE_DUMP);
1482 __ AddObjectId(obj);
Man Cao8c2ff642015-05-27 17:25:30 -07001483 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(obj));
Andreas Gampe3a913092015-01-10 00:26:17 -08001484 __ AddClassId(LookupClassId(klass));
1485
1486 // Reserve some space for the length of the instance data, which we won't
1487 // know until we're done writing it.
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07001488 size_t size_patch_offset = output_->Length();
Andreas Gampe3a913092015-01-10 00:26:17 -08001489 __ AddU4(0x77777777);
1490
Mathieu Chartier07d7eab2015-06-23 15:45:15 -07001491 // What we will use for the string value if the object is a string.
1492 mirror::Object* string_value = nullptr;
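  // Synthetic id for the array of runtime-internal objects, if this class needs one.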
Mathieu Chartier996f75e2017-03-31 11:18:41 -07001493 mirror::Object* fake_object_array = nullptr;
Mathieu Chartier07d7eab2015-06-23 15:45:15 -07001494
1495 // Write the instance data; fields for this class, followed by super class fields, and so on.
Mathieu Chartier7c1f53e2015-06-03 10:51:13 -07001496 do {
Mathieu Chartier07d7eab2015-06-23 15:45:15 -07001497 const size_t instance_fields = klass->NumInstanceFields();
1498 for (size_t i = 0; i < instance_fields; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001499 ArtField* f = klass->GetInstanceField(i);
Andreas Gampe3a913092015-01-10 00:26:17 -08001500 size_t size;
Mathieu Chartier07d7eab2015-06-23 15:45:15 -07001501 HprofBasicType t = SignatureToBasicTypeAndSize(f->GetTypeDescriptor(), &size);
Mathieu Chartier15f345c2015-03-06 12:45:44 -08001502 switch (t) {
1503 case hprof_basic_byte:
Mathieu Chartierff38c042015-03-06 11:33:36 -08001504 __ AddU1(f->GetByte(obj));
Mathieu Chartier15f345c2015-03-06 12:45:44 -08001505 break;
1506 case hprof_basic_boolean:
1507 __ AddU1(f->GetBoolean(obj));
1508 break;
1509 case hprof_basic_char:
Mathieu Chartierff38c042015-03-06 11:33:36 -08001510 __ AddU2(f->GetChar(obj));
Mathieu Chartier15f345c2015-03-06 12:45:44 -08001511 break;
1512 case hprof_basic_short:
1513 __ AddU2(f->GetShort(obj));
1514 break;
Mathieu Chartier15f345c2015-03-06 12:45:44 -08001515 case hprof_basic_int:
Vladimir Marko595beb32017-02-06 14:11:54 +00001516 if (mirror::kUseStringCompression &&
1517 klass->IsStringClass() &&
1518 f->GetOffset().SizeValue() == mirror::String::CountOffset().SizeValue()) {
1519 // Store the string length instead of the raw count field with compression flag.
1520 __ AddU4(obj->AsString()->GetLength());
1521 break;
1522 }
1523 FALLTHROUGH_INTENDED;
1524 case hprof_basic_float:
Mathieu Chartier15f345c2015-03-06 12:45:44 -08001525 case hprof_basic_object:
Andreas Gampe3a913092015-01-10 00:26:17 -08001526 __ AddU4(f->Get32(obj));
Mathieu Chartier15f345c2015-03-06 12:45:44 -08001527 break;
1528 case hprof_basic_double:
1529 case hprof_basic_long:
Andreas Gampe3a913092015-01-10 00:26:17 -08001530 __ AddU8(f->Get64(obj));
Mathieu Chartier15f345c2015-03-06 12:45:44 -08001531 break;
Andreas Gampe3a913092015-01-10 00:26:17 -08001532 }
1533 }
Mathieu Chartier07d7eab2015-06-23 15:45:15 -07001534 // Add value field for String if necessary.
1535 if (klass->IsStringClass()) {
1536 mirror::String* s = obj->AsString();
1537 if (s->GetLength() == 0) {
1538 // If string is empty, use an object-aligned address within the string for the value.
1539 string_value = reinterpret_cast<mirror::Object*>(
1540 reinterpret_cast<uintptr_t>(s) + kObjectAlignment);
1541 } else {
jessicahandojo3aaa37b2016-07-29 14:46:37 -07001542 if (s->IsCompressed()) {
1543 string_value = reinterpret_cast<mirror::Object*>(s->GetValueCompressed());
1544 } else {
1545 string_value = reinterpret_cast<mirror::Object*>(s->GetValue());
1546 }
Mathieu Chartier07d7eab2015-06-23 15:45:15 -07001547 }
1548 __ AddObjectId(string_value);
Mathieu Chartier996f75e2017-03-31 11:18:41 -07001549 } else if (AddRuntimeInternalObjectsField(klass)) {
1550 // We need an id that is guaranteed to not be used, use 1/2 of the object alignment.
1551 fake_object_array = reinterpret_cast<mirror::Object*>(
1552 reinterpret_cast<uintptr_t>(obj) + kObjectAlignment / 2);
1553 __ AddObjectId(fake_object_array);
Mathieu Chartier07d7eab2015-06-23 15:45:15 -07001554 }
Andreas Gampe3a913092015-01-10 00:26:17 -08001555 klass = klass->GetSuperClass();
Mathieu Chartier7c1f53e2015-06-03 10:51:13 -07001556 } while (klass != nullptr);
Andreas Gampe3a913092015-01-10 00:26:17 -08001557
Mathieu Chartier07d7eab2015-06-23 15:45:15 -07001558 // Patch the instance field length.
1559 __ UpdateU4(size_patch_offset, output_->Length() - (size_patch_offset + 4));
1560
Jeff Hao848f70a2014-01-15 13:49:50 -08001561  // Output the native value character array for strings (byte array for compressed strings).
Mathieu Chartier07d7eab2015-06-23 15:45:15 -07001562 CHECK_EQ(obj->IsString(), string_value != nullptr);
1563 if (string_value != nullptr) {
Jeff Hao848f70a2014-01-15 13:49:50 -08001564 mirror::String* s = obj->AsString();
Jeff Hao848f70a2014-01-15 13:49:50 -08001565 __ AddU1(HPROF_PRIMITIVE_ARRAY_DUMP);
Mathieu Chartier07d7eab2015-06-23 15:45:15 -07001566 __ AddObjectId(string_value);
Man Cao8c2ff642015-05-27 17:25:30 -07001567 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(obj));
Vladimir Marko595beb32017-02-06 14:11:54 +00001568 __ AddU4(s->GetLength());
jessicahandojo3aaa37b2016-07-29 14:46:37 -07001569 if (s->IsCompressed()) {
Vladimir Marko595beb32017-02-06 14:11:54 +00001570 __ AddU1(hprof_basic_byte);
1571 __ AddU1List(s->GetValueCompressed(), s->GetLength());
jessicahandojo3aaa37b2016-07-29 14:46:37 -07001572 } else {
Vladimir Marko595beb32017-02-06 14:11:54 +00001573 __ AddU1(hprof_basic_char);
jessicahandojo3aaa37b2016-07-29 14:46:37 -07001574 __ AddU2List(s->GetValue(), s->GetLength());
1575 }
Mathieu Chartier996f75e2017-03-31 11:18:41 -07001576 } else if (fake_object_array != nullptr) {
1577 DumpFakeObjectArray(fake_object_array, fake_roots);
Jeff Hao848f70a2014-01-15 13:49:50 -08001578 }
Andreas Gampe3a913092015-01-10 00:26:17 -08001579}
1580
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07001581void Hprof::VisitRoot(mirror::Object* obj, const RootInfo& info) {
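  // Maps art::RootType values to hprof heap tags; indexed directly by info.GetType(), so the
  // order must match the RootType enum.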
Jesse Wilson0b075f12011-11-09 10:57:41 -05001582 static const HprofHeapTag xlate[] = {
1583 HPROF_ROOT_UNKNOWN,
1584 HPROF_ROOT_JNI_GLOBAL,
1585 HPROF_ROOT_JNI_LOCAL,
1586 HPROF_ROOT_JAVA_FRAME,
1587 HPROF_ROOT_NATIVE_STACK,
1588 HPROF_ROOT_STICKY_CLASS,
1589 HPROF_ROOT_THREAD_BLOCK,
1590 HPROF_ROOT_MONITOR_USED,
1591 HPROF_ROOT_THREAD_OBJECT,
1592 HPROF_ROOT_INTERNED_STRING,
1593 HPROF_ROOT_FINALIZING,
1594 HPROF_ROOT_DEBUGGER,
1595 HPROF_ROOT_REFERENCE_CLEANUP,
1596 HPROF_ROOT_VM_INTERNAL,
1597 HPROF_ROOT_JNI_MONITOR,
1598 };
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -08001599 CHECK_LT(info.GetType(), sizeof(xlate) / sizeof(HprofHeapTag));
Andreas Gampe3a913092015-01-10 00:26:17 -08001600 if (obj == nullptr) {
Jesse Wilson0b075f12011-11-09 10:57:41 -05001601 return;
1602 }
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -07001603 MarkRootObject(obj, 0, xlate[info.GetType()], info.GetThreadId());
Jesse Wilsonc4824e62011-11-01 14:39:04 -04001604}
1605
Elliott Hughesdcfdd2b2012-07-09 18:27:46 -07001606// If "direct_to_ddms" is true, the other arguments are ignored, and data is
1607// sent directly to DDMS.
1608// If "fd" is >= 0, the output will be written to that file descriptor.
1609// Otherwise, "filename" is used to create an output file.
1610void DumpHeap(const char* filename, int fd, bool direct_to_ddms) {
Andreas Gampe3a913092015-01-10 00:26:17 -08001611 CHECK(filename != nullptr);
Hiroshi Yamauchi2cd334a2015-01-09 14:03:35 -08001612 Thread* self = Thread::Current();
Mathieu Chartierecc82302017-02-16 10:20:12 -08001613 // Need to take a heap dump while GC isn't running. See the comment in Heap::VisitObjects().
1614  // We also need the critical section to avoid visiting the same object twice. See b/34967844.
1615 gc::ScopedGCCriticalSection gcs(self,
1616 gc::kGcCauseHprof,
1617 gc::kCollectorTypeHprof);
1618 ScopedSuspendAll ssa(__FUNCTION__, true /* long suspend */);
1619 Hprof hprof(filename, fd, direct_to_ddms);
1620 hprof.Dump();
Jesse Wilsonc4824e62011-11-01 14:39:04 -04001621}
1622
Mathieu Chartierad466ad2015-01-08 16:28:08 -08001623} // namespace hprof
Jesse Wilsonc4824e62011-11-01 14:39:04 -04001624} // namespace art