// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_PROFILE_GENERATOR_H_
#define V8_PROFILE_GENERATOR_H_

#include "allocation.h"
#include "hashmap.h"
#include "../include/v8-profiler.h"

namespace v8 {
namespace internal {

class TokenEnumerator {
 public:
  TokenEnumerator();
  ~TokenEnumerator();
  int GetTokenId(Object* token);

  static const int kNoSecurityToken = -1;
  static const int kInheritsSecurityToken = -2;

 private:
  static void TokenRemovedCallback(v8::Persistent<v8::Value> handle,
                                   void* parameter);
  void TokenRemoved(Object** token_location);

  List<Object**> token_locations_;
  List<bool> token_removed_;

  friend class TokenEnumeratorTester;

  DISALLOW_COPY_AND_ASSIGN(TokenEnumerator);
};

// Provides storage for strings allocated in the C++ heap, holding them
// forever, even if they disappear from the JS heap or external storage.
class StringsStorage {
 public:
  StringsStorage();
  ~StringsStorage();

  const char* GetCopy(const char* src);
  const char* GetFormatted(const char* format, ...);
  const char* GetVFormatted(const char* format, va_list args);
  const char* GetName(String* name);
  const char* GetName(int index);
  inline const char* GetFunctionName(String* name);
  inline const char* GetFunctionName(const char* name);

 private:
  static const int kMaxNameSize = 1024;

  INLINE(static bool StringsMatch(void* key1, void* key2)) {
    return strcmp(reinterpret_cast<char*>(key1),
                  reinterpret_cast<char*>(key2)) == 0;
  }
  const char* AddOrDisposeString(char* str, uint32_t hash);

  // Mapping of strings by String::Hash to const char* strings.
  HashMap names_;

  DISALLOW_COPY_AND_ASSIGN(StringsStorage);
};

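// Usage sketch (illustrative only, not part of the original header): only the
// methods declared above are assumed to exist; the surrounding set-up is
// hypothetical. Returned pointers are owned by the storage and stay valid
// until it is destroyed, which is the whole point of the class.
//
//   StringsStorage names;
//   const char* script = names.GetCopy("script.js");
//   const char* getter = names.GetFormatted("get %s", "length");
//   // Both `script` and `getter` remain usable even after the JS strings
//   // they were derived from have been garbage-collected.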

class CodeEntry {
 public:
  // CodeEntry doesn't own name strings, just references them.
  INLINE(CodeEntry(Logger::LogEventsAndTags tag,
                   const char* name_prefix,
                   const char* name,
                   const char* resource_name,
                   int line_number,
                   int security_token_id));

  INLINE(bool is_js_function() const) { return is_js_function_tag(tag_); }
  INLINE(const char* name_prefix() const) { return name_prefix_; }
  INLINE(bool has_name_prefix() const) { return name_prefix_[0] != '\0'; }
  INLINE(const char* name() const) { return name_; }
  INLINE(const char* resource_name() const) { return resource_name_; }
  INLINE(int line_number() const) { return line_number_; }
  INLINE(int shared_id() const) { return shared_id_; }
  INLINE(void set_shared_id(int shared_id)) { shared_id_ = shared_id; }
  INLINE(int security_token_id() const) { return security_token_id_; }

  INLINE(static bool is_js_function_tag(Logger::LogEventsAndTags tag));

  void CopyData(const CodeEntry& source);
  uint32_t GetCallUid() const;
  bool IsSameAs(CodeEntry* entry) const;

  static const char* const kEmptyNamePrefix;

 private:
  Logger::LogEventsAndTags tag_;
  const char* name_prefix_;
  const char* name_;
  const char* resource_name_;
  int line_number_;
  int shared_id_;
  int security_token_id_;

  DISALLOW_COPY_AND_ASSIGN(CodeEntry);
};


class ProfileTree;

class ProfileNode {
 public:
  INLINE(ProfileNode(ProfileTree* tree, CodeEntry* entry));

  ProfileNode* FindChild(CodeEntry* entry);
  ProfileNode* FindOrAddChild(CodeEntry* entry);
  INLINE(void IncrementSelfTicks()) { ++self_ticks_; }
  INLINE(void IncreaseSelfTicks(unsigned amount)) { self_ticks_ += amount; }
  INLINE(void IncreaseTotalTicks(unsigned amount)) { total_ticks_ += amount; }

  INLINE(CodeEntry* entry() const) { return entry_; }
  INLINE(unsigned self_ticks() const) { return self_ticks_; }
  INLINE(unsigned total_ticks() const) { return total_ticks_; }
  INLINE(const List<ProfileNode*>* children() const) { return &children_list_; }
  double GetSelfMillis() const;
  double GetTotalMillis() const;

  void Print(int indent);

 private:
  INLINE(static bool CodeEntriesMatch(void* entry1, void* entry2)) {
    return reinterpret_cast<CodeEntry*>(entry1)->IsSameAs(
        reinterpret_cast<CodeEntry*>(entry2));
  }

  INLINE(static uint32_t CodeEntryHash(CodeEntry* entry)) {
    return entry->GetCallUid();
  }

  ProfileTree* tree_;
  CodeEntry* entry_;
  unsigned total_ticks_;
  unsigned self_ticks_;
  // Mapping from CodeEntry* to ProfileNode*
  HashMap children_;
  List<ProfileNode*> children_list_;

  DISALLOW_COPY_AND_ASSIGN(ProfileNode);
};


class ProfileTree {
 public:
  ProfileTree();
  ~ProfileTree();

  void AddPathFromEnd(const Vector<CodeEntry*>& path);
  void AddPathFromStart(const Vector<CodeEntry*>& path);
  void CalculateTotalTicks();
  void FilteredClone(ProfileTree* src, int security_token_id);

  double TicksToMillis(unsigned ticks) const {
    return ticks * ms_to_ticks_scale_;
  }
  ProfileNode* root() const { return root_; }
  void SetTickRatePerMs(double ticks_per_ms);

  void ShortPrint();
  void Print() {
    root_->Print(0);
  }

 private:
  template <typename Callback>
  void TraverseDepthFirst(Callback* callback);

  CodeEntry root_entry_;
  ProfileNode* root_;
  double ms_to_ticks_scale_;

  DISALLOW_COPY_AND_ASSIGN(ProfileTree);
};

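// Illustrative sketch (an assumption drawn from the declarations above, not a
// verbatim excerpt from the implementation): a sampled stack is a vector of
// CodeEntry* ordered from the current pc down to the bottom frame, and the
// same vector can be folded into a tree from either end. `top_down` and
// `bottom_up` below are hypothetical ProfileTree instances.
//
//   // path = { current_fn, caller, main }
//   top_down.AddPathFromEnd(path);     // walks main -> caller -> current_fn
//   bottom_up.AddPathFromStart(path);  // walks current_fn -> caller -> main
//   top_down.CalculateTotalTicks();    // propagates self ticks to parents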

class CpuProfile {
 public:
  CpuProfile(const char* title, unsigned uid)
      : title_(title), uid_(uid) { }

  // Add pc -> ... -> main() call path to the profile.
  void AddPath(const Vector<CodeEntry*>& path);
  void CalculateTotalTicks();
  void SetActualSamplingRate(double actual_sampling_rate);
  CpuProfile* FilteredClone(int security_token_id);

  INLINE(const char* title() const) { return title_; }
  INLINE(unsigned uid() const) { return uid_; }
  INLINE(const ProfileTree* top_down() const) { return &top_down_; }
  INLINE(const ProfileTree* bottom_up() const) { return &bottom_up_; }

  void UpdateTicksScale();

  void ShortPrint();
  void Print();

 private:
  const char* title_;
  unsigned uid_;
  ProfileTree top_down_;
  ProfileTree bottom_up_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfile);
};


class CodeMap {
 public:
  CodeMap() : next_shared_id_(1) { }
  void AddCode(Address addr, CodeEntry* entry, unsigned size);
  void MoveCode(Address from, Address to);
  CodeEntry* FindEntry(Address addr);
  int GetSharedId(Address addr);

  void Print();

 private:
  struct CodeEntryInfo {
    CodeEntryInfo(CodeEntry* an_entry, unsigned a_size)
        : entry(an_entry), size(a_size) { }
    CodeEntry* entry;
    unsigned size;
  };

  struct CodeTreeConfig {
    typedef Address Key;
    typedef CodeEntryInfo Value;
    static const Key kNoKey;
    static const Value NoValue() { return CodeEntryInfo(NULL, 0); }
    static int Compare(const Key& a, const Key& b) {
      return a < b ? -1 : (a > b ? 1 : 0);
    }
  };
  typedef SplayTree<CodeTreeConfig> CodeTree;

  class CodeTreePrinter {
   public:
    void Call(const Address& key, const CodeEntryInfo& value);
  };

  void DeleteAllCoveredCode(Address start, Address end);

  // Fake CodeEntry pointer to distinguish shared function entries.
  static CodeEntry* const kSharedFunctionCodeEntry;

  CodeTree tree_;
  int next_shared_id_;

  DISALLOW_COPY_AND_ASSIGN(CodeMap);
};

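// Usage sketch (hypothetical client code; only the methods declared above are
// assumed): CodeMap resolves an arbitrary instruction address to the
// CodeEntry whose [start, start + size) range covers it, and follows code
// objects when the garbage collector moves them.
//
//   CodeMap map;
//   map.AddCode(code_start, entry, code_size);   // register generated code
//   CodeEntry* found = map.FindEntry(code_start + some_offset);
//   map.MoveCode(code_start, new_start);         // GC relocated the code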

class CpuProfilesCollection {
 public:
  CpuProfilesCollection();
  ~CpuProfilesCollection();

  bool StartProfiling(const char* title, unsigned uid);
  bool StartProfiling(String* title, unsigned uid);
  CpuProfile* StopProfiling(int security_token_id,
                            const char* title,
                            double actual_sampling_rate);
  List<CpuProfile*>* Profiles(int security_token_id);
  const char* GetName(String* name) {
    return function_and_resource_names_.GetName(name);
  }
  const char* GetName(int args_count) {
    return function_and_resource_names_.GetName(args_count);
  }
  CpuProfile* GetProfile(int security_token_id, unsigned uid);
  bool IsLastProfile(const char* title);
  void RemoveProfile(CpuProfile* profile);
  bool HasDetachedProfiles() { return detached_profiles_.length() > 0; }

  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                          String* name, String* resource_name, int line_number);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, const char* name);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                          const char* name_prefix, String* name);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, int args_count);
  CodeEntry* NewCodeEntry(int security_token_id);

  // Called from profile generator thread.
  void AddPathToCurrentProfiles(const Vector<CodeEntry*>& path);

  // Limits the number of profiles that can be simultaneously collected.
  static const int kMaxSimultaneousProfiles = 100;

 private:
  const char* GetFunctionName(String* name) {
    return function_and_resource_names_.GetFunctionName(name);
  }
  const char* GetFunctionName(const char* name) {
    return function_and_resource_names_.GetFunctionName(name);
  }
  int GetProfileIndex(unsigned uid);
  List<CpuProfile*>* GetProfilesList(int security_token_id);
  int TokenToIndex(int security_token_id);

  INLINE(static bool UidsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  StringsStorage function_and_resource_names_;
  List<CodeEntry*> code_entries_;
  List<List<CpuProfile*>* > profiles_by_token_;
  // Mapping from profiles' uids to indexes in the second nested list
  // of profiles_by_token_.
  HashMap profiles_uids_;
  List<CpuProfile*> detached_profiles_;

  // Accessed by VM thread and profile generator thread.
  List<CpuProfile*> current_profiles_;
  Semaphore* current_profiles_semaphore_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
};

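// Typical session flow (a sketch of how a profiler front end might drive this
// collection; the exact call sites live elsewhere, so treat the sequence as
// an assumption, and `actual_rate` as a hypothetical variable):
//
//   CpuProfilesCollection profiles;
//   profiles.StartProfiling("page-load", 1);
//   // ... sampler repeatedly calls profiles.AddPathToCurrentProfiles(path) ...
//   CpuProfile* result = profiles.StopProfiling(
//       TokenEnumerator::kNoSecurityToken, "page-load", actual_rate);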

class SampleRateCalculator {
 public:
  SampleRateCalculator()
      : result_(Logger::kSamplingIntervalMs * kResultScale),
        ticks_per_ms_(Logger::kSamplingIntervalMs),
        measurements_count_(0),
        wall_time_query_countdown_(1) {
  }

  double ticks_per_ms() {
    return result_ / static_cast<double>(kResultScale);
  }
  void Tick();
  void UpdateMeasurements(double current_time);

  // Instead of querying current wall time each tick,
  // we use this constant to control query intervals.
  static const unsigned kWallTimeQueryIntervalMs = 100;

 private:
  // As the result needs to be accessed from a different thread, we
  // use a type that guarantees atomic writes to memory. There should
  // be <= 1000 ticks per second, thus storing a value on the order of
  // 10 ** 5 should provide enough precision while keeping away from a
  // potential overflow.
  static const int kResultScale = 100000;

  AtomicWord result_;
  // All other fields are accessed only from the sampler thread.
  double ticks_per_ms_;
  unsigned measurements_count_;
  unsigned wall_time_query_countdown_;
  double last_wall_time_;

  DISALLOW_COPY_AND_ASSIGN(SampleRateCalculator);
};

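// Worked example of the fixed-point encoding used above (the numbers are
// illustrative): a measured rate of 1.37 ticks per millisecond is stored in
// result_ as 1.37 * kResultScale == 137000, which fits in an AtomicWord and
// is written atomically; ticks_per_ms() on the reader's side then recovers
// 137000 / 100000.0 == 1.37.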

class ProfileGenerator {
 public:
  explicit ProfileGenerator(CpuProfilesCollection* profiles);

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 String* name,
                                 String* resource_name,
                                 int line_number)) {
    return profiles_->NewCodeEntry(tag, name, resource_name, line_number);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 const char* name)) {
    return profiles_->NewCodeEntry(tag, name);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 const char* name_prefix,
                                 String* name)) {
    return profiles_->NewCodeEntry(tag, name_prefix, name);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 int args_count)) {
    return profiles_->NewCodeEntry(tag, args_count);
  }

  INLINE(CodeEntry* NewCodeEntry(int security_token_id)) {
    return profiles_->NewCodeEntry(security_token_id);
  }

  void RecordTickSample(const TickSample& sample);

  INLINE(CodeMap* code_map()) { return &code_map_; }

  INLINE(void Tick()) { sample_rate_calc_.Tick(); }
  INLINE(double actual_sampling_rate()) {
    return sample_rate_calc_.ticks_per_ms();
  }

  static const char* const kAnonymousFunctionName;
  static const char* const kProgramEntryName;
  static const char* const kGarbageCollectorEntryName;

 private:
  INLINE(CodeEntry* EntryForVMState(StateTag tag));

  CpuProfilesCollection* profiles_;
  CodeMap code_map_;
  CodeEntry* program_entry_;
  CodeEntry* gc_entry_;
  SampleRateCalculator sample_rate_calc_;

  DISALLOW_COPY_AND_ASSIGN(ProfileGenerator);
};


class HeapEntry;

class HeapGraphEdge BASE_EMBEDDED {
 public:
  enum Type {
    kContextVariable = v8::HeapGraphEdge::kContextVariable,
    kElement = v8::HeapGraphEdge::kElement,
    kProperty = v8::HeapGraphEdge::kProperty,
    kInternal = v8::HeapGraphEdge::kInternal,
    kHidden = v8::HeapGraphEdge::kHidden,
    kShortcut = v8::HeapGraphEdge::kShortcut,
    kWeak = v8::HeapGraphEdge::kWeak
  };

  HeapGraphEdge() { }
  void Init(int child_index, Type type, const char* name, HeapEntry* to);
  void Init(int child_index, Type type, int index, HeapEntry* to);
  void Init(int child_index, int index, HeapEntry* to);

  Type type() { return static_cast<Type>(type_); }
  int index() {
    ASSERT(type_ == kElement || type_ == kHidden || type_ == kWeak);
    return index_;
  }
  const char* name() {
    ASSERT(type_ == kContextVariable
           || type_ == kProperty
           || type_ == kInternal
           || type_ == kShortcut);
    return name_;
  }
  HeapEntry* to() { return to_; }

  HeapEntry* From();

 private:
  int child_index_ : 29;
  unsigned type_ : 3;
  union {
    int index_;
    const char* name_;
  };
  HeapEntry* to_;

  DISALLOW_COPY_AND_ASSIGN(HeapGraphEdge);
};


class HeapSnapshot;

// Each HeapEntry instance represents an entity from the heap (or a special
// virtual node, e.g. the root). To make heap snapshots more compact,
// HeapEntries have a special memory layout (no Vectors or Lists are used):
//
//   +-----------------+
//        HeapEntry
//   +-----------------+
//      HeapGraphEdge    |
//           ...          } children_count
//      HeapGraphEdge    |
//   +-----------------+
//      HeapGraphEdge*   |
//           ...          } retainers_count
//      HeapGraphEdge*   |
//   +-----------------+
//
// In a HeapSnapshot, all entries are hand-allocated in a continuous array
// of raw bytes.
//
class HeapEntry BASE_EMBEDDED {
 public:
  enum Type {
    kHidden = v8::HeapGraphNode::kHidden,
    kArray = v8::HeapGraphNode::kArray,
    kString = v8::HeapGraphNode::kString,
    kObject = v8::HeapGraphNode::kObject,
    kCode = v8::HeapGraphNode::kCode,
    kClosure = v8::HeapGraphNode::kClosure,
    kRegExp = v8::HeapGraphNode::kRegExp,
    kHeapNumber = v8::HeapGraphNode::kHeapNumber,
    kNative = v8::HeapGraphNode::kNative
  };

  HeapEntry() { }
  void Init(HeapSnapshot* snapshot,
            Type type,
            const char* name,
            uint64_t id,
            int self_size,
            int children_count,
            int retainers_count);

  HeapSnapshot* snapshot() { return snapshot_; }
  Type type() { return static_cast<Type>(type_); }
  const char* name() { return name_; }
  inline uint64_t id();
  int self_size() { return self_size_; }
  int retained_size() { return retained_size_; }
  void add_retained_size(int size) { retained_size_ += size; }
  void set_retained_size(int value) { retained_size_ = value; }
  int ordered_index() { return ordered_index_; }
  void set_ordered_index(int value) { ordered_index_ = value; }

  Vector<HeapGraphEdge> children() {
    return Vector<HeapGraphEdge>(children_arr(), children_count_); }
  Vector<HeapGraphEdge*> retainers() {
    return Vector<HeapGraphEdge*>(retainers_arr(), retainers_count_); }
  HeapEntry* dominator() { return dominator_; }
  void set_dominator(HeapEntry* entry) {
    ASSERT(entry != NULL);
    dominator_ = entry;
  }

  void clear_paint() { painted_ = kUnpainted; }
  bool painted_reachable() { return painted_ == kPainted; }
  void paint_reachable() {
    ASSERT(painted_ == kUnpainted);
    painted_ = kPainted;
  }
  bool not_painted_reachable_from_others() {
    return painted_ != kPaintedReachableFromOthers;
  }
  void paint_reachable_from_others() {
    painted_ = kPaintedReachableFromOthers;
  }
  template<class Visitor>
  void ApplyAndPaintAllReachable(Visitor* visitor);
  void PaintAllReachable();

  void SetIndexedReference(HeapGraphEdge::Type type,
                           int child_index,
                           int index,
                           HeapEntry* entry,
                           int retainer_index);
  void SetNamedReference(HeapGraphEdge::Type type,
                         int child_index,
                         const char* name,
                         HeapEntry* entry,
                         int retainer_index);
  void SetUnidirElementReference(int child_index, int index, HeapEntry* entry);

  int EntrySize() { return EntriesSize(1, children_count_, retainers_count_); }
  int RetainedSize(bool exact);

  void Print(
      const char* prefix, const char* edge_name, int max_depth, int indent);

  Handle<HeapObject> GetHeapObject();

  static int EntriesSize(int entries_count,
                         int children_count,
                         int retainers_count);

 private:
  HeapGraphEdge* children_arr() {
    return reinterpret_cast<HeapGraphEdge*>(this + 1);
  }
  HeapGraphEdge** retainers_arr() {
    return reinterpret_cast<HeapGraphEdge**>(children_arr() + children_count_);
  }
  void CalculateExactRetainedSize();
  const char* TypeAsString();

  unsigned painted_: 2;
  unsigned type_: 4;
  int children_count_: 26;
  int retainers_count_;
  int self_size_;
  union {
    int ordered_index_;  // Used during dominator tree building.
    int retained_size_;  // At that moment, there is no retained size yet.
  };
  HeapEntry* dominator_;
  HeapSnapshot* snapshot_;
  struct Id {
    uint32_t id1_;
    uint32_t id2_;
  } id_;  // This is to avoid extra padding of a 64-bit value.
  const char* name_;

  // Paints used for exact retained size calculation.
  static const unsigned kUnpainted = 0;
  static const unsigned kPainted = 1;
  static const unsigned kPaintedReachableFromOthers = 2;

  static const int kExactRetainedSizeTag = 1;

  DISALLOW_COPY_AND_ASSIGN(HeapEntry);
};

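// Size arithmetic implied by the layout comment above (a sketch; the actual
// EntriesSize() implementation lives in the .cc file): one raw-bytes slot for
// an entry with C children and R retainers occupies
//
//   sizeof(HeapEntry) + C * sizeof(HeapGraphEdge) + R * sizeof(HeapGraphEdge*)
//
// which is what children_arr() and retainers_arr() assume when they compute
// offsets relative to `this + 1`.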

class HeapSnapshotsCollection;

// HeapSnapshot represents a single heap snapshot. It is stored in a
// HeapSnapshotsCollection, which is also a factory for HeapSnapshots.
// All HeapSnapshots share strings copied from the JS heap so they can
// still be returned after the original strings have been collected.
// HeapSnapshotGenerator fills in a HeapSnapshot.
class HeapSnapshot {
 public:
  enum Type {
    kFull = v8::HeapSnapshot::kFull
  };

  HeapSnapshot(HeapSnapshotsCollection* collection,
               Type type,
               const char* title,
               unsigned uid);
  ~HeapSnapshot();
  void Delete();

  HeapSnapshotsCollection* collection() { return collection_; }
  Type type() { return type_; }
  const char* title() { return title_; }
  unsigned uid() { return uid_; }
  HeapEntry* root() { return root_entry_; }
  HeapEntry* gc_roots() { return gc_roots_entry_; }
  HeapEntry* natives_root() { return natives_root_entry_; }
  HeapEntry* gc_subroot(int index) { return gc_subroot_entries_[index]; }
  List<HeapEntry*>* entries() { return &entries_; }
  int raw_entries_size() { return raw_entries_size_; }

  void AllocateEntries(
      int entries_count, int children_count, int retainers_count);
  HeapEntry* AddEntry(HeapEntry::Type type,
                      const char* name,
                      uint64_t id,
                      int size,
                      int children_count,
                      int retainers_count);
  HeapEntry* AddRootEntry(int children_count);
  HeapEntry* AddGcRootsEntry(int children_count, int retainers_count);
  HeapEntry* AddGcSubrootEntry(int tag,
                               int children_count,
                               int retainers_count);
  HeapEntry* AddNativesRootEntry(int children_count, int retainers_count);
  void ClearPaint();
  HeapEntry* GetEntryById(uint64_t id);
  List<HeapEntry*>* GetSortedEntriesList();
  template<class Visitor>
  void IterateEntries(Visitor* visitor) { entries_.Iterate(visitor); }
  void SetDominatorsToSelf();

  void Print(int max_depth);
  void PrintEntriesSize();

 private:
  HeapEntry* GetNextEntryToInit();

  HeapSnapshotsCollection* collection_;
  Type type_;
  const char* title_;
  unsigned uid_;
  HeapEntry* root_entry_;
  HeapEntry* gc_roots_entry_;
  HeapEntry* natives_root_entry_;
  HeapEntry* gc_subroot_entries_[VisitorSynchronization::kNumberOfSyncTags];
  char* raw_entries_;
  List<HeapEntry*> entries_;
  bool entries_sorted_;
  int raw_entries_size_;

  friend class HeapSnapshotTester;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshot);
};


class HeapObjectsMap {
 public:
  HeapObjectsMap();
  ~HeapObjectsMap();

  void SnapshotGenerationFinished();
  uint64_t FindObject(Address addr);
  void MoveObject(Address from, Address to);

  static uint64_t GenerateId(v8::RetainedObjectInfo* info);
  static inline uint64_t GetNthGcSubrootId(int delta);

  static const int kObjectIdStep = 2;
  static const uint64_t kInternalRootObjectId;
  static const uint64_t kGcRootsObjectId;
  static const uint64_t kNativesRootObjectId;
  static const uint64_t kGcRootsFirstSubrootId;
  static const uint64_t kFirstAvailableObjectId;

 private:
  struct EntryInfo {
    explicit EntryInfo(uint64_t id) : id(id), accessed(true) { }
    EntryInfo(uint64_t id, bool accessed) : id(id), accessed(accessed) { }
    uint64_t id;
    bool accessed;
  };

  void AddEntry(Address addr, uint64_t id);
  uint64_t FindEntry(Address addr);
  void RemoveDeadEntries();

  static bool AddressesMatch(void* key1, void* key2) {
    return key1 == key2;
  }

  static uint32_t AddressHash(Address addr) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)),
        v8::internal::kZeroHashSeed);
  }

  bool initial_fill_mode_;
  uint64_t next_id_;
  HashMap entries_map_;
  List<EntryInfo>* entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap);
};

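// Behavioral sketch (an assumption drawn from the interface above, not from
// the implementation; `map` and `addr` are hypothetical): FindObject() hands
// out stable uids, so the same heap object keeps its id across snapshots as
// long as moves are reported.
//
//   uint64_t id1 = map.FindObject(addr);       // assigns or returns an id
//   map.MoveObject(addr, new_addr);            // GC relocated the object
//   uint64_t id2 = map.FindObject(new_addr);   // id2 == id1
//
// New ids are presumably handed out in increments of kObjectIdStep, starting
// at kFirstAvailableObjectId, past the synthetic root ids listed above.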

class HeapSnapshotsCollection {
 public:
  HeapSnapshotsCollection();
  ~HeapSnapshotsCollection();

  bool is_tracking_objects() { return is_tracking_objects_; }

  HeapSnapshot* NewSnapshot(
      HeapSnapshot::Type type, const char* name, unsigned uid);
  void SnapshotGenerationFinished(HeapSnapshot* snapshot);
  List<HeapSnapshot*>* snapshots() { return &snapshots_; }
  HeapSnapshot* GetSnapshot(unsigned uid);
  void RemoveSnapshot(HeapSnapshot* snapshot);

  StringsStorage* names() { return &names_; }
  TokenEnumerator* token_enumerator() { return token_enumerator_; }

  uint64_t GetObjectId(Address addr) { return ids_.FindObject(addr); }
  Handle<HeapObject> FindHeapObjectById(uint64_t id);
  void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }

 private:
  INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  bool is_tracking_objects_;  // Whether tracking object moves is needed.
  List<HeapSnapshot*> snapshots_;
  // Mapping from snapshots' uids to HeapSnapshot* pointers.
  HashMap snapshots_uids_;
  StringsStorage names_;
  TokenEnumerator* token_enumerator_;
  // Mapping from HeapObject addresses to objects' uids.
  HeapObjectsMap ids_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsCollection);
};


// A typedef for referencing anything that can be snapshotted and that
// lives in any kind of heap memory.
typedef void* HeapThing;


// An interface that creates HeapEntries from HeapThings.
class HeapEntriesAllocator {
 public:
  virtual ~HeapEntriesAllocator() { }
  virtual HeapEntry* AllocateEntry(
      HeapThing ptr, int children_count, int retainers_count) = 0;
};


// The HeapEntriesMap instance is used to track a mapping between
// real heap objects and their representations in heap snapshots.
class HeapEntriesMap {
 public:
  HeapEntriesMap();
  ~HeapEntriesMap();

  void AllocateEntries();
  HeapEntry* Map(HeapThing thing);
  void Pair(HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry);
  void CountReference(HeapThing from, HeapThing to,
                      int* prev_children_count = NULL,
                      int* prev_retainers_count = NULL);

  int entries_count() { return entries_count_; }
  int total_children_count() { return total_children_count_; }
  int total_retainers_count() { return total_retainers_count_; }

  static HeapEntry* const kHeapEntryPlaceholder;

 private:
  struct EntryInfo {
    EntryInfo(HeapEntry* entry, HeapEntriesAllocator* allocator)
        : entry(entry),
          allocator(allocator),
          children_count(0),
          retainers_count(0) {
    }
    HeapEntry* entry;
    HeapEntriesAllocator* allocator;
    int children_count;
    int retainers_count;
  };

  static uint32_t Hash(HeapThing thing) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)),
        v8::internal::kZeroHashSeed);
  }
  static bool HeapThingsMatch(HeapThing key1, HeapThing key2) {
    return key1 == key2;
  }

  HashMap entries_;
  int entries_count_;
  int total_children_count_;
  int total_retainers_count_;

  friend class HeapObjectsSet;

  DISALLOW_COPY_AND_ASSIGN(HeapEntriesMap);
};

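// How this map is typically used (a sketch of the two-pass scheme suggested
// by the interface; the pass names are descriptive, not taken from the code,
// and `thing`, `allocator`, etc. are hypothetical):
//
//   // Pass 1: discover objects and count edges.
//   map.Pair(thing, allocator, HeapEntriesMap::kHeapEntryPlaceholder);
//   map.CountReference(parent_thing, child_thing);
//   // Pass 2: sizes are now known, so real HeapEntries can be carved out of
//   // the snapshot's raw-bytes array.
//   map.AllocateEntries();
//   HeapEntry* entry = map.Map(thing);  // placeholder replaced by real entry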

class HeapObjectsSet {
 public:
  HeapObjectsSet();
  void Clear();
  bool Contains(Object* object);
  void Insert(Object* obj);
  const char* GetTag(Object* obj);
  void SetTag(Object* obj, const char* tag);

 private:
  HashMap entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapObjectsSet);
};


// An interface used to populate a snapshot with nodes and edges.
class SnapshotFillerInterface {
 public:
  virtual ~SnapshotFillerInterface() { }
  virtual HeapEntry* AddEntry(HeapThing ptr,
                              HeapEntriesAllocator* allocator) = 0;
  virtual HeapEntry* FindEntry(HeapThing ptr) = 0;
  virtual HeapEntry* FindOrAddEntry(HeapThing ptr,
                                    HeapEntriesAllocator* allocator) = 0;
  virtual void SetIndexedReference(HeapGraphEdge::Type type,
                                   HeapThing parent_ptr,
                                   HeapEntry* parent_entry,
                                   int index,
                                   HeapThing child_ptr,
                                   HeapEntry* child_entry) = 0;
  virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                            HeapThing parent_ptr,
                                            HeapEntry* parent_entry,
                                            HeapThing child_ptr,
                                            HeapEntry* child_entry) = 0;
  virtual void SetNamedReference(HeapGraphEdge::Type type,
                                 HeapThing parent_ptr,
                                 HeapEntry* parent_entry,
                                 const char* reference_name,
                                 HeapThing child_ptr,
                                 HeapEntry* child_entry) = 0;
  virtual void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                          HeapThing parent_ptr,
                                          HeapEntry* parent_entry,
                                          HeapThing child_ptr,
                                          HeapEntry* child_entry) = 0;
};


class SnapshottingProgressReportingInterface {
 public:
  virtual ~SnapshottingProgressReportingInterface() { }
  virtual void ProgressStep() = 0;
  virtual bool ProgressReport(bool force) = 0;
};


| 931 | // An implementation of V8 heap graph extractor. |
class V8HeapExplorer : public HeapEntriesAllocator {
 public:
  V8HeapExplorer(HeapSnapshot* snapshot,
                 SnapshottingProgressReportingInterface* progress);
  virtual ~V8HeapExplorer();
  virtual HeapEntry* AllocateEntry(
      HeapThing ptr, int children_count, int retainers_count);
  void AddRootEntries(SnapshotFillerInterface* filler);
  int EstimateObjectsCount(HeapIterator* iterator);
  bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
  void TagGlobalObjects();

  static String* GetConstructorName(JSObject* object);

  static HeapObject* const kInternalRootObject;

 private:
  HeapEntry* AddEntry(
      HeapObject* object, int children_count, int retainers_count);
  HeapEntry* AddEntry(HeapObject* object,
                      HeapEntry::Type type,
                      const char* name,
                      int children_count,
                      int retainers_count);
  const char* GetSystemEntryName(HeapObject* object);
  void ExtractReferences(HeapObject* obj);
  void ExtractClosureReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractPropertyReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractElementReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractInternalReferences(JSObject* js_obj, HeapEntry* entry);
  void SetClosureReference(HeapObject* parent_obj,
                           HeapEntry* parent,
                           String* reference_name,
                           Object* child);
  void SetElementReference(HeapObject* parent_obj,
                           HeapEntry* parent,
                           int index,
                           Object* child);
  void SetInternalReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            const char* reference_name,
                            Object* child,
                            int field_offset = -1);
  void SetInternalReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            int index,
                            Object* child,
                            int field_offset = -1);
  void SetHiddenReference(HeapObject* parent_obj,
                          HeapEntry* parent,
                          int index,
                          Object* child);
  void SetWeakReference(HeapObject* parent_obj,
                        HeapEntry* parent_entry,
                        int index,
                        Object* child_obj,
                        int field_offset);
  void SetPropertyReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            String* reference_name,
                            Object* child,
                            const char* name_format_string = NULL,
                            int field_offset = -1);
  void SetPropertyShortcutReference(HeapObject* parent_obj,
                                    HeapEntry* parent,
                                    String* reference_name,
                                    Object* child);
  void SetRootShortcutReference(Object* child);
  void SetRootGcRootsReference();
  void SetGcRootsReference(VisitorSynchronization::SyncTag tag);
  void SetGcSubrootReference(
      VisitorSynchronization::SyncTag tag, bool is_weak, Object* child);
  void TagObject(Object* obj, const char* tag);

  HeapEntry* GetEntry(Object* obj);

  static inline HeapObject* GetNthGcSubrootObject(int delta);
  static inline int GetGcSubrootOrder(HeapObject* subroot);

  Heap* heap_;
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  SnapshottingProgressReportingInterface* progress_;
  SnapshotFillerInterface* filler_;
  HeapObjectsSet objects_tags_;

  static HeapObject* const kGcRootsObject;
  static HeapObject* const kFirstGcSubrootObject;
  static HeapObject* const kLastGcSubrootObject;

  friend class IndexedReferencesExtractor;
  friend class GcSubrootsEnumerator;
  friend class RootsReferencesExtractor;

  DISALLOW_COPY_AND_ASSIGN(V8HeapExplorer);
};
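
// Illustrative driver sequence (a sketch only; the real orchestration lives
// in HeapSnapshotGenerator below, and snapshot/progress/filler are
// placeholders for the generator's own state):
//
//   V8HeapExplorer explorer(snapshot, progress);
//   explorer.TagGlobalObjects();
//   explorer.AddRootEntries(filler);
//   HeapIterator iterator;
//   int count = explorer.EstimateObjectsCount(&iterator);
//   bool succeeded = explorer.IterateAndExtractReferences(filler);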


// An implementation of the retained native objects extractor.
class NativeObjectsExplorer : public HeapEntriesAllocator {
 public:
  NativeObjectsExplorer(HeapSnapshot* snapshot,
                        SnapshottingProgressReportingInterface* progress);
  virtual ~NativeObjectsExplorer();
  virtual HeapEntry* AllocateEntry(
      HeapThing ptr, int children_count, int retainers_count);
  void AddRootEntries(SnapshotFillerInterface* filler);
  int EstimateObjectsCount();
  bool IterateAndExtractReferences(SnapshotFillerInterface* filler);

 private:
  void FillRetainedObjects();
  List<HeapObject*>* GetListMaybeDisposeInfo(v8::RetainedObjectInfo* info);
  void SetNativeRootReference(v8::RetainedObjectInfo* info);
  void SetRootNativesRootReference();
  void SetWrapperNativeReferences(HeapObject* wrapper,
                                  v8::RetainedObjectInfo* info);
  void VisitSubtreeWrapper(Object** p, uint16_t class_id);

  static uint32_t InfoHash(v8::RetainedObjectInfo* info) {
    return ComputeIntegerHash(static_cast<uint32_t>(info->GetHash()),
                              v8::internal::kZeroHashSeed);
  }
  static bool RetainedInfosMatch(void* key1, void* key2) {
    return key1 == key2 ||
        (reinterpret_cast<v8::RetainedObjectInfo*>(key1))->IsEquivalent(
            reinterpret_cast<v8::RetainedObjectInfo*>(key2));
  }
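
  // Illustrative pairing (a sketch; the actual call sites live in the .cc
  // file): objects_by_info_ below is keyed by RetainedObjectInfo*, matched
  // with RetainedInfosMatch and hashed with InfoHash, roughly as in:
  //
  //   HashMap::Entry* entry =
  //       objects_by_info_.Lookup(info, InfoHash(info), true /* insert */);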

  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  SnapshottingProgressReportingInterface* progress_;
  bool embedder_queried_;
  HeapObjectsSet in_groups_;
  // RetainedObjectInfo* -> List<HeapObject*>*
  HashMap objects_by_info_;
  // Used during reference extraction.
  SnapshotFillerInterface* filler_;

  static HeapThing const kNativesRootObject;

  friend class GlobalHandlesExtractor;

  DISALLOW_COPY_AND_ASSIGN(NativeObjectsExplorer);
};
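
// Illustrative driver sequence (a sketch only; the real orchestration lives
// in HeapSnapshotGenerator below, and snapshot/progress/filler are
// placeholders for the generator's own state):
//
//   NativeObjectsExplorer dom_explorer(snapshot, progress);
//   dom_explorer.AddRootEntries(filler);
//   int count = dom_explorer.EstimateObjectsCount();
//   bool succeeded = dom_explorer.IterateAndExtractReferences(filler);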


class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
 public:
  HeapSnapshotGenerator(HeapSnapshot* snapshot,
                        v8::ActivityControl* control);
  bool GenerateSnapshot();

 private:
  bool ApproximateRetainedSizes();
  bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
                          Vector<HeapEntry*>* dominators);
  bool CountEntriesAndReferences();
  bool FillReferences();
  void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
  void ProgressStep();
  bool ProgressReport(bool force = false);
  bool SetEntriesDominators();
  void SetProgressTotal(int iterations_count);

  HeapSnapshot* snapshot_;
  v8::ActivityControl* control_;
  V8HeapExplorer v8_heap_explorer_;
  NativeObjectsExplorer dom_explorer_;
  // Mapping from HeapThing pointers to HeapEntry* pointers.
  HeapEntriesMap entries_;
  // Used during snapshot generation.
  int progress_counter_;
  int progress_total_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotGenerator);
};
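
// Typical use (a sketch; in practice the heap profiler owns the snapshot and
// passes the embedder's optional ActivityControl):
//
//   HeapSnapshotGenerator generator(snapshot, control);
//   bool completed = generator.GenerateSnapshot();  // false if not completed.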

class OutputStreamWriter;

class HeapSnapshotJSONSerializer {
 public:
  explicit HeapSnapshotJSONSerializer(HeapSnapshot* snapshot)
      : snapshot_(snapshot),
        nodes_(ObjectsMatch),
        strings_(ObjectsMatch),
        next_node_id_(1),
        next_string_id_(1),
        writer_(NULL) {
  }
  void Serialize(v8::OutputStream* stream);

 private:
  INLINE(static bool ObjectsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  INLINE(static uint32_t ObjectHash(const void* key)) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key)),
        v8::internal::kZeroHashSeed);
  }

  void EnumerateNodes();
  HeapSnapshot* CreateFakeSnapshot();
  int GetNodeId(HeapEntry* entry);
  int GetStringId(const char* s);
  void SerializeEdge(HeapGraphEdge* edge);
  void SerializeImpl();
  void SerializeNode(HeapEntry* entry);
  void SerializeNodes();
  void SerializeSnapshot();
  void SerializeString(const unsigned char* s);
  void SerializeStrings();
  void SortHashMap(HashMap* map, List<HashMap::Entry*>* sorted_entries);

  static const int kMaxSerializableSnapshotRawSize;

  HeapSnapshot* snapshot_;
  HashMap nodes_;
  HashMap strings_;
  int next_node_id_;
  int next_string_id_;
  OutputStreamWriter* writer_;

  friend class HeapSnapshotJSONSerializerEnumerator;
  friend class HeapSnapshotJSONSerializerIterator;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotJSONSerializer);
};
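
// Typical use (a sketch; the embedder supplies the v8::OutputStream that
// receives the JSON chunks):
//
//   HeapSnapshotJSONSerializer serializer(snapshot);
//   serializer.Serialize(stream);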

} }  // namespace v8::internal

#endif  // V8_PROFILE_GENERATOR_H_