// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_PROFILE_GENERATOR_H_
#define V8_PROFILE_GENERATOR_H_

#include "allocation.h"
#include "hashmap.h"
#include "../include/v8-profiler.h"

namespace v8 {
namespace internal {

class TokenEnumerator {
 public:
  TokenEnumerator();
  ~TokenEnumerator();
  int GetTokenId(Object* token);

  static const int kNoSecurityToken = -1;
  static const int kInheritsSecurityToken = -2;

 private:
  static void TokenRemovedCallback(v8::Persistent<v8::Value> handle,
                                   void* parameter);
  void TokenRemoved(Object** token_location);

  List<Object**> token_locations_;
  List<bool> token_removed_;

  friend class TokenEnumeratorTester;

  DISALLOW_COPY_AND_ASSIGN(TokenEnumerator);
};


// Provides storage for strings allocated in the C++ heap, holding them
// forever, even if they disappear from the JS heap or external storage.
class StringsStorage {
 public:
  StringsStorage();
  ~StringsStorage();

  const char* GetCopy(const char* src);
  const char* GetFormatted(const char* format, ...);
  const char* GetVFormatted(const char* format, va_list args);
  const char* GetName(String* name);
  const char* GetName(int index);
  inline const char* GetFunctionName(String* name);
  inline const char* GetFunctionName(const char* name);

 private:
  INLINE(static bool StringsMatch(void* key1, void* key2)) {
    return strcmp(reinterpret_cast<char*>(key1),
                  reinterpret_cast<char*>(key2)) == 0;
  }
  const char* AddOrDisposeString(char* str, uint32_t hash);

  // Mapping of strings by String::Hash to const char* strings.
  HashMap names_;

  DISALLOW_COPY_AND_ASSIGN(StringsStorage);
};
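
// A minimal usage sketch of StringsStorage (hypothetical caller code, not
// part of this header): the returned pointers stay valid for the lifetime
// of the storage object, so callers can keep them inside profile entries.
//
//   StringsStorage names;
//   const char* copy = names.GetCopy("handler");          // deep copy
//   const char* args = names.GetFormatted("args: %d", 3);  // printf-style
//   // Both pointers remain usable until |names| is destroyed.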


class CodeEntry {
 public:
  // CodeEntry doesn't own name strings, just references them.
  INLINE(CodeEntry(Logger::LogEventsAndTags tag,
                   const char* name_prefix,
                   const char* name,
                   const char* resource_name,
                   int line_number,
                   int security_token_id));

  INLINE(bool is_js_function() const) { return is_js_function_tag(tag_); }
  INLINE(const char* name_prefix() const) { return name_prefix_; }
  INLINE(bool has_name_prefix() const) { return name_prefix_[0] != '\0'; }
  INLINE(const char* name() const) { return name_; }
  INLINE(const char* resource_name() const) { return resource_name_; }
  INLINE(int line_number() const) { return line_number_; }
  INLINE(int shared_id() const) { return shared_id_; }
  INLINE(void set_shared_id(int shared_id)) { shared_id_ = shared_id; }
  INLINE(int security_token_id() const) { return security_token_id_; }

  INLINE(static bool is_js_function_tag(Logger::LogEventsAndTags tag));

  void CopyData(const CodeEntry& source);
  uint32_t GetCallUid() const;
  bool IsSameAs(CodeEntry* entry) const;

  static const char* const kEmptyNamePrefix;

 private:
  Logger::LogEventsAndTags tag_;
  const char* name_prefix_;
  const char* name_;
  const char* resource_name_;
  int line_number_;
  int shared_id_;
  int security_token_id_;

  DISALLOW_COPY_AND_ASSIGN(CodeEntry);
};


class ProfileTree;

class ProfileNode {
 public:
  INLINE(ProfileNode(ProfileTree* tree, CodeEntry* entry));

  ProfileNode* FindChild(CodeEntry* entry);
  ProfileNode* FindOrAddChild(CodeEntry* entry);
  INLINE(void IncrementSelfTicks()) { ++self_ticks_; }
  INLINE(void IncreaseSelfTicks(unsigned amount)) { self_ticks_ += amount; }
  INLINE(void IncreaseTotalTicks(unsigned amount)) { total_ticks_ += amount; }

  INLINE(CodeEntry* entry() const) { return entry_; }
  INLINE(unsigned self_ticks() const) { return self_ticks_; }
  INLINE(unsigned total_ticks() const) { return total_ticks_; }
  INLINE(const List<ProfileNode*>* children() const) { return &children_list_; }
  double GetSelfMillis() const;
  double GetTotalMillis() const;

  void Print(int indent);

 private:
  INLINE(static bool CodeEntriesMatch(void* entry1, void* entry2)) {
    return reinterpret_cast<CodeEntry*>(entry1)->IsSameAs(
        reinterpret_cast<CodeEntry*>(entry2));
  }

  INLINE(static uint32_t CodeEntryHash(CodeEntry* entry)) {
    return entry->GetCallUid();
  }

  ProfileTree* tree_;
  CodeEntry* entry_;
  unsigned total_ticks_;
  unsigned self_ticks_;
  // Mapping from CodeEntry* to ProfileNode*
  HashMap children_;
  List<ProfileNode*> children_list_;

  DISALLOW_COPY_AND_ASSIGN(ProfileNode);
};


class ProfileTree {
 public:
  ProfileTree();
  ~ProfileTree();

  void AddPathFromEnd(const Vector<CodeEntry*>& path);
  void AddPathFromStart(const Vector<CodeEntry*>& path);
  void CalculateTotalTicks();
  void FilteredClone(ProfileTree* src, int security_token_id);

  double TicksToMillis(unsigned ticks) const {
    return ticks * ms_to_ticks_scale_;
  }
  ProfileNode* root() const { return root_; }
  void SetTickRatePerMs(double ticks_per_ms);

  void ShortPrint();
  void Print() {
    root_->Print(0);
  }

 private:
  template <typename Callback>
  void TraverseDepthFirst(Callback* callback);

  CodeEntry root_entry_;
  ProfileNode* root_;
  double ms_to_ticks_scale_;

  DISALLOW_COPY_AND_ASSIGN(ProfileTree);
};
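
// Sketch of the tick-to-time conversion done by ProfileTree above
// (illustrative only; SetTickRatePerMs is assumed to store the reciprocal
// of the sampling rate, which is what the TicksToMillis body implies):
//
//   ProfileTree tree;
//   tree.SetTickRatePerMs(2.0);  // sampler delivered ~2 ticks per ms
//   // ms_to_ticks_scale_ would then be 1.0 / 2.0 = 0.5 ms per tick, so
//   // 10 observed ticks translate to tree.TicksToMillis(10) == 5.0 ms.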


class CpuProfile {
 public:
  CpuProfile(const char* title, unsigned uid)
      : title_(title), uid_(uid) { }

  // Add pc -> ... -> main() call path to the profile.
  void AddPath(const Vector<CodeEntry*>& path);
  void CalculateTotalTicks();
  void SetActualSamplingRate(double actual_sampling_rate);
  CpuProfile* FilteredClone(int security_token_id);

  INLINE(const char* title() const) { return title_; }
  INLINE(unsigned uid() const) { return uid_; }
  INLINE(const ProfileTree* top_down() const) { return &top_down_; }
  INLINE(const ProfileTree* bottom_up() const) { return &bottom_up_; }

  void UpdateTicksScale();

  void ShortPrint();
  void Print();

 private:
  const char* title_;
  unsigned uid_;
  ProfileTree top_down_;
  ProfileTree bottom_up_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfile);
};


class CodeMap {
 public:
  CodeMap() : next_shared_id_(1) { }
  void AddCode(Address addr, CodeEntry* entry, unsigned size);
  void MoveCode(Address from, Address to);
  CodeEntry* FindEntry(Address addr);
  int GetSharedId(Address addr);

  void Print();

 private:
  struct CodeEntryInfo {
    CodeEntryInfo(CodeEntry* an_entry, unsigned a_size)
        : entry(an_entry), size(a_size) { }
    CodeEntry* entry;
    unsigned size;
  };

  struct CodeTreeConfig {
    typedef Address Key;
    typedef CodeEntryInfo Value;
    static const Key kNoKey;
    static const Value kNoValue;
    static int Compare(const Key& a, const Key& b) {
      return a < b ? -1 : (a > b ? 1 : 0);
    }
  };
  typedef SplayTree<CodeTreeConfig> CodeTree;

  class CodeTreePrinter {
   public:
    void Call(const Address& key, const CodeEntryInfo& value);
  };

  void DeleteAllCoveredCode(Address start, Address end);

  // Fake CodeEntry pointer to distinguish shared function entries.
  static CodeEntry* const kSharedFunctionCodeEntry;

  CodeTree tree_;
  int next_shared_id_;

  DISALLOW_COPY_AND_ASSIGN(CodeMap);
};
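
// Illustrative use of CodeMap above during tick processing (hypothetical
// names and values; the range-containment behaviour described here is an
// assumption based on the stored CodeEntryInfo::size, not a statement
// about FindEntry's exact implementation):
//
//   CodeMap map;
//   map.AddCode(code_start, entry, 0x40);  // code occupies [start, start+0x40)
//   // A sampled pc anywhere inside that range should resolve back to |entry|:
//   CodeEntry* hit = map.FindEntry(code_start + 0x10);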


class CpuProfilesCollection {
 public:
  CpuProfilesCollection();
  ~CpuProfilesCollection();

  bool StartProfiling(const char* title, unsigned uid);
  bool StartProfiling(String* title, unsigned uid);
  CpuProfile* StopProfiling(int security_token_id,
                            const char* title,
                            double actual_sampling_rate);
  List<CpuProfile*>* Profiles(int security_token_id);
  const char* GetName(String* name) {
    return function_and_resource_names_.GetName(name);
  }
  const char* GetName(int args_count) {
    return function_and_resource_names_.GetName(args_count);
  }
  CpuProfile* GetProfile(int security_token_id, unsigned uid);
  bool IsLastProfile(const char* title);
  void RemoveProfile(CpuProfile* profile);
  bool HasDetachedProfiles() { return detached_profiles_.length() > 0; }

  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                          String* name, String* resource_name, int line_number);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, const char* name);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                          const char* name_prefix, String* name);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, int args_count);
  CodeEntry* NewCodeEntry(int security_token_id);

  // Called from profile generator thread.
  void AddPathToCurrentProfiles(const Vector<CodeEntry*>& path);

  // Limits the number of profiles that can be simultaneously collected.
  static const int kMaxSimultaneousProfiles = 100;

 private:
  const char* GetFunctionName(String* name) {
    return function_and_resource_names_.GetFunctionName(name);
  }
  const char* GetFunctionName(const char* name) {
    return function_and_resource_names_.GetFunctionName(name);
  }
  int GetProfileIndex(unsigned uid);
  List<CpuProfile*>* GetProfilesList(int security_token_id);
  int TokenToIndex(int security_token_id);

  INLINE(static bool UidsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  StringsStorage function_and_resource_names_;
  List<CodeEntry*> code_entries_;
  List<List<CpuProfile*>* > profiles_by_token_;
  // Mapping from profiles' uids to indexes in the second nested list
  // of profiles_by_token_.
  HashMap profiles_uids_;
  List<CpuProfile*> detached_profiles_;

  // Accessed by VM thread and profile generator thread.
  List<CpuProfile*> current_profiles_;
  Semaphore* current_profiles_semaphore_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
};


class SampleRateCalculator {
 public:
  SampleRateCalculator()
      : result_(Logger::kSamplingIntervalMs * kResultScale),
        ticks_per_ms_(Logger::kSamplingIntervalMs),
        measurements_count_(0),
        wall_time_query_countdown_(1) {
  }

  double ticks_per_ms() {
    return result_ / static_cast<double>(kResultScale);
  }
  void Tick();
  void UpdateMeasurements(double current_time);

  // Instead of querying current wall time each tick,
  // we use this constant to control query intervals.
  static const unsigned kWallTimeQueryIntervalMs = 100;

 private:
  // As the result needs to be accessed from a different thread, we
  // use a type that guarantees atomic writes to memory. There should
  // be <= 1000 ticks per second, so scaling the value by 10 ** 5
  // provides enough precision while staying well clear of a potential
  // overflow.
  static const int kResultScale = 100000;

  AtomicWord result_;
  // All other fields are accessed only from the sampler thread.
  double ticks_per_ms_;
  unsigned measurements_count_;
  unsigned wall_time_query_countdown_;
  double last_wall_time_;

  DISALLOW_COPY_AND_ASSIGN(SampleRateCalculator);
};
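
// Sketch of the fixed-point representation used by SampleRateCalculator
// above (the update formula itself lives in the .cc file and is not shown
// here; the numbers below only illustrate the kResultScale scaling):
//
//   // A measured rate of 1.37 ticks/ms would be stored atomically as
//   //   result_ = 1.37 * kResultScale = 137000 (an integral AtomicWord),
//   // and read back on the other thread as
//   //   ticks_per_ms() == 137000 / 100000.0 == 1.37.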


class ProfileGenerator {
 public:
  explicit ProfileGenerator(CpuProfilesCollection* profiles);

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 String* name,
                                 String* resource_name,
                                 int line_number)) {
    return profiles_->NewCodeEntry(tag, name, resource_name, line_number);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 const char* name)) {
    return profiles_->NewCodeEntry(tag, name);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 const char* name_prefix,
                                 String* name)) {
    return profiles_->NewCodeEntry(tag, name_prefix, name);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 int args_count)) {
    return profiles_->NewCodeEntry(tag, args_count);
  }

  INLINE(CodeEntry* NewCodeEntry(int security_token_id)) {
    return profiles_->NewCodeEntry(security_token_id);
  }

  void RecordTickSample(const TickSample& sample);

  INLINE(CodeMap* code_map()) { return &code_map_; }

  INLINE(void Tick()) { sample_rate_calc_.Tick(); }
  INLINE(double actual_sampling_rate()) {
    return sample_rate_calc_.ticks_per_ms();
  }

  static const char* const kAnonymousFunctionName;
  static const char* const kProgramEntryName;
  static const char* const kGarbageCollectorEntryName;

 private:
  INLINE(CodeEntry* EntryForVMState(StateTag tag));

  CpuProfilesCollection* profiles_;
  CodeMap code_map_;
  CodeEntry* program_entry_;
  CodeEntry* gc_entry_;
  SampleRateCalculator sample_rate_calc_;

  DISALLOW_COPY_AND_ASSIGN(ProfileGenerator);
};


class HeapEntry;

class HeapGraphEdge BASE_EMBEDDED {
 public:
  enum Type {
    kContextVariable = v8::HeapGraphEdge::kContextVariable,
    kElement = v8::HeapGraphEdge::kElement,
    kProperty = v8::HeapGraphEdge::kProperty,
    kInternal = v8::HeapGraphEdge::kInternal,
    kHidden = v8::HeapGraphEdge::kHidden,
    kShortcut = v8::HeapGraphEdge::kShortcut
  };

  HeapGraphEdge() { }
  void Init(int child_index, Type type, const char* name, HeapEntry* to);
  void Init(int child_index, Type type, int index, HeapEntry* to);
  void Init(int child_index, int index, HeapEntry* to);

  Type type() { return static_cast<Type>(type_); }
  int index() {
    ASSERT(type_ == kElement || type_ == kHidden);
    return index_;
  }
  const char* name() {
    ASSERT(type_ == kContextVariable
           || type_ == kProperty
           || type_ == kInternal
           || type_ == kShortcut);
    return name_;
  }
  HeapEntry* to() { return to_; }

  HeapEntry* From();

 private:
  int child_index_ : 29;
  unsigned type_ : 3;
  union {
    int index_;
    const char* name_;
  };
  HeapEntry* to_;

  DISALLOW_COPY_AND_ASSIGN(HeapGraphEdge);
};


class HeapSnapshot;

// HeapEntry instances represent an entity from the heap (or a special
// virtual node, e.g. root). To make heap snapshots more compact,
// HeapEntry has a special memory layout (no Vectors or Lists are used):
//
//   +-----------------+
//         HeapEntry
//   +-----------------+
//    HeapGraphEdge    |
//         ...         } children_count
//    HeapGraphEdge    |
//   +-----------------+
//    HeapGraphEdge*   |
//         ...         } retainers_count
//    HeapGraphEdge*   |
//   +-----------------+
//
// In a HeapSnapshot, all entries are hand-allocated in a contiguous array
// of raw bytes.
//
class HeapEntry BASE_EMBEDDED {
 public:
  enum Type {
    kHidden = v8::HeapGraphNode::kHidden,
    kArray = v8::HeapGraphNode::kArray,
    kString = v8::HeapGraphNode::kString,
    kObject = v8::HeapGraphNode::kObject,
    kCode = v8::HeapGraphNode::kCode,
    kClosure = v8::HeapGraphNode::kClosure,
    kRegExp = v8::HeapGraphNode::kRegExp,
    kHeapNumber = v8::HeapGraphNode::kHeapNumber,
    kNative = v8::HeapGraphNode::kNative
  };

  HeapEntry() { }
  void Init(HeapSnapshot* snapshot,
            Type type,
            const char* name,
            uint64_t id,
            int self_size,
            int children_count,
            int retainers_count);

  HeapSnapshot* snapshot() { return snapshot_; }
  Type type() { return static_cast<Type>(type_); }
  const char* name() { return name_; }
  inline uint64_t id();
  int self_size() { return self_size_; }
  int retained_size() { return retained_size_; }
  void add_retained_size(int size) { retained_size_ += size; }
  void set_retained_size(int value) { retained_size_ = value; }
  int ordered_index() { return ordered_index_; }
  void set_ordered_index(int value) { ordered_index_ = value; }

  Vector<HeapGraphEdge> children() {
    return Vector<HeapGraphEdge>(children_arr(), children_count_); }
  Vector<HeapGraphEdge*> retainers() {
    return Vector<HeapGraphEdge*>(retainers_arr(), retainers_count_); }
  HeapEntry* dominator() { return dominator_; }
  void set_dominator(HeapEntry* entry) { dominator_ = entry; }

  void clear_paint() { painted_ = kUnpainted; }
  bool painted_reachable() { return painted_ == kPainted; }
  void paint_reachable() {
    ASSERT(painted_ == kUnpainted);
    painted_ = kPainted;
  }
  bool not_painted_reachable_from_others() {
    return painted_ != kPaintedReachableFromOthers;
  }
  void paint_reachable_from_others() {
    painted_ = kPaintedReachableFromOthers;
  }
  template<class Visitor>
  void ApplyAndPaintAllReachable(Visitor* visitor);
  void PaintAllReachable();

  void SetIndexedReference(HeapGraphEdge::Type type,
                           int child_index,
                           int index,
                           HeapEntry* entry,
                           int retainer_index);
  void SetNamedReference(HeapGraphEdge::Type type,
                         int child_index,
                         const char* name,
                         HeapEntry* entry,
                         int retainer_index);
  void SetUnidirElementReference(int child_index, int index, HeapEntry* entry);

  int EntrySize() { return EntriesSize(1, children_count_, retainers_count_); }
  int RetainedSize(bool exact);

  void Print(int max_depth, int indent);

  Handle<HeapObject> GetHeapObject();

  static int EntriesSize(int entries_count,
                         int children_count,
                         int retainers_count);

 private:
  HeapGraphEdge* children_arr() {
    return reinterpret_cast<HeapGraphEdge*>(this + 1);
  }
  HeapGraphEdge** retainers_arr() {
    return reinterpret_cast<HeapGraphEdge**>(children_arr() + children_count_);
  }
  void CalculateExactRetainedSize();
  const char* TypeAsString();

  unsigned painted_: 2;
  unsigned type_: 4;
  int children_count_: 26;
  int retainers_count_;
  int self_size_;
  union {
    int ordered_index_;  // Used during dominator tree building.
    int retained_size_;  // At that moment, there is no retained size yet.
  };
  HeapEntry* dominator_;
  HeapSnapshot* snapshot_;
  struct Id {
    uint32_t id1_;
    uint32_t id2_;
  } id_;  // This is to avoid extra padding of 64-bit value.
  const char* name_;

  // Paints used for exact retained sizes calculation.
  static const unsigned kUnpainted = 0;
  static const unsigned kPainted = 1;
  static const unsigned kPaintedReachableFromOthers = 2;

  static const int kExactRetainedSizeTag = 1;

  DISALLOW_COPY_AND_ASSIGN(HeapEntry);
};


class HeapSnapshotsCollection;

// HeapSnapshot represents a single heap snapshot. It is stored in
// HeapSnapshotsCollection, which is also a factory for
// HeapSnapshots. All HeapSnapshots share strings copied from the JS heap
// so they can be returned even after the originals have been collected.
// HeapSnapshotGenerator fills in a HeapSnapshot.
class HeapSnapshot {
 public:
  enum Type {
    kFull = v8::HeapSnapshot::kFull
  };

  HeapSnapshot(HeapSnapshotsCollection* collection,
               Type type,
               const char* title,
               unsigned uid);
  ~HeapSnapshot();
  void Delete();

  HeapSnapshotsCollection* collection() { return collection_; }
  Type type() { return type_; }
  const char* title() { return title_; }
  unsigned uid() { return uid_; }
  HeapEntry* root() { return root_entry_; }
  HeapEntry* gc_roots() { return gc_roots_entry_; }
  HeapEntry* natives_root() { return natives_root_entry_; }
  List<HeapEntry*>* entries() { return &entries_; }
  int raw_entries_size() { return raw_entries_size_; }

  void AllocateEntries(
      int entries_count, int children_count, int retainers_count);
  HeapEntry* AddEntry(HeapEntry::Type type,
                      const char* name,
                      uint64_t id,
                      int size,
                      int children_count,
                      int retainers_count);
  HeapEntry* AddRootEntry(int children_count);
  HeapEntry* AddGcRootsEntry(int children_count, int retainers_count);
  HeapEntry* AddNativesRootEntry(int children_count, int retainers_count);
  void ClearPaint();
  HeapEntry* GetEntryById(uint64_t id);
  List<HeapEntry*>* GetSortedEntriesList();
  template<class Visitor>
  void IterateEntries(Visitor* visitor) { entries_.Iterate(visitor); }
  void SetDominatorsToSelf();

  void Print(int max_depth);
  void PrintEntriesSize();

 private:
  HeapEntry* GetNextEntryToInit();

  HeapSnapshotsCollection* collection_;
  Type type_;
  const char* title_;
  unsigned uid_;
  HeapEntry* root_entry_;
  HeapEntry* gc_roots_entry_;
  HeapEntry* natives_root_entry_;
  char* raw_entries_;
  List<HeapEntry*> entries_;
  bool entries_sorted_;
  int raw_entries_size_;

  friend class HeapSnapshotTester;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshot);
};


class HeapObjectsMap {
 public:
  HeapObjectsMap();
  ~HeapObjectsMap();

  void SnapshotGenerationFinished();
  uint64_t FindObject(Address addr);
  void MoveObject(Address from, Address to);

  static uint64_t GenerateId(v8::RetainedObjectInfo* info);

  static const uint64_t kInternalRootObjectId;
  static const uint64_t kGcRootsObjectId;
  static const uint64_t kNativesRootObjectId;
  static const uint64_t kFirstAvailableObjectId;

 private:
  struct EntryInfo {
    explicit EntryInfo(uint64_t id) : id(id), accessed(true) { }
    EntryInfo(uint64_t id, bool accessed) : id(id), accessed(accessed) { }
    uint64_t id;
    bool accessed;
  };

  void AddEntry(Address addr, uint64_t id);
  uint64_t FindEntry(Address addr);
  void RemoveDeadEntries();

  static bool AddressesMatch(void* key1, void* key2) {
    return key1 == key2;
  }

  static uint32_t AddressHash(Address addr) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)),
        v8::internal::kZeroHashSeed);
  }

  bool initial_fill_mode_;
  uint64_t next_id_;
  HashMap entries_map_;
  List<EntryInfo>* entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap);
};


class HeapSnapshotsCollection {
 public:
  HeapSnapshotsCollection();
  ~HeapSnapshotsCollection();

  bool is_tracking_objects() { return is_tracking_objects_; }

  HeapSnapshot* NewSnapshot(
      HeapSnapshot::Type type, const char* name, unsigned uid);
  void SnapshotGenerationFinished(HeapSnapshot* snapshot);
  List<HeapSnapshot*>* snapshots() { return &snapshots_; }
  HeapSnapshot* GetSnapshot(unsigned uid);
  void RemoveSnapshot(HeapSnapshot* snapshot);

  StringsStorage* names() { return &names_; }
  TokenEnumerator* token_enumerator() { return token_enumerator_; }

  uint64_t GetObjectId(Address addr) { return ids_.FindObject(addr); }
  Handle<HeapObject> FindHeapObjectById(uint64_t id);
  void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }

 private:
  INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  bool is_tracking_objects_;  // Whether tracking object moves is needed.
  List<HeapSnapshot*> snapshots_;
  // Mapping from snapshots' uids to HeapSnapshot* pointers.
  HashMap snapshots_uids_;
  StringsStorage names_;
  TokenEnumerator* token_enumerator_;
  // Mapping from HeapObject addresses to objects' uids.
  HeapObjectsMap ids_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsCollection);
};
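
// A hedged sketch of the collection/factory relationship described above
// (hypothetical caller and uid, error handling omitted; the generator step
// is only indicated, it lives elsewhere):
//
//   HeapSnapshotsCollection collection;
//   HeapSnapshot* snapshot =
//       collection.NewSnapshot(HeapSnapshot::kFull, "dump", 1);
//   // ... a snapshot generator fills in |snapshot| ...
//   collection.SnapshotGenerationFinished(snapshot);
//   HeapSnapshot* same = collection.GetSnapshot(1);  // look up by uid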


// A typedef for referencing anything that can be snapshotted and that
// lives in any kind of heap memory.
typedef void* HeapThing;


// An interface that creates HeapEntries from HeapThings.
class HeapEntriesAllocator {
 public:
  virtual ~HeapEntriesAllocator() { }
  virtual HeapEntry* AllocateEntry(
      HeapThing ptr, int children_count, int retainers_count) = 0;
};


// The HeapEntriesMap instance is used to track a mapping between
// real heap objects and their representations in heap snapshots.
class HeapEntriesMap {
 public:
  HeapEntriesMap();
  ~HeapEntriesMap();

  void AllocateEntries();
  HeapEntry* Map(HeapThing thing);
  void Pair(HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry);
  void CountReference(HeapThing from, HeapThing to,
                      int* prev_children_count = NULL,
                      int* prev_retainers_count = NULL);

  int entries_count() { return entries_count_; }
  int total_children_count() { return total_children_count_; }
  int total_retainers_count() { return total_retainers_count_; }

  static HeapEntry* const kHeapEntryPlaceholder;

 private:
  struct EntryInfo {
    EntryInfo(HeapEntry* entry, HeapEntriesAllocator* allocator)
        : entry(entry),
          allocator(allocator),
          children_count(0),
          retainers_count(0) {
    }
    HeapEntry* entry;
    HeapEntriesAllocator* allocator;
    int children_count;
    int retainers_count;
  };

  static uint32_t Hash(HeapThing thing) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)),
        v8::internal::kZeroHashSeed);
  }
  static bool HeapThingsMatch(HeapThing key1, HeapThing key2) {
    return key1 == key2;
  }

  HashMap entries_;
  int entries_count_;
  int total_children_count_;
  int total_retainers_count_;

  friend class HeapObjectsSet;

  DISALLOW_COPY_AND_ASSIGN(HeapEntriesMap);
};


class HeapObjectsSet {
 public:
  HeapObjectsSet();
  void Clear();
  bool Contains(Object* object);
  void Insert(Object* obj);
  const char* GetTag(Object* obj);
  void SetTag(Object* obj, const char* tag);

 private:
  HashMap entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapObjectsSet);
};


// An interface used to populate a snapshot with nodes and edges.
class SnapshotFillerInterface {
 public:
  virtual ~SnapshotFillerInterface() { }
  virtual HeapEntry* AddEntry(HeapThing ptr,
                              HeapEntriesAllocator* allocator) = 0;
  virtual HeapEntry* FindEntry(HeapThing ptr) = 0;
  virtual HeapEntry* FindOrAddEntry(HeapThing ptr,
                                    HeapEntriesAllocator* allocator) = 0;
  virtual void SetIndexedReference(HeapGraphEdge::Type type,
                                   HeapThing parent_ptr,
                                   HeapEntry* parent_entry,
                                   int index,
                                   HeapThing child_ptr,
                                   HeapEntry* child_entry) = 0;
  virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                            HeapThing parent_ptr,
                                            HeapEntry* parent_entry,
                                            HeapThing child_ptr,
                                            HeapEntry* child_entry) = 0;
  virtual void SetNamedReference(HeapGraphEdge::Type type,
                                 HeapThing parent_ptr,
                                 HeapEntry* parent_entry,
                                 const char* reference_name,
                                 HeapThing child_ptr,
                                 HeapEntry* child_entry) = 0;
  virtual void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                          HeapThing parent_ptr,
                                          HeapEntry* parent_entry,
                                          HeapThing child_ptr,
                                          HeapEntry* child_entry) = 0;
};


class SnapshottingProgressReportingInterface {
 public:
  virtual ~SnapshottingProgressReportingInterface() { }
  virtual void ProgressStep() = 0;
  virtual bool ProgressReport(bool force) = 0;
};


// An implementation of the V8 heap graph extractor.
class V8HeapExplorer : public HeapEntriesAllocator {
 public:
  V8HeapExplorer(HeapSnapshot* snapshot,
                 SnapshottingProgressReportingInterface* progress);
  virtual ~V8HeapExplorer();
  virtual HeapEntry* AllocateEntry(
      HeapThing ptr, int children_count, int retainers_count);
  void AddRootEntries(SnapshotFillerInterface* filler);
  int EstimateObjectsCount();
  bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
  void TagGlobalObjects();

  static String* GetConstructorName(JSObject* object);

  static HeapObject* const kInternalRootObject;

 private:
  HeapEntry* AddEntry(
      HeapObject* object, int children_count, int retainers_count);
  HeapEntry* AddEntry(HeapObject* object,
                      HeapEntry::Type type,
                      const char* name,
                      int children_count,
                      int retainers_count);
  const char* GetSystemEntryName(HeapObject* object);
  void ExtractReferences(HeapObject* obj);
  void ExtractClosureReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractPropertyReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractElementReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractInternalReferences(JSObject* js_obj, HeapEntry* entry);
  void SetClosureReference(HeapObject* parent_obj,
                           HeapEntry* parent,
                           String* reference_name,
                           Object* child);
  void SetElementReference(HeapObject* parent_obj,
                           HeapEntry* parent,
                           int index,
                           Object* child);
  void SetInternalReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            const char* reference_name,
                            Object* child,
                            int field_offset = -1);
  void SetInternalReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            int index,
                            Object* child,
                            int field_offset = -1);
  void SetHiddenReference(HeapObject* parent_obj,
                          HeapEntry* parent,
                          int index,
                          Object* child);
  void SetPropertyReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            String* reference_name,
                            Object* child,
                            int field_offset = -1);
  void SetPropertyShortcutReference(HeapObject* parent_obj,
                                    HeapEntry* parent,
                                    String* reference_name,
                                    Object* child);
  void SetRootShortcutReference(Object* child);
  void SetRootGcRootsReference();
  void SetGcRootsReference(Object* child);
  void TagObject(Object* obj, const char* tag);

  HeapEntry* GetEntry(Object* obj);

  Heap* heap_;
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  SnapshottingProgressReportingInterface* progress_;
  SnapshotFillerInterface* filler_;
  HeapObjectsSet objects_tags_;

  static HeapObject* const kGcRootsObject;

  friend class IndexedReferencesExtractor;
  friend class RootsReferencesExtractor;

  DISALLOW_COPY_AND_ASSIGN(V8HeapExplorer);
};


// Extracts references to objects retained by the embedder (native objects):
// they are grouped by their v8::RetainedObjectInfo and linked to the wrapper
// objects that hold them in the V8 heap.
class NativeObjectsExplorer : public HeapEntriesAllocator {
 public:
  NativeObjectsExplorer(HeapSnapshot* snapshot,
                        SnapshottingProgressReportingInterface* progress);
  virtual ~NativeObjectsExplorer();
  virtual HeapEntry* AllocateEntry(
      HeapThing ptr, int children_count, int retainers_count);
  void AddRootEntries(SnapshotFillerInterface* filler);
  int EstimateObjectsCount();
  bool IterateAndExtractReferences(SnapshotFillerInterface* filler);

 private:
  void FillRetainedObjects();
  List<HeapObject*>* GetListMaybeDisposeInfo(v8::RetainedObjectInfo* info);
  void SetNativeRootReference(v8::RetainedObjectInfo* info);
  void SetRootNativesRootReference();
  void SetWrapperNativeReferences(HeapObject* wrapper,
                                  v8::RetainedObjectInfo* info);
  void VisitSubtreeWrapper(Object** p, uint16_t class_id);

  // Hash and match helpers for RetainedObjectInfo keys: two infos denote the
  // same native group when they are pointer-identical or report
  // IsEquivalent().
  static uint32_t InfoHash(v8::RetainedObjectInfo* info) {
    return ComputeIntegerHash(static_cast<uint32_t>(info->GetHash()),
                              v8::internal::kZeroHashSeed);
  }
  static bool RetainedInfosMatch(void* key1, void* key2) {
    return key1 == key2 ||
        (reinterpret_cast<v8::RetainedObjectInfo*>(key1))->IsEquivalent(
            reinterpret_cast<v8::RetainedObjectInfo*>(key2));
  }

  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  SnapshottingProgressReportingInterface* progress_;
  bool embedder_queried_;
  HeapObjectsSet in_groups_;
  // RetainedObjectInfo* -> List<HeapObject*>*
  HashMap objects_by_info_;
  // Used during reference extraction.
  SnapshotFillerInterface* filler_;

  static HeapThing const kNativesRootObject;

  friend class GlobalHandlesExtractor;

  DISALLOW_COPY_AND_ASSIGN(NativeObjectsExplorer);
};


// Drives heap snapshot generation: fills the snapshot using the V8 heap and
// native objects explorers, then computes dominators and approximate retained
// sizes, reporting progress through the embedder-supplied ActivityControl.
class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
 public:
  HeapSnapshotGenerator(HeapSnapshot* snapshot,
                        v8::ActivityControl* control);
  bool GenerateSnapshot();

 private:
  bool ApproximateRetainedSizes();
  bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
                          Vector<HeapEntry*>* dominators);
  bool CountEntriesAndReferences();
  bool FillReferences();
  void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
  void ProgressStep();
  bool ProgressReport(bool force = false);
  bool SetEntriesDominators();
  void SetProgressTotal(int iterations_count);

  HeapSnapshot* snapshot_;
  v8::ActivityControl* control_;
  V8HeapExplorer v8_heap_explorer_;
  NativeObjectsExplorer dom_explorer_;
  // Mapping from HeapThing pointers to HeapEntry* pointers.
  HeapEntriesMap entries_;
  // Used during snapshot generation.
  int progress_counter_;
  int progress_total_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotGenerator);
};
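// A minimal usage sketch (hypothetical call site; in the shipping code the
// heap profiler drives generation, so the names below are illustrative only):
//
//   HeapSnapshotGenerator generator(snapshot, control);
//   if (!generator.GenerateSnapshot()) {
//     // Generation did not run to completion (e.g. it was aborted through
//     // |control|).
//   }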

class OutputStreamWriter;

// Serializes a heap snapshot into JSON: nodes, edges and strings are written
// through the v8::OutputStream passed to Serialize().
class HeapSnapshotJSONSerializer {
 public:
  explicit HeapSnapshotJSONSerializer(HeapSnapshot* snapshot)
      : snapshot_(snapshot),
        nodes_(ObjectsMatch),
        strings_(ObjectsMatch),
        next_node_id_(1),
        next_string_id_(1),
        writer_(NULL) {
  }
  void Serialize(v8::OutputStream* stream);

 private:
  INLINE(static bool ObjectsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  INLINE(static uint32_t ObjectHash(const void* key)) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key)),
        v8::internal::kZeroHashSeed);
  }

  void EnumerateNodes();
  HeapSnapshot* CreateFakeSnapshot();
  int GetNodeId(HeapEntry* entry);
  int GetStringId(const char* s);
  void SerializeEdge(HeapGraphEdge* edge);
  void SerializeImpl();
  void SerializeNode(HeapEntry* entry);
  void SerializeNodes();
  void SerializeSnapshot();
  void SerializeString(const unsigned char* s);
  void SerializeStrings();
  void SortHashMap(HashMap* map, List<HashMap::Entry*>* sorted_entries);

  static const int kMaxSerializableSnapshotRawSize;

  HeapSnapshot* snapshot_;
  HashMap nodes_;
  HashMap strings_;
  int next_node_id_;
  int next_string_id_;
  OutputStreamWriter* writer_;

  friend class HeapSnapshotJSONSerializerEnumerator;
  friend class HeapSnapshotJSONSerializerIterator;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotJSONSerializer);
};
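// A minimal usage sketch (hypothetical call site; |stream| stands for any
// embedder-provided v8::OutputStream implementation):
//
//   HeapSnapshotJSONSerializer serializer(snapshot);
//   serializer.Serialize(stream);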

} }  // namespace v8::internal

#endif  // V8_PROFILE_GENERATOR_H_