Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame^] | 1 | // Copyright 2011 the V8 project authors. All rights reserved. |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 2 | // Redistribution and use in source and binary forms, with or without |
| 3 | // modification, are permitted provided that the following conditions are |
| 4 | // met: |
| 5 | // |
| 6 | // * Redistributions of source code must retain the above copyright |
| 7 | // notice, this list of conditions and the following disclaimer. |
| 8 | // * Redistributions in binary form must reproduce the above |
| 9 | // copyright notice, this list of conditions and the following |
| 10 | // disclaimer in the documentation and/or other materials provided |
| 11 | // with the distribution. |
| 12 | // * Neither the name of Google Inc. nor the names of its |
| 13 | // contributors may be used to endorse or promote products derived |
| 14 | // from this software without specific prior written permission. |
| 15 | // |
| 16 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 | // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 | // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 | // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 | // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 | // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 | // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 | // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 | // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | |
| 28 | #ifndef V8_GLOBAL_HANDLES_H_ |
| 29 | #define V8_GLOBAL_HANDLES_H_ |
| 30 | |
Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 31 | #include "../include/v8-profiler.h" |
| 32 | |
Ben Murdoch | 257744e | 2011-11-30 15:57:28 +0000 | [diff] [blame^] | 33 | #include "list.h" |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 34 | |
| 35 | namespace v8 { |
| 36 | namespace internal { |
| 37 | |
| 38 | // Structure for tracking global handles. |
| 39 | // A single list keeps all the allocated global handles. |
// Destroyed handles stay in the list but are added to the free list.
| 41 | // At GC the destroyed global handles are removed from the free list |
| 42 | // and deallocated. |
| 43 | |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 44 | // An object group is treated like a single JS object: if one of object in |
| 45 | // the group is alive, all objects in the same group are considered alive. |
| 46 | // An object group is used to simulate object relationship in a DOM tree. |
Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 47 | class ObjectGroup { |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 48 | public: |
Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 49 | static ObjectGroup* New(Object*** handles, |
| 50 | size_t length, |
| 51 | v8::RetainedObjectInfo* info) { |
| 52 | ASSERT(length > 0); |
| 53 | ObjectGroup* group = reinterpret_cast<ObjectGroup*>( |
| 54 | malloc(OFFSET_OF(ObjectGroup, objects_[length]))); |
| 55 | group->length_ = length; |
| 56 | group->info_ = info; |
| 57 | CopyWords(group->objects_, handles, static_cast<int>(length)); |
| 58 | return group; |
| 59 | } |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 60 | |
Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 61 | void Dispose() { |
| 62 | if (info_ != NULL) info_->Dispose(); |
| 63 | free(this); |
| 64 | } |
| 65 | |
| 66 | size_t length_; |
Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 67 | v8::RetainedObjectInfo* info_; |
Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 68 | Object** objects_[1]; // Variable sized array. |
Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 69 | |
| 70 | private: |
Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 71 | void* operator new(size_t size); |
| 72 | void operator delete(void* p); |
| 73 | ~ObjectGroup(); |
| 74 | DISALLOW_IMPLICIT_CONSTRUCTORS(ObjectGroup); |
Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 75 | }; |
| 76 | |
| 77 | |
| 78 | // An implicit references group consists of two parts: a parent object and |
| 79 | // a list of children objects. If the parent is alive, all the children |
| 80 | // are alive too. |
Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 81 | class ImplicitRefGroup { |
Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 82 | public: |
Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 83 | static ImplicitRefGroup* New(HeapObject** parent, |
| 84 | Object*** children, |
| 85 | size_t length) { |
| 86 | ASSERT(length > 0); |
| 87 | ImplicitRefGroup* group = reinterpret_cast<ImplicitRefGroup*>( |
| 88 | malloc(OFFSET_OF(ImplicitRefGroup, children_[length]))); |
| 89 | group->parent_ = parent; |
| 90 | group->length_ = length; |
| 91 | CopyWords(group->children_, children, static_cast<int>(length)); |
| 92 | return group; |
| 93 | } |
Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 94 | |
Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 95 | void Dispose() { |
| 96 | free(this); |
| 97 | } |
| 98 | |
| 99 | HeapObject** parent_; |
| 100 | size_t length_; |
| 101 | Object** children_[1]; // Variable sized array. |
Steve Block | 44f0eee | 2011-05-26 01:26:41 +0100 | [diff] [blame] | 102 | |
| 103 | private: |
Ben Murdoch | 8b112d2 | 2011-06-08 16:22:53 +0100 | [diff] [blame] | 104 | void* operator new(size_t size); |
| 105 | void operator delete(void* p); |
| 106 | ~ImplicitRefGroup(); |
| 107 | DISALLOW_IMPLICIT_CONSTRUCTORS(ImplicitRefGroup); |
Steve Block | a7e24c1 | 2009-10-30 11:49:00 +0000 | [diff] [blame] | 108 | }; |
| 109 | |
| 110 | |
// Visitor invoked per weak object by
// GlobalHandles::IterateWeakRoots(WeakReferenceGuest, WeakReferenceCallback);
// |parameter| is the parameter that was registered with MakeWeak.
typedef void (*WeakReferenceGuest)(Object* object, void* parameter);
| 112 | |
class GlobalHandles {
 public:
  ~GlobalHandles();

  // Creates a new global handle that is alive until Destroy is called.
  Handle<Object> Create(Object* value);

  // Destroy a global handle.
  void Destroy(Object** location);

  // Make the global handle weak and set the callback parameter for the
  // handle.  When the garbage collector recognizes that only weak global
  // handles point to an object the handles are cleared and the callback
  // function is invoked (for each handle) with the handle and corresponding
  // parameter as arguments.  Note: cleared means set to Smi::FromInt(0). The
  // reason is that Smi::FromInt(0) does not change during garbage collection.
  void MakeWeak(Object** location,
                void* parameter,
                WeakReferenceCallback callback);

  // Assigns an embedder-defined class id to the handle; handles carrying a
  // class id can later be visited via IterateAllRootsWithClassIds.
  static void SetWrapperClassId(Object** location, uint16_t class_id);

  // Returns the current number of weak handles.
  int NumberOfWeakHandles() { return number_of_weak_handles_; }

  // Records handle-related statistics into |stats|.
  void RecordStats(HeapStats* stats);

  // Returns the current number of weak handles to global objects.
  // These handles are also included in NumberOfWeakHandles().
  int NumberOfGlobalObjectWeakHandles() {
    return number_of_global_object_weak_handles_;
  }

  // Clear the weakness of a global handle.
  void ClearWeakness(Object** location);

  // Mark the global handle as independent.
  // NOTE(review): presumably independent handles are processed by the GC
  // without regard to object groups (see the *Independent* iteration methods
  // below) -- confirm against global-handles.cc.
  void MarkIndependent(Object** location);

  // Tells whether global handle is near death.
  static bool IsNearDeath(Object** location);

  // Tells whether global handle is weak.
  static bool IsWeak(Object** location);

  // Process pending weak handles.
  // Returns true if next major GC is likely to collect more garbage.
  bool PostGarbageCollectionProcessing(GarbageCollector collector);

  // Iterates over all strong handles.
  void IterateStrongRoots(ObjectVisitor* v);

  // Iterates over all strong and dependent handles.
  void IterateStrongAndDependentRoots(ObjectVisitor* v);

  // Iterates over all handles.
  void IterateAllRoots(ObjectVisitor* v);

  // Iterates over all handles that have embedder-assigned class ID.
  void IterateAllRootsWithClassIds(ObjectVisitor* v);

  // Iterates over all weak roots in heap.
  void IterateWeakRoots(ObjectVisitor* v);

  // Iterates over all weak independent roots in heap.
  void IterateWeakIndependentRoots(ObjectVisitor* v);

  // Iterates over weak roots that are bound to a given callback.
  void IterateWeakRoots(WeakReferenceGuest f,
                        WeakReferenceCallback callback);

  // Find all weak handles satisfying the callback predicate, mark
  // them as pending.
  void IdentifyWeakHandles(WeakSlotCallback f);

  // Find all weak independent handles satisfying the callback predicate,
  // mark them as pending.
  void IdentifyWeakIndependentHandles(WeakSlotCallbackWithHeap f);

  // Add an object group.
  // Should only be used in a GC callback function before a collection.
  // All groups are destroyed after a mark-compact collection.
  void AddObjectGroup(Object*** handles,
                      size_t length,
                      v8::RetainedObjectInfo* info);

  // Add an implicit references' group.
  // Should only be used in a GC callback function before a collection.
  // All groups are destroyed after a mark-compact collection.
  void AddImplicitReferences(HeapObject** parent,
                             Object*** children,
                             size_t length);

  // Returns the object groups.
  List<ObjectGroup*>* object_groups() { return &object_groups_; }

  // Returns the implicit references' groups.
  List<ImplicitRefGroup*>* implicit_ref_groups() {
    return &implicit_ref_groups_;
  }

  // Remove bags, this should only happen after GC.
  void RemoveObjectGroups();
  void RemoveImplicitRefGroups();

  // Tear down the global handle structure.
  void TearDown();

  // The isolate this GlobalHandles instance belongs to.
  Isolate* isolate() { return isolate_; }

#ifdef DEBUG
  void PrintStats();
  void Print();
#endif
  class Pool;
 private:
  // Construction is restricted to Isolate (declared friend below).
  explicit GlobalHandles(Isolate* isolate);

  // Internal node structure, one for each global handle.
  class Node;

  Isolate* isolate_;

  // Field always containing the number of weak and near-death handles.
  int number_of_weak_handles_;

  // Field always containing the number of weak and near-death handles
  // to global objects. These objects are also included in
  // number_of_weak_handles_.
  int number_of_global_object_weak_handles_;

  // Global handles are kept in a single linked list pointed to by head_.
  Node* head_;
  Node* head() { return head_; }
  void set_head(Node* value) { head_ = value; }

  // Free list for DESTROYED global handles not yet deallocated.
  Node* first_free_;
  Node* first_free() { return first_free_; }
  void set_first_free(Node* value) { first_free_ = value; }

  // List of deallocated nodes.
  // Deallocated nodes form a prefix of all the nodes and
  // |first_deallocated| points to last deallocated node before
  // |head|. Those deallocated nodes are additionally linked
  // by |next_free|:
  //   1st deallocated          head
  //        |                    |
  //        V                    V
  //      node     node ...    node    node
  //       .next ->  .next ->   .next ->
  //    <- .next_free <- .next_free <- .next_free
  Node* first_deallocated_;
  Node* first_deallocated() { return first_deallocated_; }
  void set_first_deallocated(Node* value) {
    first_deallocated_ = value;
  }

  Pool* pool_;
  int post_gc_processing_count_;
  List<ObjectGroup*> object_groups_;
  List<ImplicitRefGroup*> implicit_ref_groups_;

  friend class Isolate;

  DISALLOW_COPY_AND_ASSIGN(GlobalHandles);
};
| 280 | |
| 281 | |
| 282 | } } // namespace v8::internal |
| 283 | |
| 284 | #endif // V8_GLOBAL_HANDLES_H_ |